Merge "Downmix : Add libeffect implementation" am: f1a9007f3d am: ecc90a6127 am: ba1af68dcd

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2353846

Change-Id: If3e48fae8232a0db9ef1d5d41ada85004f889ea7
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Android.bp b/camera/Android.bp
index 3e28e4f..f27eb31 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -47,7 +47,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
-cc_library_shared {
+cc_library {
     name: "libcamera_client",
 
     aidl: {
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index d1aa36a..0706ac1 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -130,6 +130,12 @@
         return err;
     }
 
+    int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+        return err;
+    }
+
     mWidth = width;
     mHeight = height;
     mFormat = format;
@@ -146,6 +152,7 @@
     mHistogramCounts = std::move(histogramCounts);
     mDynamicRangeProfile = dynamicRangeProfile;
     mStreamUseCase = streamUseCase;
+    mColorSpace = colorSpace;
 
     return OK;
 }
@@ -238,6 +245,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+        ALOGE("%s: Failed to write color space", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index b37803a..151b653 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -598,7 +598,6 @@
 status_t VendorTagDescriptor::setAsGlobalVendorTagDescriptor(const sp<VendorTagDescriptor>& desc) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptor = desc;
 
     vendor_tag_ops_t* opsPtr = NULL;
     if (desc != NULL) {
@@ -613,6 +612,9 @@
         ALOGE("%s: Could not set vendor tag descriptor, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptor = desc;
+
     return res;
 }
 
@@ -631,7 +633,6 @@
         const sp<VendorTagDescriptorCache>& cache) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptorCache = cache;
 
     struct vendor_tag_cache_ops* opsPtr = NULL;
     if (cache != NULL) {
@@ -646,6 +647,9 @@
         ALOGE("%s: Could not set vendor tag cache, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptorCache = cache;
+
     return res;
 }
 
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index fefea13..be8a00f 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -46,6 +46,13 @@
     int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
 
     /**
+     * Returns the necessary autoframing override for the top activity which
+     * will be one of ({@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_FALSE},
+     * {@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_TRUE}).
+     */
+    int getAutoframingOverride(String packageName);
+
+    /**
      * Checks if the camera has been disabled via device policy.
      */
     boolean isCameraDisabled(int userId);
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 11d4960..d50566d 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -26,8 +26,8 @@
 #include <system/camera_metadata.h>
 #include <utils/String8.h>
 
-namespace android {
 
+namespace android {
 
 const int OutputConfiguration::INVALID_ROTATION = -1;
 const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -81,6 +81,10 @@
     return mDynamicRangeProfile;
 }
 
+int32_t OutputConfiguration::getColorSpace() const {
+    return mColorSpace;
+}
+
 int64_t OutputConfiguration::getStreamUseCase() const {
     return mStreamUseCase;
 }
@@ -103,6 +107,7 @@
         mIsShared(false),
         mIsMultiResolution(false),
         mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
         mTimestampBase(TIMESTAMP_BASE_DEFAULT),
         mMirrorMode(MIRROR_MODE_AUTO) {
@@ -191,6 +196,11 @@
         ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
         return err;
     }
+    int32_t colorSpace;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+        return err;
+    }
 
     int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -230,6 +240,7 @@
 
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
     mDynamicRangeProfile = dynamicProfile;
+    mColorSpace = colorSpace;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
@@ -252,6 +263,7 @@
     mPhysicalCameraId = physicalId;
     mIsMultiResolution = false;
     mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     mTimestampBase = TIMESTAMP_BASE_DEFAULT;
     mMirrorMode = MIRROR_MODE_AUTO;
@@ -265,6 +277,7 @@
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
     mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
     mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+    mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
     mMirrorMode(MIRROR_MODE_AUTO) { }
@@ -317,6 +330,9 @@
     err = parcel->writeInt64(mDynamicRangeProfile);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mColorSpace);
+    if (err != OK) return err;
+
     err = parcel->writeInt64(mStreamUseCase);
     if (err != OK) return err;
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..27ebb7a 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,7 +43,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index 8f51458..6a311f2 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -5,3 +5,4 @@
     ioprio rt 4
     task_profiles CameraServiceCapacity MaxPerformance
     rlimit rtprio 10 10
+    onrestart class_restart cameraWatchdog
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index aaa88b2..90ee924 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -67,22 +67,26 @@
     int64_t mDynamicRangeProfile;
     // Stream use case
     int64_t mStreamUseCase;
+    // Color space
+    int32_t mColorSpace;
 
     CameraStreamStats() :
             mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
             mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
             mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
-            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+            mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
     CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
             int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
-            int streamUseCase)
+            int streamUseCase, int32_t colorSpace)
             : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
               mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
               mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
               mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
               mDynamicRangeProfile(dynamicRangeProfile),
-              mStreamUseCase(streamUseCase) {}
+              mStreamUseCase(streamUseCase),
+              mColorSpace(colorSpace) {}
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index b7c7f7f..a713b40 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -61,6 +61,7 @@
     int                        getWidth() const;
     int                        getHeight() const;
     int64_t                    getDynamicRangeProfile() const;
+    int32_t                    getColorSpace() const;
     bool                       isDeferred() const;
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
@@ -111,6 +112,7 @@
                 mIsMultiResolution == other.mIsMultiResolution &&
                 sensorPixelModesUsedEqual(other) &&
                 mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mColorSpace == other.mColorSpace &&
                 mStreamUseCase == other.mStreamUseCase &&
                 mTimestampBase == other.mTimestampBase &&
                 mMirrorMode == other.mMirrorMode);
@@ -153,6 +155,9 @@
         if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
             return mDynamicRangeProfile < other.mDynamicRangeProfile;
         }
+        if (mColorSpace != other.mColorSpace) {
+            return mColorSpace < other.mColorSpace;
+        }
         if (mStreamUseCase != other.mStreamUseCase) {
             return mStreamUseCase < other.mStreamUseCase;
         }
@@ -187,6 +192,7 @@
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int64_t                    mDynamicRangeProfile;
+    int32_t                    mColorSpace;
     int64_t                    mStreamUseCase;
     int                        mTimestampBase;
     int                        mMirrorMode;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 05124c0..4995dc4 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -537,6 +537,8 @@
         case ACAMERA_CONTROL_ENABLE_ZSL:
         case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
         case ACAMERA_CONTROL_ZOOM_RATIO:
+        case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
+        case ACAMERA_CONTROL_AUTOFRAMING:
         case ACAMERA_EDGE_MODE:
         case ACAMERA_FLASH_MODE:
         case ACAMERA_HOT_PIXEL_MODE:
@@ -585,6 +587,7 @@
     ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
     ANDROID_CONTROL_AE_PRECAPTURE_ID,
     ANDROID_CONTROL_AF_TRIGGER_ID,
+    ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
     ANDROID_DEMOSAIC_MODE,
     ANDROID_EDGE_STRENGTH,
     ANDROID_FLASH_FIRING_POWER,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..7388678 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
  * Query the capabilities of a camera device. These capabilities are
  * immutable for a given camera.
  *
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
  *
  * <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
  * characteristics.</p>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..a9f53dd 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in
+     * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * , and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
@@ -141,9 +144,11 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * , and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0d156a5..152b786 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -74,6 +74,8 @@
     ACAMERA_HEIC_INFO,
     ACAMERA_AUTOMOTIVE,
     ACAMERA_AUTOMOTIVE_LENS,
+    ACAMERA_EXTENSION,
+    ACAMERA_JPEGR,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -119,6 +121,8 @@
     ACAMERA_HEIC_INFO_START        = ACAMERA_HEIC_INFO         << 16,
     ACAMERA_AUTOMOTIVE_START       = ACAMERA_AUTOMOTIVE        << 16,
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
+    ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
+    ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -2044,6 +2048,175 @@
      */
     ACAMERA_CONTROL_ZOOM_RATIO =                                // float
             ACAMERA_CONTROL_START + 47,
+    /**
+     * <p>The desired CaptureRequest settings override with which certain keys are
+     * applied earlier so that they can take effect sooner.</p>
+     *
+     * <p>Type: int32 (acamera_metadata_enum_android_control_settings_override_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>There are some CaptureRequest keys which can be applied earlier than others
+     * when controls within a CaptureRequest aren't required to take effect at the same time.
+     * One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+     * As soon as the camera device receives the CaptureRequest, it can apply the requested
+     * zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+     * latency.</p>
+     * <p>This key's value in the capture result reflects whether the controls for this capture
+     * are overridden "by" a newer request. This means that if a capture request turns on
+     * settings override, the capture result of an earlier request will contain the key value
+     * of ZOOM. On the other hand, if a capture request has settings override turned on,
+     * but all newer requests have it turned off, the key's value in the capture result will
+     * be OFF because this capture isn't overridden by a newer capture. In the two examples
+     * below, the capture results columns illustrate the settingsOverride values in different
+     * scenarios.</p>
+     * <p>Assuming the zoom settings override can speed up zoom application by 1 frame, the
+     * example below illustrates the speed-up at the start of a capture session:</p>
+     * <pre><code>Camera session created
+     * Request 1 (zoom=1.0x, override=ZOOM) -&gt;
+     * Request 2 (zoom=1.2x, override=ZOOM) -&gt;
+     * Request 3 (zoom=1.4x, override=ZOOM) -&gt;  Result 1 (zoom=1.2x, override=ZOOM)
+     * Request 4 (zoom=1.6x, override=ZOOM) -&gt;  Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=ZOOM) -&gt;  Result 3 (zoom=1.6x, override=ZOOM)
+     *                                      -&gt;  Result 4 (zoom=1.8x, override=ZOOM)
+     *                                      -&gt;  Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>The application can turn on settings override and use zoom as normal. The example
+     * shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+     * values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).</p>
+     * <p>The application must make sure the settings override doesn't interfere with user
+     * journeys requiring simultaneous application of all controls in CaptureRequest on the
+     * requested output targets. For example, if the application takes a still capture using
+     * CameraCaptureSession#capture, and the repeating request immediately sets a different
+     * zoom value using override, the inflight still capture could have its zoom value
+     * overwritten unexpectedly.</p>
+     * <p>So the application is strongly recommended to turn off settingsOverride when taking
+     * still/burst captures, and turn it back on when there is only repeating viewfinder
+     * request and no inflight still/burst captures.</p>
+     * <p>Below is the example demonstrating the transitions in and out of the
+     * settings override:</p>
+     * <pre><code>Request 1 (zoom=1.0x, override=OFF)
+     * Request 2 (zoom=1.2x, override=OFF)
+     * Request 3 (zoom=1.4x, override=ZOOM)  -&gt; Result 1 (zoom=1.0x, override=OFF)
+     * Request 4 (zoom=1.6x, override=ZOOM)  -&gt; Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=OFF)   -&gt; Result 3 (zoom=1.6x, override=ZOOM)
+     *                                       -&gt; Result 4 (zoom=1.6x, override=OFF)
+     *                                       -&gt; Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>This example shows that:</p>
+     * <ul>
+     * <li>The application "ramps in" settings override by setting the control to ZOOM.
+     * In the example, request #3 enables zoom settings override. Because the camera device
+     * can speed up applying zoom by 1 frame, the output of request #2 has 1.4x zoom, the
+     * value specified in request #3.</li>
+     * <li>The application "ramps out" of settings override by setting the control to OFF. In
+     * the example, request #5 changes the override to OFF. Because request #4's zoom
+     * takes effect in result #3, result #4's zoom remains the same until the new value takes
+     * effect in result #5.</li>
+     * </ul>
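+     * <p>For illustration only, a minimal hypothetical sketch using the NDK request setters from
+     * NdkCaptureRequest.h (assuming <code>request</code> is a valid ACaptureRequest; error
+     * handling omitted):</p>
+     * <pre><code>int32_t settingsOverride = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
+     * ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE, 1, &amp;settingsOverride);
+     * // Zoom changes on subsequent repeating requests may now be applied with lower latency
+     * float zoomRatio = 2.0f;
+     * ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &amp;zoomRatio);
+     * </code></pre>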
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE =                         // int32 (acamera_metadata_enum_android_control_settings_override_t)
+            ACAMERA_CONTROL_START + 49,
+    /**
+     * <p>List of available settings overrides supported by the camera device that can
+     * be used to speed up certain controls.</p>
+     *
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>When not all controls within a CaptureRequest are required to take effect
+     * at the same time on the outputs, the camera device may apply certain request keys sooner
+     * to improve latency. This list contains such supported settings overrides. Each settings
+     * override corresponds to a set of CaptureRequest keys that can be sped up when applying.</p>
+     * <p>A supported settings override can be passed in via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">CaptureRequest#CONTROL_SETTINGS_OVERRIDE</a>, and the
+     * CaptureRequest keys corresponding to the override are applied as soon as possible, not
+     * bound by per-frame synchronization. See ACAMERA_CONTROL_SETTINGS_OVERRIDE for the
+     * CaptureRequest keys for each override.</p>
+     * <p>OFF is always included in this list.</p>
+     *
+     * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
+     */
+    ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES =              // int32[n]
+            ACAMERA_CONTROL_START + 50,
+    /**
+     * <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>Auto-framing is a special mode provided by the camera device to dynamically crop, zoom
+     * or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+     * portion of the viewport. It is primarily designed to support video calling in
+     * situations where the user isn't directly in front of the device, especially for
+     * wide-angle cameras.
+     * ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO in CaptureResult will be used
+     * to denote the coordinates of the auto-framed region.
+     * Zoom and video stabilization controls are disabled when auto-framing is enabled. The 3A
+     * regions must map the screen coordinates into the scaler crop returned from the capture
+     * result instead of using the active array sensor.</p>
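+     * <p>For illustration only, a minimal hypothetical sketch (assuming <code>request</code> is a
+     * valid ACaptureRequest and ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE reports TRUE):</p>
+     * <pre><code>uint8_t autoframing = ACAMERA_CONTROL_AUTOFRAMING_ON;
+     * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AUTOFRAMING, 1, &amp;autoframing);
+     * </code></pre>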
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_AUTOFRAMING =                               // byte (acamera_metadata_enum_android_control_autoframing_t)
+            ACAMERA_CONTROL_START + 52,
+    /**
+     * <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_available_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Will be <code>false</code> if auto-framing is not available.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE =                     // byte (acamera_metadata_enum_android_control_autoframing_available_t)
+            ACAMERA_CONTROL_START + 53,
+    /**
+     * <p>Current state of auto-framing.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_state_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>When the camera doesn't have auto-framing available (i.e
+     * <code>ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE</code> == false) or it is not enabled (i.e
+     * <code>ACAMERA_CONTROL_AUTOFRAMING</code> == OFF), the state will always be INACTIVE.
+     * Other states indicate the current auto-framing state:</p>
+     * <ul>
+     * <li>When <code>ACAMERA_CONTROL_AUTOFRAMING</code> is set to ON, auto-framing will take
+     * place. While the frame is aligning itself to center the object (doing things like
+     * zooming in, zooming out or pan), the state will be FRAMING.</li>
+     * <li>When field of view is not being adjusted anymore and has reached a stable state, the
+     * state will be CONVERGED.</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     * @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE =                         // byte (acamera_metadata_enum_android_control_autoframing_state_t)
+            ACAMERA_CONTROL_START + 54,
     ACAMERA_CONTROL_END,
 
     /**
@@ -3520,6 +3693,26 @@
      */
     ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP =      // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
             ACAMERA_REQUEST_START + 19,
+    /**
+     * <p>A list of all possible color space profiles supported by a camera device.</p>
+     *
+     * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+     * profile. If a camera does not support the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+     * capability, the dynamic range profile will always be
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+     * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+     * a color space.</p>
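+     * <p>For illustration only, a hypothetical sketch reading the int64 triples (assuming
+     * <code>chars</code> is a valid ACameraMetadata from ACameraManager_getCameraCharacteristics;
+     * the meaning of each triple element follows the description above):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * if (ACameraMetadata_getConstEntry(chars,
+     *         ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &amp;entry) == ACAMERA_OK) {
+     *     for (uint32_t i = 0; i + 2 &lt; entry.count; i += 3) {
+     *         int64_t colorSpace = entry.data.i64[i];
+     *         int64_t imageFormat = entry.data.i64[i + 1];
+     *         int64_t dynamicRangeProfile = entry.data.i64[i + 2];
+     *     }
+     * }
+     * </code></pre>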
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP =        // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+            ACAMERA_REQUEST_START + 21,
     ACAMERA_REQUEST_END,
 
     /**
@@ -7322,6 +7515,145 @@
             ACAMERA_AUTOMOTIVE_LENS_START,
     ACAMERA_AUTOMOTIVE_LENS_END,
 
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+     * <p>If the camera device supports Jpeg/R, it will support the same stream combinations with
+     * Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported
+     * by the device are determined by the device's hardware level and capabilities.</p>
+     * <p>All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+     * Configuring JPEG and Jpeg/R streams at the same time is not supported.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEGR format as OUTPUT only.</p>
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS =      // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)
+            ACAMERA_JPEGR_START,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This should correspond to the frame duration when only that
+     * stream is active, with all processing (typically in android.*.mode)
+     * set to either OFF or FAST.</p>
+     * <p>When multiple streams are used in a request, the minimum frame
+     * duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS =        // int64[4*n]
+            ACAMERA_JPEGR_START + 1,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A stall duration is how much extra time would get added
+     * to the normal minimum frame duration for a repeating request
+     * that has streams with non-zero stall.</p>
+     * <p>This functions similarly to
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for Jpeg/R
+     * streams.</p>
+     * <p>All Jpeg/R output stream formats may have a nonzero stall
+     * duration.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS =            // int64[4*n]
+            ACAMERA_JPEGR_START + 2,
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEG_R format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)
+            ACAMERA_JPEGR_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 5,
+    ACAMERA_JPEGR_END,
+
 } acamera_metadata_tag_t;
 
 /**
@@ -8475,6 +8807,87 @@
 
 } acamera_metadata_enum_android_control_extended_scene_mode_t;
 
+// ACAMERA_CONTROL_SETTINGS_OVERRIDE
+typedef enum acamera_metadata_enum_acamera_control_settings_override {
+    /**
+     * <p>No keys are applied sooner than the other keys when applying CaptureRequest
+     * settings to the camera device. This is the default value.</p>
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF                            = 0,
+
+    /**
+     * <p>Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+     * zoom related keys are:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_ZOOM_RATIO</li>
+     * <li>ACAMERA_SCALER_CROP_REGION</li>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * </ul>
+     * <p>Even though ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+     * and ACAMERA_CONTROL_AF_REGIONS are not directly zoom related, applications
+     * typically scale these regions together with ACAMERA_SCALER_CROP_REGION to have a
+     * consistent mapping within the current field of view. In this aspect, they are
+     * related to ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM                           = 1,
+
+} acamera_metadata_enum_android_control_settings_override_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING
+typedef enum acamera_metadata_enum_acamera_control_autoframing {
+    /**
+     * <p>Disable autoframing.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_OFF                                  = 0,
+
+    /**
+     * <p>Enable autoframing to keep people in the frame's field of view.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_ON                                   = 1,
+
+    /**
+     * <p>Automatically select ON or OFF based on the system level preferences.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_AUTO                                 = 2,
+
+} acamera_metadata_enum_android_control_autoframing_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_available {
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_FALSE                      = 0,
+
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_TRUE                       = 1,
+
+} acamera_metadata_enum_android_control_autoframing_available_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_STATE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_state {
+    /**
+     * <p>Auto-framing is inactive.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_INACTIVE                       = 0,
+
+    /**
+     * <p>Auto-framing is in process - either zooming in, zooming out or pan is taking place.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_FRAMING                        = 1,
+
+    /**
+     * <p>Auto-framing has reached a stable state (frame/fov is not being adjusted). The state
+     * may transition back to FRAMING if the scene changes.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_CONVERGED                      = 2,
+
+} acamera_metadata_enum_android_control_autoframing_state_t;
+
 
 
 // ACAMERA_EDGE_MODE
@@ -9448,6 +9861,99 @@
 
 } acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
 
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+    /**
+     * <p>Default value, when not explicitly specified. The Camera device will choose the color
+     * space to employ.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED   = -1,
+
+    /**
+     * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB          = 0,
+
+    /**
+     * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB   = 1,
+
+    /**
+     * <p>RGB color space scRGB-nl standardized as IEC 61966-2-2:2003.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
+
+    /**
+     * <p>RGB color space scRGB standardized as IEC 61966-2-2:2003.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB
+                                                                      = 3,
+
+    /**
+     * <p>RGB color space BT.709 standardized as Rec. ITU-R BT.709-5.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709         = 4,
+
+    /**
+     * <p>RGB color space BT.2020 standardized as Rec. ITU-R BT.2020-1.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020        = 5,
+
+    /**
+     * <p>RGB color space DCI-P3 standardized as SMPTE RP 431-2-2007.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3        = 6,
+
+    /**
+     * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3    = 7,
+
+    /**
+     * <p>RGB color space NTSC, 1953 standard.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953     = 8,
+
+    /**
+     * <p>RGB color space SMPTE C.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C       = 9,
+
+    /**
+     * <p>RGB color space Adobe RGB (1998).</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB     = 10,
+
+    /**
+     * <p>RGB color space ProPhoto RGB standardized as ROMM RGB ISO 22028-2:2013.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
+
+    /**
+     * <p>RGB color space ACES standardized as SMPTE ST 2065-1:2012.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES          = 12,
+
+    /**
+     * <p>RGB color space ACEScg standardized as Academy S-2014-004.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG        = 13,
+
+    /**
+     * <p>XYZ color space CIE XYZ. This color space assumes standard illuminant D50 as its white
+     * point.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ       = 14,
+
+    /**
+     * <p>Lab color space CIE L<em>a</em>b*. This color space uses CIE XYZ D50 as a profile conversion
+     * space.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB       = 15,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
 
 // ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
@@ -10626,6 +11132,26 @@
 
 
 
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT       = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t;
+
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations_maximum_resolution {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t;
+
+
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..bae8706
--- /dev/null
+++ b/camera/tests/fuzzer/Android.bp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_camera_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
+cc_defaults {
+    name: "camera_defaults",
+    static_libs: [
+        "libcamera_client",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libgui",
+        "libcamera_metadata",
+        "libnativewindow",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "camera_fuzzer",
+    srcs: [
+        "camera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2CaptureRequest_fuzzer",
+    srcs: [
+        "camera_c2CaptureRequest_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2ConcurrentCamera_fuzzer",
+    srcs: [
+        "camera_c2ConcurrentCamera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SubmitInfo_fuzzer",
+    srcs: [
+        "camera_c2SubmitInfo_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SessionConfiguration_fuzzer",
+    srcs: [
+        "camera_c2SessionConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2OutputConfiguration_fuzzer",
+    srcs: [
+        "camera_c2OutputConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_vendorTagDescriptor_fuzzer",
+    srcs: [
+        "camera_vendorTagDescriptor_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+    include_dirs: [
+        "system/media/camera/tests",
+        "system/media/private/camera/include",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_Parameters_fuzzer",
+    srcs: [
+        "camera_Parameters_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_SessionStats_fuzzer",
+    srcs: [
+        "camera_SessionStats_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_captureResult_fuzzer",
+    srcs: [
+        "camera_captureResult_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
diff --git a/camera/tests/fuzzer/README.md b/camera/tests/fuzzer/README.md
new file mode 100644
index 0000000..c07ac04
--- /dev/null
+++ b/camera/tests/fuzzer/README.md
@@ -0,0 +1,74 @@
+# Fuzzers for libcamera_client
+
+## Plugin Design Considerations
+The fuzzer plugins for libcamera_client are designed based on the understanding of the
+source code and try to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzers.
+
+libcamera_client supports the following parameters:
+1. Command (parameter name: `cmd`)
+2. Video Buffer Mode (parameter name: `videoBufferMode`)
+3. Preview Callback Flag (parameter name: `previewCallbackFlag`)
+4. Facing (parameter name: `facing`)
+5. Orientation (parameter name: `orientation`)
+6. Format (parameter name: `format`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `cmd` | 0.`CAMERA_CMD_START_SMOOTH_ZOOM` 1.`CAMERA_CMD_STOP_SMOOTH_ZOOM` 3.`CAMERA_CMD_SET_DISPLAY_ORIENTATION` 4.`CAMERA_CMD_ENABLE_SHUTTER_SOUND` 5.`CAMERA_CMD_PLAY_RECORDING_SOUND` 6.`CAMERA_CMD_START_FACE_DETECTION` 7.`CAMERA_CMD_STOP_FACE_DETECTION` 8.`CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG` 9.`CAMERA_CMD_PING` 10.`CAMERA_CMD_SET_VIDEO_BUFFER_COUNT` 11.`CAMERA_CMD_SET_VIDEO_FORMAT`| Value obtained from FuzzedDataProvider|
+| `videoBufferMode` |0. `ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV` 1.`ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA` 2.`ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE`| Value obtained from FuzzedDataProvider|
+| `previewCallbackFlag` | 0. `CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK` 1.`CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK` 2.`CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK` 3.`CAMERA_FRAME_CALLBACK_FLAG_NOOP` 4.`CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER` 5.`CAMERA_FRAME_CALLBACK_FLAG_CAMERA` 6.`CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER`| Value obtained from FuzzedDataProvider|
+| `facing` | 0.`android::hardware::CAMERA_FACING_BACK` 1.`android::hardware::CAMERA_FACING_FRONT`| Value obtained from FuzzedDataProvider|
+| `orientation` | 0.`0` 1.`90` 2.`180` 3.`270`| Value obtained from FuzzedDataProvider|
+| `format` | 0.`CameraParameters::PIXEL_FORMAT_YUV422SP` 1.`CameraParameters::PIXEL_FORMAT_YUV420SP` 2.`CameraParameters::PIXEL_FORMAT_YUV422I` 3.`CameraParameters::PIXEL_FORMAT_YUV420P` 4.`CameraParameters::PIXEL_FORMAT_RGB565` 5.`CameraParameters::PIXEL_FORMAT_RGBA8888` 6.`CameraParameters::PIXEL_FORMAT_JPEG` 7.`CameraParameters::PIXEL_FORMAT_BAYER_RGGB` 8.`CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE`| Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugins are always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugins feed the entire input data to the module.
+This ensures that the plugins tolerate any kind of input (empty, huge,
+malformed, etc.) and don't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes steps to build the camera_fuzzer, camera_c2CaptureRequest_fuzzer, camera_c2ConcurrentCamera_fuzzer, camera_c2SubmitInfo_fuzzer, camera_c2SessionConfiguration_fuzzer, camera_c2OutputConfiguration_fuzzer, camera_vendorTagDescriptor_fuzzer, camera_Parameters_fuzzer, camera_SessionStats_fuzzer and camera_captureResult_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) camera_fuzzer
+  $ mm -j$(nproc) camera_c2CaptureRequest_fuzzer
+  $ mm -j$(nproc) camera_c2ConcurrentCamera_fuzzer
+  $ mm -j$(nproc) camera_c2SubmitInfo_fuzzer
+  $ mm -j$(nproc) camera_c2SessionConfiguration_fuzzer
+  $ mm -j$(nproc) camera_c2OutputConfiguration_fuzzer
+  $ mm -j$(nproc) camera_vendorTagDescriptor_fuzzer
+  $ mm -j$(nproc) camera_Parameters_fuzzer
+  $ mm -j$(nproc) camera_SessionStats_fuzzer
+  $ mm -j$(nproc) camera_captureResult_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_fuzzer/camera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2CaptureRequest_fuzzer/camera_c2CaptureRequest_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2ConcurrentCamera_fuzzer/camera_c2ConcurrentCamera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SubmitInfo_fuzzer/camera_c2SubmitInfo_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SessionConfiguration_fuzzer/camera_c2SessionConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2OutputConfiguration_fuzzer/camera_c2OutputConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_vendorTagDescriptor_fuzzer/camera_vendorTagDescriptor_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_Parameters_fuzzer/camera_Parameters_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_SessionStats_fuzzer/camera_SessionStats_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_captureResult_fuzzer/camera_captureResult_fuzzer
+```
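+
+A seed corpus directory can also be passed as the first argument (standard libFuzzer behavior);
+for example, assuming a hypothetical corpus path:
+```
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_fuzzer/camera_fuzzer /data/fuzz/${TARGET_ARCH}/camera_fuzzer/corpus
+```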
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/camera/tests/fuzzer/camera2common.h b/camera/tests/fuzzer/camera2common.h
new file mode 100644
index 0000000..14a1b1b
--- /dev/null
+++ b/camera/tests/fuzzer/camera2common.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CAMERA2COMMON_H
+#define CAMERA2COMMON_H
+
+#include <binder/Parcel.h>
+
+using namespace android;
+
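+// Helper templates used by the camera fuzzers: call writeToParcel()/readFromParcel() with a
+// null Parcel pointer to exercise the null-handling paths (raw pointer and sp<> variants).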
+template <class type>
+void invokeReadWriteNullParcel(type* obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
+template <class type>
+void invokeReadWriteNullParcelsp(sp<type> obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
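+// Round-trip an object through a real Parcel: serialize, rewind the data position, then
+// deserialize, so both directions of the parceling code are exercised.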
+template <class type>
+void invokeReadWriteParcel(type* obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
+
+template <class type>
+void invokeReadWriteParcelsp(sp<type> obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
+
+#endif  // CAMERA2COMMON_H
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
new file mode 100644
index 0000000..45b3526
--- /dev/null
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraParameters.h>
+#include <CameraParameters2.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+
+using namespace std;
+using namespace android;
+
+string kValidFormats[] = {
+        CameraParameters::PIXEL_FORMAT_YUV422SP,      CameraParameters::PIXEL_FORMAT_YUV420SP,
+        CameraParameters::PIXEL_FORMAT_YUV422I,       CameraParameters::PIXEL_FORMAT_YUV420P,
+        CameraParameters::PIXEL_FORMAT_RGB565,        CameraParameters::PIXEL_FORMAT_RGBA8888,
+        CameraParameters::PIXEL_FORMAT_JPEG,          CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+        CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE};
+
+class CameraParametersFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraParametersFuzzer() {
+        delete mCameraParameters;
+        delete mCameraParameters2;
+    }
+
+  private:
+    void invokeCameraParameters();
+    template <class type>
+    void initCameraParameters(type** obj);
+    template <class type>
+    void cameraParametersCommon(type* obj);
+    CameraParameters* mCameraParameters = nullptr;
+    CameraParameters2* mCameraParameters2 = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
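+// Construct the parameters object either empty or from a flattened parameter string that is
+// built either from well-formed key/value pairs or from fully random data.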
+template <class type>
+void CameraParametersFuzzer::initCameraParameters(type** obj) {
+    if (mFDP->ConsumeBool()) {
+        *obj = new type();
+    } else {
+        string params;
+        if (mFDP->ConsumeBool()) {
+            int32_t width = mFDP->ConsumeIntegral<int32_t>();
+            int32_t height = mFDP->ConsumeIntegral<int32_t>();
+            int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+            int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+            params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+            params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+                params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+                params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+                params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+            }
+        } else {
+            params = mFDP->ConsumeRandomLengthString();
+        }
+        *obj = new type(String8(params.c_str()));
+    }
+}
+
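+// Exercise the getter/setter and dump() paths shared by CameraParameters and CameraParameters2,
+// using a mix of values taken from the supported lists and unconstrained fuzzer-provided values.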
+template <class type>
+void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
+    Vector<Size> supportedPreviewSizes;
+    obj->getSupportedPreviewSizes(supportedPreviewSizes);
+    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewSize(previewWidth, previewHeight);
+    obj->getPreviewSize(&previewWidth, &previewHeight);
+
+    Vector<Size> supportedVideoSizes;
+    obj->getSupportedVideoSizes(supportedVideoSizes);
+    if (supportedVideoSizes.size() != 0) {
+        int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
+        if (mFDP->ConsumeBool()) {
+            int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
+            obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
+        } else {
+            videoWidth = mFDP->ConsumeIntegral<int32_t>();
+            videoHeight = mFDP->ConsumeIntegral<int32_t>();
+            obj->setVideoSize(videoWidth, videoHeight);
+        }
+        obj->getVideoSize(&videoWidth, &videoHeight);
+        obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
+    }
+
+    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewFrameRate(fps);
+    obj->getPreviewFrameRate();
+    string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPreviewFormat(previewFormat.c_str());
+
+    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+    Vector<Size> supportedPictureSizes;
+    obj->setPictureSize(pictureWidth, pictureHeight);
+    obj->getPictureSize(&pictureWidth, &pictureHeight);
+    obj->getSupportedPictureSizes(supportedPictureSizes);
+    string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPictureFormat(pictureFormat.c_str());
+    obj->getPictureFormat();
+
+    if (mFDP->ConsumeBool()) {
+        obj->dump();
+    } else {
+        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+        Vector<String16> args = {};
+        obj->dump(fd, args);
+        close(fd);
+    }
+}
+
+void CameraParametersFuzzer::invokeCameraParameters() {
+    initCameraParameters<CameraParameters>(&mCameraParameters);
+    cameraParametersCommon<CameraParameters>(mCameraParameters);
+    initCameraParameters<CameraParameters2>(&mCameraParameters2);
+    cameraParametersCommon<CameraParameters2>(mCameraParameters2);
+
+    int32_t minFPS, maxFPS;
+    mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
+    string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                        : mFDP->ConsumeRandomLengthString();
+    mCameraParameters->previewFormatToEnum(format.c_str());
+    mCameraParameters->isEmpty();
+    Vector<int32_t> formats;
+    mCameraParameters->getSupportedPreviewFormats(formats);
+}
+
+void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCameraParameters();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    CameraParametersFuzzer cameraParametersFuzzer;
+    cameraParametersFuzzer.process(data, size);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
new file mode 100644
index 0000000..5866aaf
--- /dev/null
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraSessionStats.h>
+#include <binder/Parcel.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    CameraStreamStats* cameraStreamStats = nullptr;
+    Parcel parcelCamStreamStats;
+
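+    // Either default-construct the stream stats or build them from fuzzed fields;
+    // each field is written to the parcel only sometimes so readFromParcel also
+    // sees truncated input.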
+    if (fdp.ConsumeBool()) {
+        cameraStreamStats = new CameraStreamStats();
+    } else {
+        int32_t width = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(width);
+        }
+        int32_t height = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(height);
+        }
+        int32_t format = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(format);
+        }
+        float maxPreviewFps = fdp.ConsumeFloatingPoint<float>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeFloat(maxPreviewFps);
+        }
+        int32_t dataSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dataSpace);
+        }
+        int64_t usage = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(usage);
+        }
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(requestCount);
+        }
+        int64_t errorCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(errorCount);
+        }
+        int32_t maxHalBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxHalBuffers);
+        }
+        int32_t maxAppBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxAppBuffers);
+        }
+        int32_t dynamicRangeProfile = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dynamicRangeProfile);
+        }
+        int32_t streamUseCase = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(streamUseCase);
+        }
+        int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(colorSpace);
+        }
+
+        cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
+                                                  usage, maxHalBuffers, maxAppBuffers,
+                                                  dynamicRangeProfile, streamUseCase, colorSpace);
+    }
+
+    parcelCamStreamStats.setDataPosition(0);
+    cameraStreamStats->readFromParcel(&parcelCamStreamStats);
+    invokeReadWriteNullParcel<CameraStreamStats>(cameraStreamStats);
+    invokeReadWriteParcel<CameraStreamStats>(cameraStreamStats);
+
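+    // Same pattern for CameraSessionStats: a default or fuzzed object plus
+    // optionally written parcel fields.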
+    CameraSessionStats* cameraSessionStats = nullptr;
+    Parcel parcelCamSessionStats;
+
+    if (fdp.ConsumeBool()) {
+        cameraSessionStats = new CameraSessionStats();
+    } else {
+        string camId = fdp.ConsumeRandomLengthString();
+        String16 cameraId(camId.c_str());
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(cameraId);
+        }
+        int32_t facing = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(facing);
+        }
+        int32_t newCameraState = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(newCameraState);
+        }
+        string name = fdp.ConsumeRandomLengthString();
+        String16 clientName(name.c_str());
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(clientName);
+        }
+        int32_t apiLevel = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(apiLevel);
+        }
+        bool isNdk = fdp.ConsumeBool();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeBool(isNdk);
+        }
+        int32_t latencyMs = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(latencyMs);
+        }
+
+        cameraSessionStats = new CameraSessionStats(cameraId, facing, newCameraState, clientName,
+                                                    apiLevel, isNdk, latencyMs);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int32_t internalReconfigure = fdp.ConsumeIntegral<int32_t>();
+        parcelCamSessionStats.writeInt32(internalReconfigure);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(requestCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t resultErrorCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(resultErrorCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        bool deviceError = fdp.ConsumeBool();
+        parcelCamSessionStats.writeBool(deviceError);
+    }
+
+    parcelCamSessionStats.setDataPosition(0);
+    cameraSessionStats->readFromParcel(&parcelCamSessionStats);
+    invokeReadWriteNullParcel<CameraSessionStats>(cameraSessionStats);
+    invokeReadWriteParcel<CameraSessionStats>(cameraSessionStats);
+
+    delete cameraStreamStats;
+    delete cameraSessionStats;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
new file mode 100644
index 0000000..06215a5
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraMetadata.h>
+#include <camera2/CaptureRequest.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/view/Surface.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 1;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    sp<CaptureRequest> captureRequest = new CaptureRequest();
+    Parcel parcelCamCaptureReq;
+
+    size_t physicalCameraSettingsSize =
+            fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(physicalCameraSettingsSize);
+    }
+
+    for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
+        string id = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeString16(String16(id.c_str()));
+        }
+        CameraMetadata cameraMetadata;
+        if (fdp.ConsumeBool()) {
+            cameraMetadata = CameraMetadata();
+        } else {
+            size_t entryCapacity = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+            size_t dataCapacity = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+            cameraMetadata = CameraMetadata(entryCapacity, dataCapacity);
+        }
+        captureRequest->mPhysicalCameraSettings.push_back({id, cameraMetadata});
+        if (fdp.ConsumeBool()) {
+            cameraMetadata.writeToParcel(&parcelCamCaptureReq);
+        }
+    }
+
+    captureRequest->mIsReprocess = fdp.ConsumeBool();
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(captureRequest->mIsReprocess);
+    }
+
+    captureRequest->mSurfaceConverted = fdp.ConsumeBool();
+    if (fdp.ConsumeBool() && captureRequest->mSurfaceConverted) {
+        // 0-sized array
+        parcelCamCaptureReq.writeInt32(0);
+    }
+
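+    // When the surfaces were not already converted, create real Surfaces through
+    // SurfaceComposerClient and optionally serialize them as view::Surface shims.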
+    if (!captureRequest->mSurfaceConverted) {
+        size_t surfaceListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceListSize);
+        }
+        for (size_t idx = 0; idx < surfaceListSize; ++idx) {
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                captureRequest->mSurfaceList.push_back(surface);
+                if (fdp.ConsumeBool()) {
+                    view::Surface surfaceShim;
+                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
+                    surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+                    surfaceShim.writeToParcel(&parcelCamCaptureReq);
+                }
+                surface.clear();
+            }
+            composerClient.clear();
+            surfaceControl.clear();
+        }
+    }
+
+    size_t indexListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(indexListSize);
+    }
+
+    for (size_t idx = 0; idx < indexListSize; ++idx) {
+        int32_t streamIdx = fdp.ConsumeIntegral<int32_t>();
+        int32_t surfaceIdx = fdp.ConsumeIntegral<int32_t>();
+        captureRequest->mStreamIdxList.push_back(streamIdx);
+        captureRequest->mSurfaceIdxList.push_back(surfaceIdx);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(streamIdx);
+        }
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceIdx);
+        }
+    }
+
+    invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
+    parcelCamCaptureReq.setDataPosition(0);
+    captureRequest->readFromParcel(&parcelCamCaptureReq);
+    captureRequest.clear();
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
new file mode 100644
index 0000000..12b5bc3
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/ConcurrentCamera.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    ConcurrentCameraIdCombination camIdCombination;
+
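+    // Optionally populate the combination with a fuzz-sized list of random camera
+    // ids before the parcel round-trips.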
+    if (fdp.ConsumeBool()) {
+        size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+        for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
+            string concurrentCameraId = fdp.ConsumeRandomLengthString();
+            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+        }
+    }
+
+    invokeReadWriteNullParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+    invokeReadWriteParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+
+    CameraIdAndSessionConfiguration camIdAndSessionConfig;
+
+    if (fdp.ConsumeBool()) {
+        camIdAndSessionConfig.mCameraId = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            camIdAndSessionConfig.mSessionConfiguration = SessionConfiguration();
+        } else {
+            int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+            int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+            camIdAndSessionConfig.mSessionConfiguration =
+                    SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+        }
+    }
+
+    invokeReadWriteNullParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    invokeReadWriteParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..51ac4e8
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+    } else {
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string phyCameraId = fdp.ConsumeRandomLengthString();
+        String16 physicalCameraId(phyCameraId.c_str());
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+
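+        // Build the OutputConfiguration from either a single buffer producer or a
+        // fuzz-sized list of producers.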
+        if (fdp.ConsumeBool()) {
+            sp<IGraphicBufferProducer> iGBP = nullptr;
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                iGBP = surface->getIGraphicBufferProducer();
+            }
+            outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+            iGBP.clear();
+            composerClient.clear();
+            surfaceControl.clear();
+        } else {
+            size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+            vector<sp<IGraphicBufferProducer>> iGBPs;
+            for (size_t idx = 0; idx < iGBPSize; ++idx) {
+                sp<IGraphicBufferProducer> iGBP = nullptr;
+                sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+                sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                        static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        fdp.ConsumeIntegral<uint32_t>() /* width */,
+                        fdp.ConsumeIntegral<uint32_t>() /* height */,
+                        fdp.ConsumeIntegral<int32_t>() /* format */,
+                        fdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (surfaceControl) {
+                    sp<Surface> surface = surfaceControl->getSurface();
+                    iGBP = surface->getIGraphicBufferProducer();
+                    iGBPs.push_back(iGBP);
+                }
+                iGBP.clear();
+                composerClient.clear();
+                surfaceControl.clear();
+            }
+            outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+        }
+    }
+
+    outputConfiguration->getRotation();
+    outputConfiguration->getSurfaceSetID();
+    outputConfiguration->getSurfaceType();
+    outputConfiguration->getWidth();
+    outputConfiguration->getHeight();
+    outputConfiguration->isDeferred();
+    outputConfiguration->isShared();
+    outputConfiguration->getPhysicalCameraId();
+
+    OutputConfiguration outputConfiguration2;
+    outputConfiguration->gbpsEqual(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
+    outputConfiguration->gbpsLessThan(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
+    outputConfiguration->getGraphicBufferProducers();
+    sp<IGraphicBufferProducer> gbp;
+    outputConfiguration->addGraphicProducer(gbp);
+    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
+    invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..b2de95d
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    SessionConfiguration* sessionConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        sessionConfiguration = new SessionConfiguration();
+    } else {
+        int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+        int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+        sessionConfiguration =
+                new SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+    }
+
+    sessionConfiguration->getInputWidth();
+    sessionConfiguration->getInputHeight();
+    sessionConfiguration->getInputFormat();
+    sessionConfiguration->getOperatingMode();
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    } else {
+        sp<IGraphicBufferProducer> iGBP = nullptr;
+        sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+        sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()),
+                fdp.ConsumeIntegral<uint32_t>(), fdp.ConsumeIntegral<uint32_t>(),
+                fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
+        if (surfaceControl) {
+            sp<Surface> surface = surfaceControl->getSurface();
+            iGBP = surface->getIGraphicBufferProducer();
+            surface.clear();
+        }
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string phyCameraId = fdp.ConsumeRandomLengthString();
+        String16 physicalCameraId(phyCameraId.c_str());
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+        outputConfiguration =
+                new OutputConfiguration(iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    }
+
+    sessionConfiguration->getOutputConfigurations();
+    SessionConfiguration sessionConfiguration2;
+    sessionConfiguration->outputsEqual(sessionConfiguration2);
+    sessionConfiguration->outputsLessThan(sessionConfiguration2);
+    sessionConfiguration->inputIsMultiResolution();
+
+    invokeReadWriteNullParcel<SessionConfiguration>(sessionConfiguration);
+    invokeReadWriteParcel<SessionConfiguration>(sessionConfiguration);
+
+    delete sessionConfiguration;
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
new file mode 100644
index 0000000..dc40b0f
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/SubmitInfo.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    SubmitInfo submitInfo;
+    submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
+    submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
+    invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
new file mode 100644
index 0000000..03cf9c4
--- /dev/null
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CaptureResult.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::impl;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    PhysicalCaptureResultInfo* physicalCaptureResultInfo = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo();
+    } else {
+        string camId = fdp.ConsumeRandomLengthString();
+        String16 cameraId(camId.c_str());
+        CameraMetadata cameraMetadata = CameraMetadata();
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo(cameraId, cameraMetadata);
+    }
+
+    invokeReadWriteParcel<PhysicalCaptureResultInfo>(physicalCaptureResultInfo);
+
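+    // Populate a CaptureResult with optional metadata, result extras, and
+    // physical-camera entries, then exercise copy/move and parcel round-trips.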
+    CaptureResult* captureResult = new CaptureResult();
+
+    if (fdp.ConsumeBool()) {
+        captureResult->mMetadata = CameraMetadata();
+    }
+    if (fdp.ConsumeBool()) {
+        captureResult->mResultExtras = CaptureResultExtras();
+        string errCamId = fdp.ConsumeRandomLengthString();
+        String16 errCameraId(errCamId.c_str());
+        captureResult->mResultExtras.errorPhysicalCameraId = errCameraId;
+        captureResult->mResultExtras.isValid();
+        invokeReadWriteNullParcel<CaptureResultExtras>(&(captureResult->mResultExtras));
+    }
+    if (fdp.ConsumeBool()) {
+        size_t physicalMetadatasSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        for (size_t idx = 0; idx < physicalMetadatasSize; ++idx) {
+            captureResult->mPhysicalMetadatas.push_back(PhysicalCaptureResultInfo());
+        }
+    }
+
+    invokeReadWriteNullParcel<CaptureResult>(captureResult);
+    invokeReadWriteParcel<CaptureResult>(captureResult);
+    CaptureResult captureResult2(*captureResult);
+    CaptureResult captureResult3(move(captureResult2));
+
+    delete captureResult;
+    delete physicalCaptureResultInfo;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
new file mode 100644
index 0000000..d41e6b6
--- /dev/null
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -0,0 +1,405 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <Camera.h>
+#include <CameraBase.h>
+#include <CameraMetadata.h>
+#include <CameraParameters.h>
+#include <CameraUtils.h>
+#include <VendorTagDescriptor.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <utils/Log.h>
+#include "camera2common.h"
+#include <android/hardware/ICameraService.h>
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int32_t kFrameRateMin = 1;
+constexpr int32_t kFrameRateMax = 120;
+constexpr int32_t kCamIdMin = 0;
+constexpr int32_t kCamIdMax = 1;
+constexpr int32_t kNumMin = 0;
+constexpr int32_t kNumMax = 1024;
+constexpr int32_t kMemoryDealerSize = 1000;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+constexpr int32_t kValidCMD[] = {CAMERA_CMD_START_SMOOTH_ZOOM,
+                                 CAMERA_CMD_STOP_SMOOTH_ZOOM,
+                                 CAMERA_CMD_SET_DISPLAY_ORIENTATION,
+                                 CAMERA_CMD_ENABLE_SHUTTER_SOUND,
+                                 CAMERA_CMD_PLAY_RECORDING_SOUND,
+                                 CAMERA_CMD_START_FACE_DETECTION,
+                                 CAMERA_CMD_STOP_FACE_DETECTION,
+                                 CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG,
+                                 CAMERA_CMD_PING,
+                                 CAMERA_CMD_SET_VIDEO_BUFFER_COUNT,
+                                 CAMERA_CMD_SET_VIDEO_FORMAT};
+
+constexpr int32_t kValidVideoBufferMode[] = {ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV,
+                                             ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA,
+                                             ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE};
+
+constexpr int32_t kValidPreviewCallbackFlag[] = {
+        CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK,    CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK,
+        CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK,  CAMERA_FRAME_CALLBACK_FLAG_NOOP,
+        CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER,      CAMERA_FRAME_CALLBACK_FLAG_CAMERA,
+        CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER};
+
+constexpr int32_t kValidFacing[] = {android::hardware::CAMERA_FACING_BACK,
+                                    android::hardware::CAMERA_FACING_FRONT};
+
+constexpr int32_t kValidOrientation[] = {0, 90, 180, 270};
+
+class TestCameraListener : public CameraListener {
+  public:
+    virtual ~TestCameraListener() = default;
+
+    void notify(int32_t /*msgType*/, int32_t /*ext1*/, int32_t /*ext2*/) override { return; };
+    void postData(int32_t /*msgType*/, const sp<IMemory>& /*dataPtr*/,
+                  camera_frame_metadata_t* /*metadata*/) override {
+        return;
+    };
+    void postDataTimestamp(nsecs_t /*timestamp*/, int32_t /*msgType*/,
+                           const sp<IMemory>& /*dataPtr*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestamp(nsecs_t /*timestamp*/,
+                                           native_handle_t* /*handle*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestampBatch(
+            const std::vector<nsecs_t>& /*timestamps*/,
+            const std::vector<native_handle_t*>& /*handles*/) override {
+        return;
+    };
+};
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraFuzzer() {
+        delete mCameraMetadata;
+        mComposerClient.clear();
+        mSurfaceControl.clear();
+        mSurface.clear();
+        mCamera.clear();
+        mMemoryDealer.clear();
+        mIMem.clear();
+        mCameraListener.clear();
+        mCameraService.clear();
+    }
+
+  private:
+    bool initCamera();
+    void initCameraMetadata();
+    void invokeCamera();
+    void invokeCameraUtils();
+    void invokeCameraBase();
+    void invokeCameraMetadata();
+    void invokeSetParameters();
+    sp<Camera> mCamera = nullptr;
+    CameraMetadata* mCameraMetadata = nullptr;
+    sp<SurfaceComposerClient> mComposerClient = nullptr;
+    sp<SurfaceControl> mSurfaceControl = nullptr;
+    sp<Surface> mSurface = nullptr;
+    sp<MemoryDealer> mMemoryDealer = nullptr;
+    sp<IMemory> mIMem = nullptr;
+    sp<TestCameraListener> mCameraListener = nullptr;
+    sp<ICameraService> mCameraService = nullptr;
+    sp<ICamera> cameraDevice = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+
+    // CameraClient interface
+    void notifyCallback(int32_t, int32_t, int32_t) override { return; };
+    void dataCallback(int32_t, const sp<IMemory>&, camera_frame_metadata_t*) override { return; };
+    void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory>&) override { return; };
+    void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t*) override { return; };
+    void recordingFrameHandleCallbackTimestampBatch(const std::vector<nsecs_t>&,
+                                                    const std::vector<native_handle_t*>&) override {
+        return;
+    };
+};
+
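+// Connects to the camera service and opens a camera device with fuzzed arguments;
+// returns false if no Camera proxy could be created.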
+bool CameraFuzzer::initCamera() {
+    ProcessState::self()->startThreadPool();
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.camera"));
+    mCameraService = interface_cast<ICameraService>(binder);
+    mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
+                            String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
+                            hardware::ICameraService::USE_CALLING_PID,
+                            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                            /*overrideToPortrait*/false, &cameraDevice);
+    mCamera = Camera::create(cameraDevice);
+    if (!mCamera) {
+        return false;
+    }
+    return true;
+}
+
+void CameraFuzzer::invokeSetParameters() {
+    String8 s = mCamera->getParameters();
+    CameraParameters params(s);
+    int32_t width = mFDP->ConsumeIntegral<int32_t>();
+    int32_t height = mFDP->ConsumeIntegral<int32_t>();
+    params.setVideoSize(width, height);
+    int32_t frameRate = mFDP->ConsumeIntegralInRange<int32_t>(kFrameRateMin, kFrameRateMax);
+    params.setPreviewFrameRate(frameRate);
+    mCamera->setParameters(params.flatten());
+}
+
+void CameraFuzzer::invokeCamera() {
+    if (!initCamera()) {
+        return;
+    }
+
+    int32_t cameraId = mFDP->ConsumeIntegralInRange<int32_t>(kCamIdMin, kCamIdMax);
+    Camera::getNumberOfCameras();
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    mCamera->reconnect();
+
+    mComposerClient = new SurfaceComposerClient;
+    mSurfaceControl = mComposerClient->createSurface(
+            static_cast<String8>(mFDP->ConsumeRandomLengthString().c_str()) /* name */,
+            mFDP->ConsumeIntegral<uint32_t>() /* width */,
+            mFDP->ConsumeIntegral<uint32_t>() /* height */,
+            mFDP->ConsumeIntegral<int32_t>() /* format */,
+            mFDP->ConsumeIntegral<int32_t>() /* flags */);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewTarget(mSurface->getIGraphicBufferProducer());
+        mCamera->startPreview();
+        mCamera->stopPreview();
+        mCamera->previewEnabled();
+        mCamera->startRecording();
+        mCamera->stopRecording();
+    }
+
+    mCamera->lock();
+    mCamera->unlock();
+    mCamera->autoFocus();
+    mCamera->cancelAutoFocus();
+
+    int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->takePicture(msgType);
+    invokeSetParameters();
+    int32_t cmd;
+    if (mFDP->ConsumeBool()) {
+        cmd = mFDP->PickValueInArray(kValidCMD);
+    } else {
+        cmd = mFDP->ConsumeIntegral<int32_t>();
+    }
+    int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+    int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->sendCommand(cmd, arg1, arg2);
+
+    int32_t videoBufferMode = mFDP->PickValueInArray(kValidVideoBufferMode);
+    mCamera->setVideoBufferMode(videoBufferMode);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setVideoTarget(mSurface->getIGraphicBufferProducer());
+    }
+    mCameraListener = sp<TestCameraListener>::make();
+    mCamera->setListener(mCameraListener);
+    int32_t previewCallbackFlag;
+    if (mFDP->ConsumeBool()) {
+        previewCallbackFlag = mFDP->PickValueInArray(kValidPreviewCallbackFlag);
+    } else {
+        previewCallbackFlag = mFDP->ConsumeIntegral<int32_t>();
+    }
+    mCamera->setPreviewCallbackFlags(previewCallbackFlag);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewCallbackTarget(mSurface->getIGraphicBufferProducer());
+    }
+
+    mCamera->getRecordingProxy();
+    int32_t mode = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->setAudioRestriction(mode);
+    mCamera->getGlobalAudioRestriction();
+    mCamera->recordingEnabled();
+
+    mMemoryDealer = new MemoryDealer(kMemoryDealerSize);
+    mIMem = mMemoryDealer->allocate(kMemoryDealerSize);
+    mCamera->releaseRecordingFrame(mIMem);
+
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    mCamera->releaseRecordingFrameHandle(handle);
+
+    int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->notifyCallback(msgTypeNC, ext, ext2);
+
+    int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+    mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, mIMem);
+    mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+}
+
+void CameraFuzzer::invokeCameraUtils() {
+    CameraMetadata staticMetadata;
+    int32_t orientVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                            : mFDP->ConsumeIntegral<int32_t>();
+    uint8_t facingVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<uint8_t>();
+    staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
+    staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
+    int32_t transform = 0;
+    CameraUtils::getRotationTransform(
+            staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */, &transform /*out*/);
+    CameraUtils::isCameraServiceDisabled();
+}
+
+void CameraFuzzer::invokeCameraBase() {
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    invokeReadWriteParcel<CameraInfo>(&cameraInfo);
+
+    CameraStatus* cameraStatus = nullptr;
+
+    if (mFDP->ConsumeBool()) {
+        cameraStatus = new CameraStatus();
+    } else {
+        string cid = mFDP->ConsumeRandomLengthString();
+        String8 id(cid.c_str());
+        int32_t status = mFDP->ConsumeIntegral<int32_t>();
+        size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        vector<String8> unavailSubIds;
+        for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
+            string subId = mFDP->ConsumeRandomLengthString();
+            String8 unavailSubId(subId.c_str());
+            unavailSubIds.push_back(unavailSubId);
+        }
+        string clientPkg = mFDP->ConsumeRandomLengthString();
+        String8 clientPackage(clientPkg.c_str());
+        cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+    }
+
+    invokeReadWriteParcel<CameraStatus>(cameraStatus);
+    delete cameraStatus;
+}
+
+void CameraFuzzer::initCameraMetadata() {
+    if (mFDP->ConsumeBool()) {
+        mCameraMetadata = new CameraMetadata();
+    } else {
+        size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+    }
+}
+
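+// Exercises CameraMetadata with one entry of each supported data type, tag
+// queries, parcel round-trips, and a dump to /dev/null.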
+void CameraFuzzer::invokeCameraMetadata() {
+    initCameraMetadata();
+
+    const camera_metadata_t* metadataBuffer = nullptr;
+    if (mFDP->ConsumeBool()) {
+        metadataBuffer = mCameraMetadata->getAndLock();
+    }
+
+    mCameraMetadata->entryCount();
+    mCameraMetadata->isEmpty();
+    mCameraMetadata->bufferSize();
+    mCameraMetadata->sort();
+
+    uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+    uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
+    int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
+    int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
+    float dataFloat = mFDP->ConsumeFloatingPoint<float>();
+    double dataDouble = mFDP->ConsumeFloatingPoint<double>();
+    camera_metadata_rational dataRational;
+    dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
+    dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
+    string dataStr = mFDP->ConsumeRandomLengthString();
+    String8 dataString(dataStr.c_str());
+    size_t data_count = 1;
+    mCameraMetadata->update(tag, &dataUint8, data_count);
+    mCameraMetadata->update(tag, &dataInt32, data_count);
+    mCameraMetadata->update(tag, &dataFloat, data_count);
+    mCameraMetadata->update(tag, &dataInt64, data_count);
+    mCameraMetadata->update(tag, &dataRational, data_count);
+    mCameraMetadata->update(tag, &dataDouble, data_count);
+    mCameraMetadata->update(tag, dataString);
+
+    uint32_t tagExists = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->exists(tagExists);
+
+    uint32_t tagFind = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->find(tagFind);
+
+    uint32_t tagErase = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->erase(tagErase);
+
+    mCameraMetadata->unlock(metadataBuffer);
+    std::vector<int32_t> tagsRemoved;
+    uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
+    mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
+
+    string name = mFDP->ConsumeRandomLengthString();
+    VendorTagDescriptor vTags;
+    uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
+
+    invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
+    invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mCameraMetadata->dump(fd, verbosity, indentation);
+
+    CameraMetadata metadataCopy(mCameraMetadata->release());
+    CameraMetadata otherCameraMetadata;
+    mCameraMetadata->swap(otherCameraMetadata);
+    close(fd);
+}
+
+void CameraFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCamera();
+    invokeCameraUtils();
+    invokeCameraBase();
+    invokeCameraMetadata();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    sp<CameraFuzzer> cameraFuzzer = new CameraFuzzer();
+    cameraFuzzer->process(data, size);
+    cameraFuzzer.clear();
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
new file mode 100644
index 0000000..e14d9ce
--- /dev/null
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VendorTagDescriptor.h>
+#include <binder/Parcel.h>
+#include <camera_metadata_tests_fake_vendor.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <system/camera_vendor_tags.h>
+
+#include <camera_metadata_hidden.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kVendorTagDescriptorId = -1;
+
+extern "C" {
+
+static int zero_get_tag_count(const vendor_tag_ops_t*) {
+    return 0;
+}
+
+static int default_get_tag_count(const vendor_tag_ops_t*) {
+    return VENDOR_TAG_COUNT_ERR;
+}
+
+static void default_get_all_tags(const vendor_tag_ops_t*, uint32_t*) {}
+
+static const char* default_get_section_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_SECTION_NAME_ERR;
+}
+
+static const char* default_get_tag_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_NAME_ERR;
+}
+
+static int default_get_tag_type(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_TYPE_ERR;
+}
+
+} /*extern "C"*/
+
+static void FillWithDefaults(vendor_tag_ops_t* vOps) {
+    vOps->get_tag_count = default_get_tag_count;
+    vOps->get_all_tags = default_get_all_tags;
+    vOps->get_section_name = default_get_section_name;
+    vOps->get_tag_name = default_get_tag_name;
+    vOps->get_tag_type = default_get_tag_type;
+}
+
+class VendorTagDescriptorFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~VendorTagDescriptorFuzzer() {
+        mVendorTagDescriptor.clear();
+        mVendorTagDescriptorCache.clear();
+    }
+
+  private:
+    void initVendorTagDescriptor();
+    void invokeVendorTagDescriptor();
+    void invokeVendorTagDescriptorCache();
+    void invokeVendorTagErrorConditions();
+    sp<VendorTagDescriptor> mVendorTagDescriptor = nullptr;
+    sp<VendorTagDescriptorCache> mVendorTagDescriptorCache = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
+void VendorTagDescriptorFuzzer::initVendorTagDescriptor() {
+    if (mFDP->ConsumeBool()) {
+        mVendorTagDescriptor = new VendorTagDescriptor();
+    } else {
+        const vendor_tag_ops_t* vOps = &fakevendor_ops;
+        VendorTagDescriptor::createDescriptorFromOps(vOps, mVendorTagDescriptor);
+    }
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptor() {
+    initVendorTagDescriptor();
+
+    sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
+    vdesc->copyFrom(*mVendorTagDescriptor);
+    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+    VendorTagDescriptor::getGlobalVendorTagDescriptor();
+
+    int32_t tagCount = mVendorTagDescriptor->getTagCount();
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptor->getTagArray(tagArray);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
+            mVendorTagDescriptor->getSectionIndex(tag);
+        }
+        mVendorTagDescriptor->getAllSectionNames();
+    }
+
+    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+    uint32_t lookupTag;
+    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mVendorTagDescriptor->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+    vdesc.clear();
+    close(fd);
+}
+
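+// Builds a cache keyed by a fuzzed vendor id, then exercises tag enumeration,
+// dump, and parcel round-trips.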
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
+    mVendorTagDescriptorCache = new VendorTagDescriptorCache();
+    uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
+    initVendorTagDescriptor();
+
+    mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
+    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::getGlobalVendorTagCache();
+    sp<VendorTagDescriptor> tagDesc;
+    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+
+    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptorCache->getTagArray(tagArray, id);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_type_vendor_id(tag, id);
+        }
+    }
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::isVendorCachePresent(id);
+    mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+    mVendorTagDescriptorCache->clearGlobalVendorTagCache();
+    tagDesc.clear();
+    close(fd);
+}
+
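+// Exercises the error paths: a NULL ops table or ops reporting zero tags,
+// queries with an arbitrary tag, and a null-parcel round-trip.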
+void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
+    sp<VendorTagDescriptor> vDesc;
+    vendor_tag_ops_t vOps;
+    FillWithDefaults(&vOps);
+    vOps.get_tag_count = zero_get_tag_count;
+
+    if (mFDP->ConsumeBool()) {
+        VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
+    } else {
+        VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
+        int32_t tagCount = vDesc->getTagCount();
+        uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+        uint32_t badTagArray[tagCount + 1];
+        vDesc->getTagArray(badTagArray);
+        vDesc->getSectionName(badTag);
+        vDesc->getTagName(badTag);
+        vDesc->getTagType(badTag);
+        VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+        VendorTagDescriptor::getGlobalVendorTagDescriptor();
+        VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+        invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
+        vDesc.clear();
+    }
+}
+
+void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeVendorTagDescriptor();
+    invokeVendorTagDescriptorCache();
+    invokeVendorTagErrorConditions();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    VendorTagDescriptorFuzzer vendorTagDescriptorFuzzer;
+    vendorTagDescriptorFuzzer.process(data, size);
+    return 0;
+}
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index d866c18..d757cd6 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -88,6 +88,7 @@
 using android::Vector;
 using android::sp;
 using android::status_t;
+using android::SurfaceControl;
 
 using android::INVALID_OPERATION;
 using android::NAME_NOT_FOUND;
@@ -341,13 +342,20 @@
 static status_t prepareVirtualDisplay(
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
-        sp<IBinder>* pDisplayHandle) {
+        sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
     sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
             String8("ScreenRecorder"), false /*secure*/);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
-    t.setDisplayLayerStack(dpy, displayState.layerStack);
+    ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
+    t.setDisplayLayerStack(dpy, layerStack);
+    *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(gPhysicalDisplayId);
+    if (*mirrorRoot == nullptr) {
+        ALOGE("Failed to create a mirror for screenrecord");
+        return UNKNOWN_ERROR;
+    }
+    t.setLayerStack(*mirrorRoot, layerStack);
     t.apply();
 
     *pDisplayHandle = dpy;
@@ -738,6 +746,23 @@
     return num & ~1;
 }
 
+struct RecordingData {
+    sp<MediaCodec> encoder;
+    // Configure virtual display.
+    sp<IBinder> dpy;
+
+    sp<Overlay> overlay;
+
+    ~RecordingData() {
+        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (overlay != nullptr) overlay->stop();
+        if (encoder != nullptr) {
+            encoder->stop();
+            encoder->release();
+        }
+    }
+};
+
 /*
  * Main "do work" start point.
  *
@@ -795,12 +820,12 @@
         gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
     }
 
+    RecordingData recordingData = RecordingData();
     // Configure and start the encoder.
-    sp<MediaCodec> encoder;
     sp<FrameOutput> frameOutput;
     sp<IGraphicBufferProducer> encoderInputSurface;
     if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
-        err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
+        err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder, &encoderInputSurface);
 
         if (err != NO_ERROR && !gSizeSpecified) {
             // fallback is defined for landscape; swap if we're in portrait
@@ -813,7 +838,8 @@
                         gVideoWidth, gVideoHeight, newWidth, newHeight);
                 gVideoWidth = newWidth;
                 gVideoHeight = newHeight;
-                err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
+                err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder,
+                                      &encoderInputSurface);
             }
         }
         if (err != NO_ERROR) return err;
@@ -840,13 +866,11 @@
 
     // Configure optional overlay.
     sp<IGraphicBufferProducer> bufferProducer;
-    sp<Overlay> overlay;
     if (gWantFrameTime) {
         // Send virtual display frames to an external texture.
-        overlay = new Overlay(gMonotonicTime);
-        err = overlay->start(encoderInputSurface, &bufferProducer);
+        recordingData.overlay = new Overlay(gMonotonicTime);
+        err = recordingData.overlay->start(encoderInputSurface, &bufferProducer);
         if (err != NO_ERROR) {
-            if (encoder != NULL) encoder->release();
             return err;
         }
         if (gVerbose) {
@@ -858,11 +882,13 @@
         bufferProducer = encoderInputSurface;
     }
 
+    // We need to hold a reference to mirrorRoot during the entire recording to ensure it's not
+    // cleaned up by SurfaceFlinger. When the reference is dropped, SurfaceFlinger will delete
+    // the resource.
+    sp<SurfaceControl> mirrorRoot;
     // Configure virtual display.
-    sp<IBinder> dpy;
-    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
+    err = prepareVirtualDisplay(displayState, bufferProducer, &recordingData.dpy, &mirrorRoot);
     if (err != NO_ERROR) {
-        if (encoder != NULL) encoder->release();
         return err;
     }
 
@@ -902,7 +928,6 @@
         case FORMAT_RAW_FRAMES: {
             rawFp = prepareRawOutput(fileName);
             if (rawFp == NULL) {
-                if (encoder != NULL) encoder->release();
                 return -1;
             }
             break;
@@ -943,7 +968,8 @@
         }
     } else {
         // Main encoder loop.
-        err = runEncoder(encoder, muxer, rawFp, display, dpy, displayState.orientation);
+        err = runEncoder(recordingData.encoder, muxer, rawFp, display, recordingData.dpy,
+                         displayState.orientation);
         if (err != NO_ERROR) {
             fprintf(stderr, "Encoder failed (err=%d)\n", err);
             // fall through to cleanup
@@ -957,9 +983,6 @@
 
     // Shut everything down, starting with the producer side.
     encoderInputSurface = NULL;
-    SurfaceComposerClient::destroyDisplay(dpy);
-    if (overlay != NULL) overlay->stop();
-    if (encoder != NULL) encoder->stop();
     if (muxer != NULL) {
         // If we don't stop muxer explicitly, i.e. let the destructor run,
         // it may hang (b/11050628).
@@ -967,7 +990,6 @@
     } else if (rawFp != stdout) {
         fclose(rawFp);
     }
-    if (encoder != NULL) encoder->release();
 
     return err;
 }
@@ -1108,7 +1130,8 @@
         "    Add additional information, such as a timestamp overlay, that is helpful\n"
         "    in videos captured to illustrate bugs.\n"
         "--time-limit TIME\n"
-        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
+        "    Set the maximum recording time, in seconds.  Default is %d. Set to 0\n"
+        "    to remove the time limit.\n"
         "--display-id ID\n"
         "    specify the physical display ID to record. Default is the primary display.\n"
         "    see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
@@ -1147,13 +1170,13 @@
         { NULL,                 0,                  NULL, 0 }
     };
 
-    std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
-    if (!displayId) {
-        fprintf(stderr, "Failed to get ID for internal display\n");
+    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+    if (ids.empty()) {
+        fprintf(stderr, "Failed to get ID for any displays\n");
         return 1;
     }
 
-    gPhysicalDisplayId = *displayId;
+    gPhysicalDisplayId = ids.front();
 
     while (true) {
         int optionIndex = 0;
@@ -1195,14 +1218,27 @@
             }
             break;
         case 't':
-            gTimeLimitSec = atoi(optarg);
-            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
-                fprintf(stderr,
-                        "Time limit %ds outside acceptable range [1,%d]\n",
-                        gTimeLimitSec, kMaxTimeLimitSec);
+        {
+            char *next;
+            const int64_t timeLimitSec = strtol(optarg, &next, 10);
+            if (next == optarg || (*next != '\0' && *next != ' ')) {
+                fprintf(stderr, "Error parsing time limit argument\n");
                 return 2;
             }
+            if (timeLimitSec > std::numeric_limits<uint32_t>::max() || timeLimitSec < 0) {
+                fprintf(stderr,
+                        "Time limit %" PRIi64 "s outside acceptable range [0,%u] seconds\n",
+                        timeLimitSec, std::numeric_limits<uint32_t>::max());
+                return 2;
+            }
+            gTimeLimitSec = (timeLimitSec == 0) ?
+                    std::numeric_limits<uint32_t>::max() : timeLimitSec;
+            if (gVerbose) {
+                printf("Time limit set to %u seconds\n", gTimeLimitSec);
+                fflush(stdout);
+            }
             break;
+        }
         case 'u':
             gWantInfoScreen = true;
             gWantFrameTime = true;
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index beeab54..c43f8ce 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -411,7 +411,10 @@
         composerClient = new SurfaceComposerClient;
         CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-        const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+        const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+        CHECK(!ids.empty());
+
+        const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
         CHECK(display != nullptr);
 
         ui::DisplayMode mode;
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index 67c68e6..f042d5e 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -749,7 +749,10 @@
         composerClient = new SurfaceComposerClient;
         CHECK_EQ((status_t)OK, composerClient->initCheck());
 
-        const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+        const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+        CHECK(!ids.empty());
+
+        const android::sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
         CHECK(display != nullptr);
 
         ui::DisplayMode mode;
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 40b2392..1ffe801 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -318,7 +318,13 @@
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
     CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-    const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+    if (ids.empty()) {
+        SLOGE("Failed to get ID for any displays\n");
+        return 1;
+    }
+
+    const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
     CHECK(display != nullptr);
 
     ui::DisplayMode mode;
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index 7d045ac..2e0bfee 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -310,7 +310,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
+    if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index c06f09b..fd095b7 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -42,7 +42,7 @@
         }
         return type_names[attribute];
     }
-    
+
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 2632ebd..0044bac 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -280,7 +280,7 @@
               });
 
     logs.appendVector(allLogs);
-    return OK;
+    return toStatusT(err);
 }
 
 std::string GetExceptionMessage(const DrmStatus & err, const char *defaultMsg,
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 2d1f741..2732aa7 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -69,3 +69,58 @@
         "android.hardware.drm-service.clearkey",
     ],
 }
+
+cc_defaults {
+    name: "fuzz_aidl_clearkey_service_defaults",
+
+    srcs: [
+        "CreatePluginFactories.cpp",
+        "CryptoPlugin.cpp",
+        "DrmFactory.cpp",
+        "DrmPlugin.cpp",
+    ],
+
+    relative_install_path: "hw",
+
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcrypto",
+        "liblog",
+        "libprotobuf-cpp-lite",
+        "libutils",
+        "android.hardware.drm-V1-ndk",
+    ],
+
+    static_libs: [
+        "android.hardware.common-V2-ndk",
+        "libclearkeybase_fuzz",
+    ],
+
+    local_include_dirs: ["include"],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
+
+cc_fuzz {
+    name: "android.hardware.drm-service.clearkey.aidl_fuzzer",
+    defaults: [
+        "fuzz_aidl_clearkey_service_defaults",
+        "service_fuzzer_defaults",
+    ],
+    static_libs: [
+        "liblog",
+    ],
+    srcs: ["fuzzer.cpp"],
+    fuzz_config: {
+        cc: [
+            "hamzeh@google.com",
+        ],
+    },
+}
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index ea51e9d..054eabd 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -177,7 +177,7 @@
     UNUSED(in_optionalParameters);
 
     KeyRequestType keyRequestType = KeyRequestType::UNKNOWN;
-    std::string defaultUrl("https://default.url");
+    std::string defaultUrl("");
 
     _aidl_return->request = {};
     _aidl_return->requestType = keyRequestType;
@@ -474,6 +474,7 @@
         return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
     }
 
+    Mutex::Autolock lock(mSecurityLevelLock);
     std::map<std::vector<uint8_t>, ::aidl::android::hardware::drm::SecurityLevel>::iterator itr =
             mSecurityLevel.find(sid);
     if (itr == mSecurityLevel.end()) {
@@ -1009,6 +1010,7 @@
         return Status::ERROR_DRM_SESSION_NOT_OPENED;
     }
 
+    Mutex::Autolock lock(mSecurityLevelLock);
     std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr = mSecurityLevel.find(sid);
     if (itr != mSecurityLevel.end()) {
         mSecurityLevel[sid] = level;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
new file mode 100644
index 0000000..9ef331f
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "CreatePluginFactories.h"
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+using ::aidl::android::hardware::drm::clearkey::createDrmFactory;
+using ::aidl::android::hardware::drm::clearkey::DrmFactory;
+
+using android::fuzzService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    std::shared_ptr<DrmFactory> drmFactory = createDrmFactory();
+    fuzzService(drmFactory->asBinder().get(), FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
index 25c05f0..7acc1b6 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
@@ -182,7 +182,8 @@
     std::map<std::string, std::vector<uint8_t>> mByteArrayProperties;
     std::map<std::string, std::vector<uint8_t>> mReleaseKeysMap;
     std::map<std::vector<uint8_t>, std::string> mPlaybackId;
-    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel;
+    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel
+        GUARDED_BY(mSecurityLevelLock);
     ::std::shared_ptr<IDrmPluginListener> mListener;
     SessionLibrary* mSessionLibrary;
     int64_t mOpenSessionOkCount;
@@ -201,6 +202,7 @@
 
     DeviceFiles mFileHandle;
     ::android::Mutex mSecureStopLock;
+    ::android::Mutex mSecurityLevelLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
 };
diff --git a/drm/mediadrm/plugins/clearkey/common/Android.bp b/drm/mediadrm/plugins/clearkey/common/Android.bp
index a6a5b28..6913df4 100644
--- a/drm/mediadrm/plugins/clearkey/common/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/common/Android.bp
@@ -97,3 +97,54 @@
         integer_overflow: true,
     },
 }
+
+cc_library_static {
+    name: "libclearkeydevicefiles-protos.common_fuzz",
+
+    proto: {
+        export_proto_headers: true,
+        type: "lite",
+    },
+    srcs: ["protos/DeviceFiles.proto"],
+}
+
+cc_library_static {
+    name: "libclearkeybase_fuzz",
+
+    srcs: [
+        "AesCtrDecryptor.cpp",
+        "Base64.cpp",
+        "Buffer.cpp",
+        "ClearKeyUUID.cpp",
+        "DeviceFiles.cpp",
+        "InitDataParser.cpp",
+        "JsonWebKey.cpp",
+        "MemoryFileSystem.cpp",
+        "Session.cpp",
+        "SessionLibrary.cpp",
+        "Utils.cpp",
+    ],
+
+    cflags: ["-Wall", "-Werror"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libutils",
+        "libcrypto",
+    ],
+
+    whole_static_libs: [
+        "libjsmn",
+        "libclearkeydevicefiles-protos.common_fuzz",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "include/clearkeydrm",
+    ],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 1019520..274a89a 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -414,8 +414,6 @@
     std::map<std::string, std::vector<uint8_t> > mByteArrayProperties;
     std::map<std::string, std::vector<uint8_t> > mReleaseKeysMap;
     std::map<std::vector<uint8_t>, std::string> mPlaybackId;
-    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel
-        GUARDED_BY(mSecurityLevelLock);
     sp<IDrmPluginListener> mListener;
     sp<IDrmPluginListener_V1_2> mListenerV1_2;
     SessionLibrary *mSessionLibrary;
@@ -436,6 +434,8 @@
     DeviceFiles mFileHandle;
     Mutex mSecureStopLock;
     Mutex mSecurityLevelLock;
+    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel
+        GUARDED_BY(mSecurityLevelLock);
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
 };
diff --git a/include/media/MmapStreamCallback.h b/include/media/MmapStreamCallback.h
index 31b8eb5..76ee6d7 100644
--- a/include/media/MmapStreamCallback.h
+++ b/include/media/MmapStreamCallback.h
@@ -37,12 +37,9 @@
 
     /**
      * The volume to be applied to the use case specified when opening the stream has changed
-     * \param[in] channels a channel mask containing all channels the volume should be applied to.
-     * \param[in] values the volume values to be applied to each channel. The size of the vector
-     *                   should correspond to the channel count retrieved with
-     *                   audio_channel_count_from_in_mask() or audio_channel_count_from_out_mask()
+     * \param[in] volume the new target volume
      */
-    virtual void onVolumeChanged(audio_channel_mask_t channels, Vector<float> values) = 0;
+    virtual void onVolumeChanged(float volume) = 0;
 
     /**
      * The device the stream is routed to/from has changed
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index d4025e5..11e1704 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -42,9 +42,15 @@
         mWidth(width), mHeight(height),
         mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
         mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
-        mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
-        mSize(hasData ? (bpp * width * height) : 0),
-        mIccSize(iccSize), mBitDepth(bitDepth) {
+        mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
+        mBitDepth(bitDepth) {
+            uint32_t multVal;
+            mRowBytes = __builtin_mul_overflow(bpp, width, &multVal) ? 0 : multVal;
+            mSize = __builtin_mul_overflow(multVal, height, &multVal) ? 0 : multVal;
+            if (hasData && (mRowBytes == 0 || mSize == 0)) {
+                ALOGE("Frame rowBytes/ size overflow %dx%d bpp %d", width, height, bpp);
+                android_errorWriteLog(0x534e4554, "233006499");
+            }
     }
 
     void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
@@ -85,8 +91,6 @@
     uint32_t mSize;            // Number of bytes of frame data
     uint32_t mIccSize;         // Number of bytes of ICC data
     uint32_t mBitDepth;        // number of bits per R / G / B channel
-
-    // Adding new items must be 64-bit aligned.
 };
 
 }; // namespace android
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a22ec19..48a060b 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -88,4 +88,6 @@
         }
     ]
 
+    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
+    // platinum-postsubmit once issues in cuttlefish are fixed
 }
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index a2a79d5..257cf4e 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -23,3 +23,23 @@
     srcs: ["C2SoftAomDec.cpp"],
     static_libs: ["libaom"],
 }
+
+cc_library {
+    name: "libcodec2_soft_av1enc",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    static_libs: ["libaom"],
+
+    srcs: ["C2SoftAomEnc.cpp"],
+
+    export_include_dirs: ["."],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
new file mode 100644
index 0000000..f5620a4
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -0,0 +1,923 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAomEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAomEnc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
+
+#define DEFAULT_SPEED 10
+
+C2SoftAomEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+    : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+                                        C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+    noPrivateBuffers();  // TODO: account for our buffers here
+    noInputReferences();
+    noOutputReferences();
+    noInputLatency();
+    noTimeStretch();
+    setDerivedInstance(this);
+
+    addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                         .withConstValue(new C2StreamUsageTuning::input(
+                                 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+                         .build());
+
+    addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                         .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                         .withFields({
+                                 C2F(mSize, width).inRange(2, 2048, 2),
+                                 C2F(mSize, height).inRange(2, 2048, 2),
+                         })
+                         .withSetter(SizeSetter)
+                         .build());
+
+    addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                         .withDefault(new C2StreamBitrateModeTuning::output(
+                                 0u, C2Config::BITRATE_VARIABLE))
+                         .withFields({C2F(mBitrateMode, value)
+                                              .oneOf({C2Config::BITRATE_CONST,
+                                                      C2Config::BITRATE_VARIABLE})})
+                         .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+                         .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+                         // TODO: More restriction?
+                         .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+                         .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+                         .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+                         .withFields({C2F(mSyncFramePeriod, value).any()})
+                         .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+                         .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+                         .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+                         .withSetter(BitrateSetter)
+                         .build());
+
+    addParameter(DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+                         .withConstValue(new C2StreamIntraRefreshTuning::output(
+                                 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+                         .build());
+
+    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                         .withDefault(new C2StreamProfileLevelInfo::output(0u, PROFILE_AV1_0,
+                                                                           LEVEL_AV1_4_1))
+                         .withFields({
+                                 C2F(mProfileLevel, profile).equalTo(PROFILE_AV1_0),
+                                 C2F(mProfileLevel, level)
+                                    .oneOf({LEVEL_AV1_2, LEVEL_AV1_2_1, LEVEL_AV1_2_2,
+                                            LEVEL_AV1_2_3, LEVEL_AV1_3, LEVEL_AV1_3_1,
+                                            LEVEL_AV1_3_2, LEVEL_AV1_3_3, LEVEL_AV1_4,
+                                            LEVEL_AV1_4_1}),
+                         })
+                         .withSetter(ProfileLevelSetter)
+                         .build());
+
+    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                         .withDefault(new C2StreamPixelFormatInfo::output(
+                              0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                         .withFields({C2F(mPixelFormat, value).oneOf({
+                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+                                            HAL_PIXEL_FORMAT_YCBCR_420_888,
+                                            HAL_PIXEL_FORMAT_YCBCR_P010
+                                     })
+                         })
+                         .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                         .build());
+
+
+    addParameter(DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+                         .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+                         .withFields({C2F(mRequestSync, value).oneOf({C2_FALSE, C2_TRUE})})
+                         .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+                         .build());
+    addParameter(
+            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                    .withDefault(new C2StreamColorAspectsInfo::input(
+                            0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                    .withFields(
+                            {C2F(mColorAspects, range)
+                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                             C2F(mColorAspects, primaries)
+                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                              C2Color::PRIMARIES_OTHER),
+                             C2F(mColorAspects, transfer)
+                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                              C2Color::TRANSFER_OTHER),
+                             C2F(mColorAspects, matrix)
+                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+                    .withSetter(ColorAspectsSetter)
+                    .build());
+
+    addParameter(
+            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                    .withDefault(new C2StreamColorAspectsInfo::output(
+                            0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                    .withFields(
+                            {C2F(mCodedColorAspects, range)
+                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                             C2F(mCodedColorAspects, primaries)
+                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                              C2Color::PRIMARIES_OTHER),
+                             C2F(mCodedColorAspects, transfer)
+                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                              C2Color::TRANSFER_OTHER),
+                             C2F(mCodedColorAspects, matrix)
+                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+                    .withSetter(CodedColorAspectsSetter, mColorAspects)
+                    .build());
+}
+
+C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.value < 4096) {
+        me.set().value = 4096;
+    }
+    return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::SizeSetter(bool mayBlock,
+                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                                       C2P<C2StreamPictureSizeInfo::input>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+        me.set().width = oldMe.v.width;
+    }
+    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+        me.set().height = oldMe.v.height;
+    }
+    return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+    (void)mayBlock;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_AV1_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_AV1_4_1;
+    }
+    return C2R::Ok();
+}
+
+uint32_t C2SoftAomEnc::IntfImpl::getSyncFramePeriod() const {
+    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+        return 0;
+    }
+    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+
+C2R C2SoftAomEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+                                               C2P<C2StreamColorAspectsInfo::input>& me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+        me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+        me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+        me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+        me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+}
+C2R C2SoftAomEnc::IntfImpl::CodedColorAspectsSetter(
+        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+        const C2P<C2StreamColorAspectsInfo::input>& coded) {
+    (void)mayBlock;
+    me.set().range = coded.v.range;
+    me.set().primaries = coded.v.primaries;
+    me.set().transfer = coded.v.transfer;
+    me.set().matrix = coded.v.matrix;
+    return C2R::Ok();
+}
+
+C2SoftAomEnc::C2SoftAomEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mCodecContext(nullptr),
+      mCodecConfiguration(nullptr),
+      mCodecInterface(nullptr),
+      mStrideAlign(2),
+      mBitrateControlMode(AOM_VBR),
+      mMinQuantizer(0),
+      mMaxQuantizer(0),
+      mLastTimestamp(INT64_MAX),
+      mSignalledOutputEos(false),
+      mSignalledError(false),
+      mHeadersReceived(false),
+      mIs10Bit(false) {
+    ALOGV("Constructor");
+}
+
+C2SoftAomEnc::~C2SoftAomEnc() {
+    ALOGV("Destructor");
+    onRelease();
+}
+
+c2_status_t C2SoftAomEnc::onInit() {
+    return C2_OK;
+}
+
+c2_status_t C2SoftAomEnc::onStop() {
+    onRelease();
+    return C2_OK;
+}
+
+void C2SoftAomEnc::onReset() {
+    (void)onStop();
+}
+
+void C2SoftAomEnc::onRelease() {
+    if (mCodecContext) {
+        aom_codec_destroy(mCodecContext);
+        delete mCodecContext;
+        mCodecContext = nullptr;
+    }
+
+    if (mCodecConfiguration) {
+        delete mCodecConfiguration;
+        mCodecConfiguration = nullptr;
+    }
+
+    // this one is not allocated by us
+    mCodecInterface = nullptr;
+    mHeadersReceived = false;
+}
+
+c2_status_t C2SoftAomEnc::onFlush_sm() {
+    return onStop();
+}
+
+aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
+    aom_codec_err_t codec_return = AOM_CODEC_OK;
+
+    codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED, DEFAULT_SPEED);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ROW_MT, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CDEF, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TPL_MODEL, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_DELTAQ_MODE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ORDER_HINT, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_AQ_MODE, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MODE_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MV_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PALETTE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_OBMC, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_NOISE_SENSITIVITY, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_WARPED_MOTION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_REF_FRAME_MVS, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CFL_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ANGLE_DELTA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_FILTER_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_DISABLE_TRELLIS_QUANT, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIST_WTD_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIFF_WTD_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DUAL_FILTER, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRABC, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_MASKED_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PAETH_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_QM, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RECT_PARTITIONS, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RESTORATION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TX64, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MAX_REFERENCE_FRAMES, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+BailOut:
+    return codec_return;
+}
+
+status_t C2SoftAomEnc::initEncoder() {
+    aom_codec_err_t codec_return;
+    status_t result = UNKNOWN_ERROR;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        // Fetch config
+        mSize = mIntf->getSize_l();
+        mBitrate = mIntf->getBitrate_l();
+        mBitrateMode = mIntf->getBitrateMode_l();
+        mFrameRate = mIntf->getFrameRate_l();
+        mIntraRefresh = mIntf->getIntraRefresh_l();
+        mRequestSync = mIntf->getRequestSync_l();
+    }
+
+
+    switch (mBitrateMode->value) {
+        case C2Config::BITRATE_CONST:
+            mBitrateControlMode = AOM_CBR;
+            break;
+        case C2Config::BITRATE_VARIABLE:
+            [[fallthrough]];
+        default:
+            mBitrateControlMode = AOM_VBR;
+            break;
+    }
+
+    mCodecInterface = aom_codec_av1_cx();
+    if (!mCodecInterface) goto CleanUp;
+
+    ALOGD("AOM: initEncoder. BRMode: %u. KF: %u. QP: %u - %u, 10Bit: %d",
+          (uint32_t)mBitrateControlMode,
+          mIntf->getSyncFramePeriod(), mMinQuantizer, mMaxQuantizer, mIs10Bit);
+
+    mCodecConfiguration = new aom_codec_enc_cfg_t;
+    if (!mCodecConfiguration) goto CleanUp;
+
+    codec_return = aom_codec_enc_config_default(mCodecInterface, mCodecConfiguration,
+                                                AOM_USAGE_REALTIME);  // RT mode
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error populating default configuration for aom encoder.");
+        goto CleanUp;
+    }
+
+    mCodecConfiguration->g_w = mSize->width;
+    mCodecConfiguration->g_h = mSize->height;
+    mCodecConfiguration->g_bit_depth = mIs10Bit ? AOM_BITS_10 : AOM_BITS_8;
+    mCodecConfiguration->g_input_bit_depth = mIs10Bit ? 10 : 8;
+
+
+    mCodecConfiguration->g_threads = 0;
+    mCodecConfiguration->g_error_resilient = 0;
+
+    // The timebase unit is microseconds:
+    // g_timebase is expressed in units of 1/1000000 of a second.
+    mCodecConfiguration->g_timebase.num = 1;
+    mCodecConfiguration->g_timebase.den = 1000000;
+    // rc_target_bitrate is in kbps, mBitrate in bps
+    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+    mCodecConfiguration->rc_end_usage = AOM_CBR;
+    // Disable frame drop - not allowed in MediaCodec now.
+    mCodecConfiguration->rc_dropframe_thresh = 0;
+    // Disable lagged encoding.
+    mCodecConfiguration->g_lag_in_frames = 0;
+
+    // Disable spatial resizing.
+    mCodecConfiguration->rc_resize_mode = 0;
+    // Single-pass mode.
+    mCodecConfiguration->g_pass = AOM_RC_ONE_PASS;
+
+    // Maximum key frame interval - for CBR boost to 3000
+    mCodecConfiguration->kf_max_dist = 3000;
+    // Encoder determines optimal key frame placement automatically.
+    mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+    // Initial value of the buffer level in ms.
+    mCodecConfiguration->rc_buf_initial_sz = 500;
+    // Amount of data that the encoder should try to maintain in ms.
+    mCodecConfiguration->rc_buf_optimal_sz = 600;
+    // The amount of data that may be buffered by the decoding
+    // application in ms.
+    mCodecConfiguration->rc_buf_sz = 1000;
+
+    if (mBitrateControlMode == AOM_CBR) {
+        // Maximum amount of bits that can be subtracted from the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_undershoot_pct = 100;
+        // Maximum amount of bits that can be added to the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_overshoot_pct = 10;
+    } else {
+        // Maximum amount of bits that can be subtracted from the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_undershoot_pct = 100;
+        // Maximum amount of bits that can be added to the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_overshoot_pct = 25;
+    }
+
+    if (mIntf->getSyncFramePeriod() >= 0) {
+        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
+        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
+        mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+    }
+    if (mMinQuantizer > 0) {
+        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+    }
+    if (mMaxQuantizer > 0) {
+        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+    }
+
+    mCodecContext = new aom_codec_ctx_t;
+    if (!mCodecContext) goto CleanUp;
+    codec_return = aom_codec_enc_init(mCodecContext, mCodecInterface, mCodecConfiguration,
+                                      mIs10Bit ? AOM_CODEC_USE_HIGHBITDEPTH : 0);
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error initializing aom encoder");
+        goto CleanUp;
+    }
+
+    codec_return = setupCodecParameters();
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error setting up codec parameters");
+        goto CleanUp;
+    }
+
+    mHeadersReceived = false;
+
+    {
+        uint32_t width = mSize->width;
+        uint32_t height = mSize->height;
+        if (((uint64_t)width * height) > ((uint64_t)INT32_MAX / 3)) {
+            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
+        } else {
+            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / (mIs10Bit ? 1 : 2));
+            if (!mConversionBuffer.size()) {
+                ALOGE("Allocating conversion buffer failed.");
+            } else {
+                mNumInputFrames = -1;
+                return OK;
+            }
+        }
+    }
+
+CleanUp:
+    onRelease();
+    return result;
+}
+
+void C2SoftAomEnc::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    // Initialize output work
+    work->result = C2_OK;
+    work->workletsProcessed = 1u;
+    work->worklets.front()->output.flags = work->input.flags;
+
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    std::shared_ptr<const C2GraphicView> rView;
+    std::shared_ptr<C2Buffer> inputBuffer;
+    if (!work->input.buffers.empty()) {
+        inputBuffer = work->input.buffers[0];
+        rView = std::make_shared<const C2GraphicView>(
+                inputBuffer->data().graphicBlocks().front().map().get());
+        if (rView->error() != C2_OK) {
+            ALOGE("graphic view map err = %d", rView->error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    } else {
+        ALOGV("Empty input Buffer");
+        uint32_t flags = 0;
+        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+    aom_image_t raw_frame;
+    const C2PlanarLayout& layout = rView->layout();
+    if (!mHeadersReceived) {
+        mIs10Bit = (layout.planes[layout.PLANE_Y].bitDepth == 10);
+
+        // Re-Initialize encoder
+        if (mCodecContext) {
+            onRelease();
+        }
+    }
+    if (!mCodecContext && OK != initEncoder()) {
+        ALOGE("Failed to initialize encoder");
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        return;
+    }
+
+    if (!mHeadersReceived) {
+        Av1Config av1_config;
+        constexpr uint32_t header_length = 2048;
+        uint8_t header[header_length];
+        size_t header_bytes;
+        aom_fixed_buf_t* obu_sequence_header = aom_codec_get_global_headers(mCodecContext);
+        int ret = 1;
+        if (obu_sequence_header) {
+            if (get_av1config_from_obu(reinterpret_cast<const uint8_t*>(obu_sequence_header->buf),
+                                       obu_sequence_header->sz, false, &av1_config) == 0) {
+                ret = write_av1config(&av1_config, header_length, &header_bytes, header);
+
+            } else {
+                ALOGE("Can not get config");
+            }
+            free(obu_sequence_header->buf);
+            free(obu_sequence_header);
+        }
+
+        if (ret) {
+            ALOGE("Can not write config");
+            mSignalledError = true;
+            work->result = C2_NO_MEMORY;
+            work->workletsProcessed = 1u;
+            return;
+        }
+
+        mHeadersReceived = true;
+        std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                C2StreamInitDataInfo::output::AllocUnique(header_bytes, 0u);
+        if (!csd) {
+            ALOGE("CSD allocation failed");
+            mSignalledError = true;
+            work->result = C2_NO_MEMORY;
+            work->workletsProcessed = 1u;
+            return;
+        }
+        memcpy(csd->m.value, header, header_bytes);
+        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+        ALOGV("CSD Produced of size %zu bytes", header_bytes);
+    }
+
+    const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
+    if (inBuffer.width() < mSize->width || inBuffer.height() < mSize->height) {
+        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)", inBuffer.width(), mSize->width,
+              inBuffer.height(), mSize->height);
+        mSignalledError = true;
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+
+    uint32_t width = mSize->width;
+    uint32_t height = mSize->height;
+    if (width > 0x8000 || height > 0x8000) {
+        ALOGE("Image too big: %u x %u", width, height);
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_RGB:
+        case C2PlanarLayout::TYPE_RGBA: {
+            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
+            {
+                IntfImpl::Lock lock = mIntf->lock();
+                colorAspects = mIntf->getCodedColorAspects_l();
+            }
+            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
+                                  mConversionBuffer.size(), *rView.get(), colorAspects->matrix,
+                                  colorAspects->range);
+            aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+                         mConversionBuffer.data());
+            break;
+        }
+        case C2PlanarLayout::TYPE_YUV: {
+            const bool isYUV420_10bit = IsYUV420_10bit(*rView);
+            if (!IsYUV420(*rView) && !isYUV420_10bit) {
+                ALOGE("input is not YUV420");
+                work->result = C2_BAD_VALUE;
+                return;
+            }
+            if (!isYUV420_10bit) {
+                if (IsI420(*rView)) {
+                    // I420 compatible - though with custom offset and stride
+                    aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+                                 (uint8_t*)rView->data()[0]);
+                    raw_frame.planes[1] = (uint8_t*)rView->data()[1];
+                    raw_frame.planes[2] = (uint8_t*)rView->data()[2];
+                    raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
+                    raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
+                    raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
+                } else {
+                    // TODO(kyslov): Add image wrap for NV12
+                    // copy to I420
+                    MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
+                    if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
+                        status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
+                        if (err != OK) {
+                            ALOGE("Buffer conversion failed: %d", err);
+                            work->result = C2_BAD_VALUE;
+                            return;
+                        }
+                        aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, stride, vstride, mStrideAlign,
+                                     mConversionBuffer.data());
+                        aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+                    } else {
+                        ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+                              mConversionBuffer.size());
+                        work->result = C2_BAD_VALUE;
+                        return;
+                    }
+                }
+            } else {  // 10 bits
+                if (IsP010(*rView)) {
+                    if (mConversionBuffer.size() >= stride * vstride * 3) {
+                        uint16_t *dstY, *dstU, *dstV;
+                        dstY = (uint16_t*)mConversionBuffer.data();
+                        dstU = ((uint16_t*)mConversionBuffer.data()) + stride * vstride;
+                        dstV = dstU + (stride * vstride) / 4;
+                        convertP010ToYUV420Planar16(dstY, dstU, dstV, (uint16_t*)(rView->data()[0]),
+                                                    (uint16_t*)(rView->data()[1]), stride, stride,
+                                                    stride, stride / 2, stride / 2, stride,
+                                                    vstride);
+                        aom_img_wrap(&raw_frame, AOM_IMG_FMT_I42016, stride, vstride, mStrideAlign,
+                                     mConversionBuffer.data());
+                        aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+                    } else {
+                        ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+                              mConversionBuffer.size());
+                        work->result = C2_BAD_VALUE;
+                        return;
+                    }
+                } else {
+                    ALOGE("Image format conversion is not supported.");
+                    work->result = C2_BAD_VALUE;
+                    return;
+                }
+            }
+            break;
+        }
+        default:
+            ALOGE("Unrecognized plane type: %d", layout.type);
+            work->result = C2_BAD_VALUE;
+            return;
+    }
+
+    aom_enc_frame_flags_t flags = 0;
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh =
+                mIntf->getIntraRefresh_l();
+        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync =
+                mIntf->getRequestSync_l();
+        lock.unlock();
+
+        if (intraRefresh != mIntraRefresh) {
+            mIntraRefresh = intraRefresh;
+            ALOGV("Got mIntraRefresh request");
+        }
+
+        if (requestSync != mRequestSync) {
+            // we can handle IDR immediately
+            if (requestSync->value) {
+                // unset request
+                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+                std::vector<std::unique_ptr<C2SettingResult>> failures;
+                mIntf->config({&clearSync}, C2_MAY_BLOCK, &failures);
+                ALOGV("Got sync request");
+                flags |= AOM_EFLAG_FORCE_KF;
+            }
+            mRequestSync = requestSync;
+        }
+
+        if (bitrate != mBitrate) {
+            mBitrate = bitrate;
+            mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+            aom_codec_err_t res = aom_codec_enc_config_set(mCodecContext, mCodecConfiguration);
+            if (res != AOM_CODEC_OK) {
+                ALOGE("aom encoder failed to update bitrate: %s", aom_codec_err_to_string(res));
+                mSignalledError = true;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
+    uint64_t input_timestamp = work->input.ordinal.timestamp.peekull();
+    uint32_t frame_duration;
+    if (input_timestamp > mLastTimestamp) {
+        frame_duration = (uint32_t)(input_timestamp - mLastTimestamp);
+    } else {
+        // Timestamp did not advance: derive duration from the frame rate, defaulting to 30 fps.
+        float frame_rate = mFrameRate->value;
+        if (frame_rate < 0.001) {
+            frame_rate = 30.0;
+        }
+        frame_duration = (uint32_t)(1000000 / frame_rate + 0.5);
+    }
+    mLastTimestamp = input_timestamp;
+
+    aom_codec_err_t codec_return =
+            aom_codec_encode(mCodecContext, &raw_frame, input_timestamp, frame_duration, flags);
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("aom encoder failed to encode frame");
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        return;
+    }
+
+    bool populated = false;
+    aom_codec_iter_t encoded_packet_iterator = nullptr;
+    const aom_codec_cx_pkt_t* encoded_packet;
+    while ((encoded_packet = aom_codec_get_cx_data(mCodecContext, &encoded_packet_iterator))) {
+        if (encoded_packet->kind == AOM_CODEC_CX_FRAME_PKT) {
+            std::shared_ptr<C2LinearBlock> block;
+            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
+            if (err != C2_OK) {
+                ALOGE("fetchLinearBlock for Output failed with status %d", err);
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            C2WriteView wView = block->map().get();
+            if (wView.error()) {
+                ALOGE("write view map failed %d", wView.error());
+                work->result = C2_CORRUPTED;
+                return;
+            }
+
+            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
+            ++mNumInputFrames;
+
+            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+            uint32_t flags = 0;
+            if (end_of_stream) {
+                flags |= C2FrameData::FLAG_END_OF_STREAM;
+            }
+
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+            work->worklets.front()->output.buffers.clear();
+            std::shared_ptr<C2Buffer> buffer =
+                    createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
+            if (encoded_packet->data.frame.flags & AOM_FRAME_IS_KEY) {
+                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+                        0u /* stream id */, C2Config::SYNC_FRAME));
+            }
+            work->worklets.front()->output.buffers.push_back(buffer);
+            work->worklets.front()->output.ordinal = work->input.ordinal;
+            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
+            work->workletsProcessed = 1u;
+            populated = true;
+            if (end_of_stream) {
+                mSignalledOutputEos = true;
+                ALOGV("signalled End Of Stream");
+            }
+        }
+    }
+    if (!populated) {
+        work->workletsProcessed = 0u;
+    }
+}
+
+c2_status_t C2SoftAomEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    (void)pool;
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    return C2_OK;
+}
+
+class C2SoftAomEncFactory : public C2ComponentFactory {
+  public:
+    C2SoftAomEncFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftAomEnc(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftAomEnc::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAomEncFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAomEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
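
For readers unfamiliar with libaom, the process()/drain() logic above follows the library's standard push/pull loop. A minimal stand-alone sketch of that loop (illustrative only; the encodeOne name is ours, and error handling plus the Codec2 plumbing are omitted):

    #include <aom/aom_encoder.h>
    #include <aom/aomcx.h>

    // Push one wrapped I420 frame, then pull every finished packet.
    static void encodeOne(aom_codec_ctx_t* ctx, unsigned char* i420,
                          unsigned int width, unsigned int height,
                          aom_codec_pts_t pts, unsigned long duration, bool forceKey) {
        aom_image_t raw;
        aom_img_wrap(&raw, AOM_IMG_FMT_I420, width, height, /* align */ 1, i420);
        aom_enc_frame_flags_t flags = forceKey ? AOM_EFLAG_FORCE_KF : 0;
        aom_codec_encode(ctx, &raw, pts, duration, flags);
        aom_codec_iter_t iter = nullptr;
        const aom_codec_cx_pkt_t* pkt;
        while ((pkt = aom_codec_get_cx_data(ctx, &iter)) != nullptr) {
            if (pkt->kind == AOM_CODEC_CX_FRAME_PKT) {
                // pkt->data.frame.{buf, sz, pts, flags} are what the component
                // copies into its C2 linear block and output ordinal above.
            }
        }
    }
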
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
new file mode 100644
index 0000000..2d1bb07
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AV1_ENC_H_
+#define ANDROID_C2_SOFT_AV1_ENC_H_
+
+#include <inttypes.h>
+
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Component.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "aom/aom_encoder.h"
+#include "aom/aomcx.h"
+#include "common/av1_config.h"
+
+namespace android {
+struct C2SoftAomEnc : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftAomEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+
+    // From SimpleC2Component
+    c2_status_t onInit() override final;
+    c2_status_t onStop() override final;
+    void onReset() override final;
+    void onRelease() override final;
+    c2_status_t onFlush_sm() override final;
+
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override final;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override final;
+
+  protected:
+    virtual ~C2SoftAomEnc();
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    // Initializes aom encoder with available settings.
+    status_t initEncoder();
+
+    // aom specific opaque data structure that
+    // stores encoder state
+    aom_codec_ctx_t* mCodecContext;
+
+    // aom specific data structure that
+    // stores encoder configuration
+    aom_codec_enc_cfg_t* mCodecConfiguration;
+
+    // aom specific read-only data structure
+    // that specifies algorithm interface
+    aom_codec_iface_t* mCodecInterface;
+
+    // stride alignment for the raw frame (a power of 2)
+    int32_t mStrideAlign;
+
+    aom_rc_mode mBitrateControlMode;
+
+    // Minimum (best quality) quantizer
+    uint32_t mMinQuantizer;
+
+    // Maximum (worst quality) quantizer
+    uint32_t mMaxQuantizer;
+
+    // Last input buffer timestamp
+    uint64_t mLastTimestamp;
+
+    // Number of input frames
+    int64_t mNumInputFrames;
+
+    // Conversion buffer used when the input frame must be copied into the
+    // planar YUV420 layout expected by the encoder.
+    MemoryBlock mConversionBuffer;
+
+    // Signalled End Of Stream
+    bool mSignalledOutputEos;
+
+    // Signalled Error
+    bool mSignalledError;
+
+    bool mHeadersReceived;
+
+    bool mIs10Bit;
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+
+    aom_codec_err_t setupCodecParameters();
+};
+
+class C2SoftAomEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper);
+
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me);
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::input>& me);
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me);
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+        return mIntraRefresh;
+    }
+    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
+    }
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+        return mRequestSync;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+    uint32_t getSyncFramePeriod() const;
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                       const C2P<C2StreamColorAspectsInfo::input>& coded);
+
+  private:
+    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+    std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+
+};
+
+}  // namespace android
+#endif  // ANDROID_C2_SOFT_AV1_ENC_H_
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 199875d..d549c3b 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -414,6 +414,44 @@
         dstUV += dstUVStride;
     }
 }
+
+void convertP010ToYUV420Planar16(uint16_t *dstY, uint16_t *dstU, uint16_t *dstV,
+                                 const uint16_t *srcY, const uint16_t *srcUV,
+                                 size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+                                 size_t dstUStride, size_t dstVStride, size_t width,
+                                 size_t height, bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] >> 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            for (size_t x = 0; x < (width + 1) / 2; ++x) {
+                dstU[x] = kNeutralUVBitDepth10;
+                dstV[x] = kNeutralUVBitDepth10;
+            }
+            dstU += dstUStride;
+            dstV += dstVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstU[x] = srcUV[2 * x] >> 6;
+            dstV[x] = srcUV[2 * x + 1] >> 6;
+        }
+        dstU += dstUStride;
+        dstV += dstVStride;
+        srcUV += srcUVStride;
+    }
+}
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
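
A hedged usage sketch for the new convertP010ToYUV420Planar16 helper, showing the destination plane layout its callers assume (the wrapper function and its parameter names are illustrative, not part of this change):

    // Destination is one stride-aligned allocation laid out as Y | U | V.
    static void p010ToPlanar16(uint16_t* dst, const uint16_t* srcY, const uint16_t* srcUV,
                               size_t srcYStride, size_t srcUVStride,
                               size_t stride, size_t vstride, size_t width, size_t height) {
        uint16_t* dstY = dst;                            // full-size Y plane
        uint16_t* dstU = dstY + stride * vstride;        // quarter-size U plane
        uint16_t* dstV = dstU + (stride * vstride) / 4;  // quarter-size V plane
        convertP010ToYUV420Planar16(dstY, dstU, dstV, srcY, srcUV,
                                    srcYStride, srcUVStride,
                                    stride, stride / 2, stride / 2,
                                    width, height, /* isMonochrome */ false);
    }
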
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 7600c5b..38b7825 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -55,6 +55,12 @@
                                  size_t dstUVStride, size_t width, size_t height,
                                  bool isMonochrome = false);
 
+void convertP010ToYUV420Planar16(uint16_t *dstY, uint16_t *dstU, uint16_t *dstV,
+                                 const uint16_t *srcY, const uint16_t *srcUV,
+                                 size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+                                 size_t dstUStride, size_t dstVStride, size_t width,
+                                 size_t height, bool isMonochrome = false);
+
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
 public:
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 8087396..18cd1bf 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -69,8 +69,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 2048, 2),
-                    C2F(mSize, height).inRange(2, 2048, 2),
+                    C2F(mSize, width).inRange(2, 2048),
+                    C2F(mSize, height).inRange(2, 2048),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -734,7 +734,12 @@
     }
 
     C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-    c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
+    // Always fetch a graphic block with the width aligned to 16 and the height
+    // aligned to 2; the true picture dimensions are restored later through the
+    // crop rectangle passed to createGraphicBuffer().
+    c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
+                                              align(mHeight, 2), format, usage,
+                                              &block);
     if (err != C2_OK) {
         ALOGE("fetchGraphicBlock for Output failed with status %d", err);
         work->result = err;
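
For reference, the decoder later narrows the padded block back to the real picture size when building the output buffer; a sketch of that pattern, reusing the variables from the hunk above and the createGraphicBuffer() helper referenced in the comment:

    std::shared_ptr<C2GraphicBlock> block;
    pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
    // The block may be larger than the picture; the crop rectangle restores the
    // true dimensions for downstream consumers.
    std::shared_ptr<C2Buffer> buffer =
            createGraphicBuffer(std::move(block), C2Rect(mWidth, mHeight));
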
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hal/client/Android.bp
similarity index 96%
rename from media/codec2/hidl/client/Android.bp
rename to media/codec2/hal/client/Android.bp
index f32711d..d2ef58c 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -43,8 +43,10 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
         "android.hardware.media.c2@1.2",
+        "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder",
+        "libbinder_ndk",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hal/client/client.cpp
similarity index 81%
rename from media/codec2/hidl/client/client.cpp
rename to media/codec2/hal/client/client.cpp
index 0acf7d7..09452c4 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -24,6 +24,7 @@
 #include <C2Config.h> // for C2StreamUsageTuning
 #include <C2PlatformSupport.h>
 
+#include <android/binder_auto_utils.h>
 #include <android/hardware/media/bufferpool/2.0/IClientManager.h>
 #include <android/hardware/media/c2/1.0/IComponent.h>
 #include <android/hardware/media/c2/1.0/IComponentInterface.h>
@@ -32,6 +33,15 @@
 #include <android/hardware/media/c2/1.0/IConfigurable.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 
+#include <aidl/android/hardware/media/c2/FieldSupportedValues.h>
+#include <aidl/android/hardware/media/c2/FieldSupportedValuesQuery.h>
+#include <aidl/android/hardware/media/c2/FieldSupportedValuesQueryResult.h>
+#include <aidl/android/hardware/media/c2/IComponent.h>
+#include <aidl/android/hardware/media/c2/IComponentInterface.h>
+#include <aidl/android/hardware/media/c2/IComponentStore.h>
+#include <aidl/android/hardware/media/c2/IConfigurable.h>
+#include <aidl/android/hardware/media/c2/ParamDescriptor.h>
+
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
 #include <codec2/hidl/1.0/types.h>
@@ -47,7 +57,6 @@
 #include <system/window.h> // for NATIVE_WINDOW_QUERY_*
 #include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
 
-
 #include <deque>
 #include <iterator>
 #include <limits>
@@ -65,11 +74,6 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 
-using namespace ::android::hardware::media::c2::V1_1;
-using namespace ::android::hardware::media::c2::V1_1::utils;
-using namespace ::android::hardware::media::bufferpool::V2_0;
-using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
-
 using HGraphicBufferProducer1 = ::android::hardware::graphics::bufferqueue::
         V1_0::IGraphicBufferProducer;
 using HGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
@@ -80,6 +84,13 @@
         V2_0::utils::H2BGraphicBufferProducer;
 using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
+namespace bufferpool_hidl = ::android::hardware::media::bufferpool::V2_0;
+namespace c2_aidl = ::aidl::android::hardware::media::c2;
+namespace c2_hidl_base = ::android::hardware::media::c2;
+namespace c2_hidl = ::android::hardware::media::c2::V1_2;
+
+using c2_hidl::utils::operator<<;
+
 namespace /* unnamed */ {
 
 // c2_status_t value that corresponds to hwbinder transaction failure.
@@ -254,15 +265,43 @@
         return sCaches;
     }
 };
+// Codec2ConfigurableClient::HidlImpl
 
-// Codec2ConfigurableClient
+struct Codec2ConfigurableClient::HidlImpl : public Codec2ConfigurableClient::ImplBase {
+    typedef c2_hidl::IConfigurable Base;
 
-const C2String& Codec2ConfigurableClient::getName() const {
-    return mName;
-}
+    // base cannot be null.
+    explicit HidlImpl(const sp<Base>& base);
 
-Codec2ConfigurableClient::Codec2ConfigurableClient(
-        const sp<IConfigurable>& base)
+    const C2String& getName() const override {
+        return mName;
+    }
+
+    c2_status_t query(
+            const std::vector<C2Param*>& stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+    c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+    c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override;
+
+    c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override;
+
+private:
+    sp<Base> mBase;
+    const C2String mName;
+};
+
+Codec2ConfigurableClient::HidlImpl::HidlImpl(const sp<Base>& base)
       : mBase{base},
         mName{[base]() -> C2String {
                 C2String outName;
@@ -274,12 +313,12 @@
             }()} {
 }
 
-c2_status_t Codec2ConfigurableClient::query(
+c2_status_t Codec2ConfigurableClient::HidlImpl::query(
         const std::vector<C2Param*> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
         c2_blocking_t mayBlock,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
-    hidl_vec<ParamIndex> indices(
+    hidl_vec<c2_hidl::ParamIndex> indices(
             stackParams.size() + heapParamIndices.size());
     size_t numIndices = 0;
     for (C2Param* const& stackParam : stackParams) {
@@ -287,12 +326,12 @@
             LOG(WARNING) << "query -- null stack param encountered.";
             continue;
         }
-        indices[numIndices++] = static_cast<ParamIndex>(stackParam->index());
+        indices[numIndices++] = static_cast<c2_hidl::ParamIndex>(stackParam->index());
     }
     size_t numStackIndices = numIndices;
     for (const C2Param::Index& index : heapParamIndices) {
         indices[numIndices++] =
-                static_cast<ParamIndex>(static_cast<uint32_t>(index));
+                static_cast<c2_hidl::ParamIndex>(static_cast<uint32_t>(index));
     }
     indices.resize(numIndices);
     if (heapParams) {
@@ -303,7 +342,7 @@
             indices,
             mayBlock == C2_MAY_BLOCK,
             [&status, &numStackIndices, &stackParams, heapParams](
-                    Status s, const Params& p) {
+                    c2_hidl::Status s, const c2_hidl::Params& p) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK && status != C2_BAD_INDEX) {
                     LOG(DEBUG) << "query -- call failed: "
@@ -311,7 +350,7 @@
                     return;
                 }
                 std::vector<C2Param*> paramPointers;
-                if (!parseParamsBlob(&paramPointers, p)) {
+                if (!c2_hidl::utils::parseParamsBlob(&paramPointers, p)) {
                     LOG(ERROR) << "query -- error while parsing params.";
                     status = C2_CORRUPTED;
                     return;
@@ -371,12 +410,12 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::config(
+c2_status_t Codec2ConfigurableClient::HidlImpl::config(
         const std::vector<C2Param*> &params,
         c2_blocking_t mayBlock,
         std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
-    Params hidlParams;
-    if (!createParamsBlob(&hidlParams, params)) {
+    c2_hidl::Params hidlParams;
+    if (!c2_hidl::utils::createParamsBlob(&hidlParams, params)) {
         LOG(ERROR) << "config -- bad input.";
         return C2_TRANSACTION_FAILED;
     }
@@ -385,9 +424,9 @@
             hidlParams,
             mayBlock == C2_MAY_BLOCK,
             [&status, &params, failures](
-                    Status s,
-                    const hidl_vec<SettingResult> f,
-                    const Params& o) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::SettingResult> f,
+                    const c2_hidl::Params& o) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK && status != C2_BAD_INDEX) {
                     LOG(DEBUG) << "config -- call failed: "
@@ -395,14 +434,14 @@
                 }
                 size_t i = failures->size();
                 failures->resize(i + f.size());
-                for (const SettingResult& sf : f) {
-                    if (!objcpy(&(*failures)[i++], sf)) {
+                for (const c2_hidl::SettingResult& sf : f) {
+                    if (!c2_hidl::utils::objcpy(&(*failures)[i++], sf)) {
                         LOG(ERROR) << "config -- "
                                    << "invalid SettingResult returned.";
                         return;
                     }
                 }
-                if (!updateParamsFromBlob(params, o)) {
+                if (!c2_hidl::utils::updateParamsFromBlob(params, o)) {
                     LOG(ERROR) << "config -- "
                                << "failed to parse returned params.";
                     status = C2_CORRUPTED;
@@ -415,7 +454,7 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::querySupportedParams(
+c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedParams(
         std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
     // TODO: Cache and query properly!
     c2_status_t status;
@@ -423,8 +462,8 @@
             std::numeric_limits<uint32_t>::min(),
             std::numeric_limits<uint32_t>::max(),
             [&status, params](
-                    Status s,
-                    const hidl_vec<ParamDescriptor>& p) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::ParamDescriptor>& p) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "querySupportedParams -- call failed: "
@@ -433,8 +472,8 @@
                 }
                 size_t i = params->size();
                 params->resize(i + p.size());
-                for (const ParamDescriptor& sp : p) {
-                    if (!objcpy(&(*params)[i++], sp)) {
+                for (const c2_hidl::ParamDescriptor& sp : p) {
+                    if (!c2_hidl::utils::objcpy(&(*params)[i++], sp)) {
                         LOG(ERROR) << "querySupportedParams -- "
                                    << "invalid returned ParamDescriptor.";
                         return;
@@ -448,12 +487,12 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::querySupportedValues(
+c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedValues(
         std::vector<C2FieldSupportedValuesQuery>& fields,
         c2_blocking_t mayBlock) const {
-    hidl_vec<FieldSupportedValuesQuery> inFields(fields.size());
+    hidl_vec<c2_hidl::FieldSupportedValuesQuery> inFields(fields.size());
     for (size_t i = 0; i < fields.size(); ++i) {
-        if (!objcpy(&inFields[i], fields[i])) {
+        if (!c2_hidl::utils::objcpy(&inFields[i], fields[i])) {
             LOG(ERROR) << "querySupportedValues -- bad input";
             return C2_TRANSACTION_FAILED;
         }
@@ -464,8 +503,8 @@
             inFields,
             mayBlock == C2_MAY_BLOCK,
             [&status, &inFields, &fields](
-                    Status s,
-                    const hidl_vec<FieldSupportedValuesQueryResult>& r) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::FieldSupportedValuesQueryResult>& r) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "querySupportedValues -- call failed: "
@@ -480,7 +519,7 @@
                     return;
                 }
                 for (size_t i = 0; i < fields.size(); ++i) {
-                    if (!objcpy(&fields[i], inFields[i], r[i])) {
+                    if (!c2_hidl::utils::objcpy(&fields[i], inFields[i], r[i])) {
                         LOG(ERROR) << "querySupportedValues -- "
                                       "invalid returned value.";
                         status = C2_CORRUPTED;
@@ -495,14 +534,135 @@
     return status;
 }
 
+// Codec2ConfigurableClient::AidlImpl
+
+struct Codec2ConfigurableClient::AidlImpl : public Codec2ConfigurableClient::ImplBase {
+    typedef c2_aidl::IConfigurable Base;
+
+    // base cannot be null.
+    explicit AidlImpl(const std::shared_ptr<Base>& base);
+
+    const C2String& getName() const override {
+        return mName;
+    }
+
+    c2_status_t query(
+            const std::vector<C2Param*>& stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+    c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+    c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override;
+
+    c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override;
+
+private:
+    std::shared_ptr<Base> mBase;
+    const C2String mName;
+};
+
+Codec2ConfigurableClient::AidlImpl::AidlImpl(const std::shared_ptr<Base>& base)
+      : mBase{base},
+        mName{[base]() -> C2String {
+                std::string outName;
+                ndk::ScopedAStatus status = base->getName(&outName);
+                return status.isOk() ? outName : "";
+            }()} {
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::query(
+        const std::vector<C2Param*> &stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    (void)stackParams, (void)heapParamIndices, (void)mayBlock, (void)heapParams;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::config(
+        const std::vector<C2Param*> &params,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+    (void)params, (void)mayBlock, (void)failures;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedParams(
+        std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+    (void)params;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedValues(
+        std::vector<C2FieldSupportedValuesQuery>& fields,
+        c2_blocking_t mayBlock) const {
+    (void)fields, (void)mayBlock;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+// Codec2ConfigurableClient
+
+Codec2ConfigurableClient::Codec2ConfigurableClient(const sp<HidlBase> &hidlBase)
+    : mImpl(new Codec2ConfigurableClient::HidlImpl(hidlBase)) {
+}
+
+Codec2ConfigurableClient::Codec2ConfigurableClient(
+        const std::shared_ptr<AidlBase> &aidlBase)
+    : mImpl(new Codec2ConfigurableClient::AidlImpl(aidlBase)) {
+}
+
+const C2String& Codec2ConfigurableClient::getName() const {
+    return mImpl->getName();
+}
+
+c2_status_t Codec2ConfigurableClient::query(
+        const std::vector<C2Param*>& stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams);
+}
+
+c2_status_t Codec2ConfigurableClient::config(
+        const std::vector<C2Param*> &params,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+    return mImpl->config(params, mayBlock, failures);
+}
+
+c2_status_t Codec2ConfigurableClient::querySupportedParams(
+        std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+    return mImpl->querySupportedParams(params);
+}
+
+c2_status_t Codec2ConfigurableClient::querySupportedValues(
+        std::vector<C2FieldSupportedValuesQuery>& fields,
+        c2_blocking_t mayBlock) const {
+    return mImpl->querySupportedValues(fields, mayBlock);
+}
+
+
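
Illustrative only: with the two constructors above, a caller obtains the same wrapper regardless of transport (the wrapHidl/wrapAidl helpers below are hypothetical):

    static std::unique_ptr<Codec2ConfigurableClient> wrapHidl(
            const sp<Codec2ConfigurableClient::HidlBase>& base) {
        return std::make_unique<Codec2ConfigurableClient>(base);
    }

    static std::unique_ptr<Codec2ConfigurableClient> wrapAidl(
            const std::shared_ptr<Codec2ConfigurableClient::AidlBase>& base) {
        return std::make_unique<Codec2ConfigurableClient>(base);
    }
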
 // Codec2Client::Component::HidlListener
-struct Codec2Client::Component::HidlListener : public IComponentListener {
+struct Codec2Client::Component::HidlListener : public c2_hidl::IComponentListener {
     std::weak_ptr<Component> component;
     std::weak_ptr<Listener> base;
 
-    virtual Return<void> onWorkDone(const WorkBundle& workBundle) override {
+    virtual Return<void> onWorkDone(const c2_hidl::WorkBundle& workBundle) override {
         std::list<std::unique_ptr<C2Work>> workItems;
-        if (!objcpy(&workItems, workBundle)) {
+        if (!c2_hidl::utils::objcpy(&workItems, workBundle)) {
             LOG(DEBUG) << "onWorkDone -- received corrupted WorkBundle.";
             return Void();
         }
@@ -521,12 +681,12 @@
     }
 
     virtual Return<void> onTripped(
-            const hidl_vec<SettingResult>& settingResults) override {
+            const hidl_vec<c2_hidl::SettingResult>& settingResults) override {
         std::vector<std::shared_ptr<C2SettingResult>> c2SettingResults(
                 settingResults.size());
         for (size_t i = 0; i < settingResults.size(); ++i) {
             std::unique_ptr<C2SettingResult> c2SettingResult;
-            if (!objcpy(&c2SettingResult, settingResults[i])) {
+            if (!c2_hidl::utils::objcpy(&c2SettingResult, settingResults[i])) {
                 LOG(DEBUG) << "onTripped -- received corrupted SettingResult.";
                 return Void();
             }
@@ -540,13 +700,13 @@
         return Void();
     }
 
-    virtual Return<void> onError(Status s, uint32_t errorCode) override {
+    virtual Return<void> onError(c2_hidl::Status s, uint32_t errorCode) override {
         LOG(DEBUG) << "onError --"
                    << " status = " << s
                    << ", errorCode = " << errorCode
                    << ".";
         if (std::shared_ptr<Listener> listener = base.lock()) {
-            listener->onError(component, s == Status::OK ?
+            listener->onError(component, s == c2_hidl::Status::OK ?
                     errorCode : static_cast<c2_status_t>(s));
         } else {
             LOG(DEBUG) << "onError -- listener died.";
@@ -612,11 +772,11 @@
 Codec2Client::Codec2Client(sp<Base> const& base,
                            size_t serviceIndex)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -624,11 +784,11 @@
         mBase1_1{Base1_1::castFrom(base)},
         mBase1_2{Base1_2::castFrom(base)},
         mServiceIndex{serviceIndex} {
-    Return<sp<IClientManager>> transResult = base->getPoolClientManager();
+    Return<sp<bufferpool_hidl::IClientManager>> transResult = base->getPoolClientManager();
     if (!transResult.isOk()) {
         LOG(ERROR) << "getPoolClientManager -- transaction failed.";
     } else {
-        mHostPoolManager = static_cast<sp<IClientManager>>(transResult);
+        mHostPoolManager = static_cast<sp<bufferpool_hidl::IClientManager>>(transResult);
     }
 }
 
@@ -665,10 +825,10 @@
         transStatus = mBase1_2->createComponent_1_2(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -681,10 +841,10 @@
         transStatus = mBase1_1->createComponent_1_1(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl_base::V1_1::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -696,10 +856,10 @@
         transStatus = mBase1_0->createComponent(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<hardware::media::c2::V1_0::IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl_base::V1_0::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -747,8 +907,8 @@
     Return<void> transStatus = mBase1_0->createInterface(
             name,
             [&status, interface](
-                    Status s,
-                    const sp<IComponentInterface>& i) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IComponentInterface>& i) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -778,8 +938,8 @@
     c2_status_t status;
     Return<void> transStatus = mBase1_0->createInputSurface(
             [&status, inputSurface](
-                    Status s,
-                    const sp<IInputSurface>& i) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IInputSurface>& i) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -805,16 +965,16 @@
     std::vector<C2Component::Traits> traits;
     std::string const& serviceName = getServiceName();
     Return<void> transStatus = mBase1_0->listComponents(
-            [&traits, &serviceName](Status s,
-                   const hidl_vec<IComponentStore::ComponentTraits>& t) {
-                if (s != Status::OK) {
+            [&traits, &serviceName](c2_hidl::Status s,
+                   const hidl_vec<c2_hidl::IComponentStore::ComponentTraits>& t) {
+                if (s != c2_hidl::Status::OK) {
                     LOG(DEBUG) << "_listComponents -- call failed: "
                                << static_cast<c2_status_t>(s) << ".";
                     return;
                 }
                 traits.resize(t.size());
                 for (size_t i = 0; i < t.size(); ++i) {
-                    if (!objcpy(&traits[i], t[i])) {
+                    if (!c2_hidl::utils::objcpy(&traits[i], t[i])) {
                         LOG(ERROR) << "_listComponents -- corrupted output.";
                         return;
                     }
@@ -846,14 +1006,14 @@
     // should reflect the HAL API.
     struct SimpleParamReflector : public C2ParamReflector {
         virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) const {
-            hidl_vec<ParamIndex> indices(1);
-            indices[0] = static_cast<ParamIndex>(coreIndex.coreIndex());
+            hidl_vec<c2_hidl::ParamIndex> indices(1);
+            indices[0] = static_cast<c2_hidl::ParamIndex>(coreIndex.coreIndex());
             std::unique_ptr<C2StructDescriptor> descriptor;
             Return<void> transStatus = mBase->getStructDescriptors(
                     indices,
                     [&descriptor](
-                            Status s,
-                            const hidl_vec<StructDescriptor>& sd) {
+                            c2_hidl::Status s,
+                            const hidl_vec<c2_hidl::StructDescriptor>& sd) {
                         c2_status_t status = static_cast<c2_status_t>(s);
                         if (status != C2_OK) {
                             LOG(DEBUG) << "SimpleParamReflector -- "
@@ -871,7 +1031,7 @@
                             descriptor.reset();
                             return;
                         }
-                        if (!objcpy(&descriptor, sd[0])) {
+                        if (!c2_hidl::utils::objcpy(&descriptor, sd[0])) {
                             LOG(DEBUG) << "SimpleParamReflector -- "
                                           "getStructDescriptors() returned "
                                           "corrupted data.";
@@ -1199,11 +1359,11 @@
 // Codec2Client::Interface
 Codec2Client::Interface::Interface(const sp<Base>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1213,17 +1373,17 @@
 // Codec2Client::Component
 Codec2Client::Component::Component(const sp<Base>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1236,17 +1396,17 @@
 
 Codec2Client::Component::Component(const sp<Base1_1>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1259,17 +1419,17 @@
 
 Codec2Client::Component::Component(const sp<Base1_2>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1291,9 +1451,9 @@
     Return<void> transStatus = mBase1_0->createBlockPool(
             static_cast<uint32_t>(id),
             [&status, blockPoolId, configurable](
-                    Status s,
+                    c2_hidl::Status s,
                     uint64_t pId,
-                    const sp<IConfigurable>& c) {
+                    const sp<c2_hidl::IConfigurable>& c) {
                 status = static_cast<c2_status_t>(s);
                 configurable->reset();
                 if (status != C2_OK) {
@@ -1313,13 +1473,13 @@
 
 c2_status_t Codec2Client::Component::destroyBlockPool(
         C2BlockPool::local_id_t localId) {
-    Return<Status> transResult = mBase1_0->destroyBlockPool(
+    Return<c2_hidl::Status> transResult = mBase1_0->destroyBlockPool(
             static_cast<uint64_t>(localId));
     if (!transResult.isOk()) {
         LOG(ERROR) << "destroyBlockPool -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
-    return static_cast<c2_status_t>(static_cast<Status>(transResult));
+    return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
 }
 
 void Codec2Client::Component::handleOnWorkDone(
@@ -1330,18 +1490,18 @@
 
 c2_status_t Codec2Client::Component::queue(
         std::list<std::unique_ptr<C2Work>>* const items) {
-    WorkBundle workBundle;
+    c2_hidl::WorkBundle workBundle;
     if (!objcpy(&workBundle, *items, mBufferPoolSender.get())) {
         LOG(ERROR) << "queue -- bad input.";
         return C2_TRANSACTION_FAILED;
     }
-    Return<Status> transStatus = mBase1_0->queue(workBundle);
+    Return<c2_hidl::Status> transStatus = mBase1_0->queue(workBundle);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "queue -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "queue -- call failed: " << status << ".";
     }
@@ -1355,13 +1515,13 @@
     c2_status_t status;
     Return<void> transStatus = mBase1_0->flush(
             [&status, flushedWork](
-                    Status s, const WorkBundle& wb) {
+                    c2_hidl::Status s, const c2_hidl::WorkBundle& wb) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "flush -- call failed: " << status << ".";
                     return;
                 }
-                if (!objcpy(flushedWork, wb)) {
+                if (!c2_hidl::utils::objcpy(flushedWork, wb)) {
                     status = C2_CORRUPTED;
                 } else {
                     status = C2_OK;
@@ -1394,14 +1554,14 @@
 }
 
 c2_status_t Codec2Client::Component::drain(C2Component::drain_mode_t mode) {
-    Return<Status> transStatus = mBase1_0->drain(
+    Return<c2_hidl::Status> transStatus = mBase1_0->drain(
             mode == C2Component::DRAIN_COMPONENT_WITH_EOS);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "drain -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "drain -- call failed: " << status << ".";
     }
@@ -1409,13 +1569,13 @@
 }
 
 c2_status_t Codec2Client::Component::start() {
-    Return<Status> transStatus = mBase1_0->start();
+    Return<c2_hidl::Status> transStatus = mBase1_0->start();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "start -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "start -- call failed: " << status << ".";
     }
@@ -1423,13 +1583,13 @@
 }
 
 c2_status_t Codec2Client::Component::stop() {
-    Return<Status> transStatus = mBase1_0->stop();
+    Return<c2_hidl::Status> transStatus = mBase1_0->stop();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "stop -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "stop -- call failed: " << status << ".";
     }
@@ -1437,13 +1597,13 @@
 }
 
 c2_status_t Codec2Client::Component::reset() {
-    Return<Status> transStatus = mBase1_0->reset();
+    Return<c2_hidl::Status> transStatus = mBase1_0->reset();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "reset -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "reset -- call failed: " << status << ".";
     }
@@ -1451,13 +1611,13 @@
 }
 
 c2_status_t Codec2Client::Component::release() {
-    Return<Status> transStatus = mBase1_0->release();
+    Return<c2_hidl::Status> transStatus = mBase1_0->release();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "release -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "release -- call failed: " << status << ".";
     }
@@ -1474,7 +1634,7 @@
     c2_status_t status{};
     Return<void> transStatus = mBase1_1->configureVideoTunnel(avSyncHwId,
             [&status, sidebandHandle](
-                    Status s, hardware::hidl_handle const& h) {
+                    c2_hidl::Status s, hardware::hidl_handle const& h) {
                 status = static_cast<c2_status_t>(s);
                 if (h.getNativeHandle()) {
                     *sidebandHandle = native_handle_clone(h.getNativeHandle());
@@ -1554,7 +1714,7 @@
     ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
             generation, (long long)consumerUsage, syncObj ? " sync" : "");
 
-    Return<Status> transStatus = syncObj ?
+    Return<c2_hidl::Status> transStatus = syncObj ?
             mBase1_2->setOutputSurfaceWithSyncObj(
                     static_cast<uint64_t>(blockPoolId),
                     bqId == 0 ? nullHgbp : igbp, *syncObj) :
@@ -1567,7 +1727,7 @@
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
     }
@@ -1591,13 +1751,13 @@
         C2BlockPool::local_id_t blockPoolId) {
     std::scoped_lock lock(mOutputMutex);
     mOutputBufferQueue->stop();
-    Return<Status> transStatus = mBase1_0->setOutputSurface(
+    Return<c2_hidl::Status> transStatus = mBase1_0->setOutputSurface(
             static_cast<uint64_t>(blockPoolId), nullptr);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface(stopUsingOutputSurface) -- transaction failed.";
     } else {
         c2_status_t status =
-                static_cast<c2_status_t>(static_cast<Status>(transStatus));
+                static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
         if (status != C2_OK) {
             LOG(DEBUG) << "setOutputSurface(stopUsingOutputSurface) -- call failed: "
                        << status << ".";
@@ -1612,7 +1772,7 @@
     Return<void> transStatus = mBase1_0->connectToInputSurface(
             inputSurface->mBase,
             [&status, connection](
-                    Status s, const sp<IInputSurfaceConnection>& c) {
+                    c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "connectToInputSurface -- call failed: "
@@ -1636,7 +1796,7 @@
     Return<void> transStatus = mBase1_0->connectToOmxInputSurface(
             producer, source,
             [&status, connection](
-                    Status s, const sp<IInputSurfaceConnection>& c) {
+                    c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "connectToOmxInputSurface -- call failed: "
@@ -1653,13 +1813,13 @@
 }
 
 c2_status_t Codec2Client::Component::disconnectFromInputSurface() {
-    Return<Status> transStatus = mBase1_0->disconnectFromInputSurface();
+    Return<c2_hidl::Status> transStatus = mBase1_0->disconnectFromInputSurface();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "disconnectToInputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "disconnectFromInputSurface -- call failed: "
                    << status << ".";
@@ -1706,13 +1866,13 @@
 }
 
 // Codec2Client::InputSurface
-Codec2Client::InputSurface::InputSurface(const sp<IInputSurface>& base)
+Codec2Client::InputSurface::InputSurface(const sp<c2_hidl::IInputSurface>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1732,19 +1892,19 @@
     return mGraphicBufferProducer;
 }
 
-sp<IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
+sp<c2_hidl::IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
     return mBase;
 }
 
 // Codec2Client::InputSurfaceConnection
 Codec2Client::InputSurfaceConnection::InputSurfaceConnection(
-        const sp<IInputSurfaceConnection>& base)
+        const sp<c2_hidl::IInputSurfaceConnection>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1752,8 +1912,8 @@
 }
 
 c2_status_t Codec2Client::InputSurfaceConnection::disconnect() {
-    Return<Status> transResult = mBase->disconnect();
-    return static_cast<c2_status_t>(static_cast<Status>(transResult));
+    Return<c2_hidl::Status> transResult = mBase->disconnect();
+    return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
 }
 
 }  // namespace android
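
The hunks above only qualify the existing HIDL names (Status, IConfigurable, IInputSurface, IInputSurfaceConnection) with the c2_hidl:: prefix; the transaction-plus-status conversion they wrap is unchanged. A minimal standalone sketch of that shared pattern follows, assuming c2_hidl aliases the V1_0 generated namespace (the real alias is declared earlier in client.cpp and is not shown in this excerpt); the wrapper type below is a placeholder, not the real client class.

    // Sketch of the Return<Status> -> c2_status_t conversion repeated above.
    #include <C2.h>                                              // c2_status_t
    #include <android/hardware/media/c2/1.0/IInputSurfaceConnection.h>
    #include <hidl/Status.h>                                     // Return<T>
    #include <utils/StrongPointer.h>                             // sp<T>

    namespace c2_hidl = ::android::hardware::media::c2::V1_0;   // assumed alias target

    struct ExampleConnection {                                   // hypothetical wrapper
        ::android::sp<c2_hidl::IInputSurfaceConnection> mBase;

        c2_status_t disconnect() {
            // Fold the binder transport status and the HAL status into one
            // c2_status_t, mirroring the pattern used throughout the hunks above.
            ::android::hardware::Return<c2_hidl::Status> transResult =
                    mBase->disconnect();
            if (!transResult.isOk()) {
                return C2_TRANSACTION_FAILED;
            }
            return static_cast<c2_status_t>(
                    static_cast<c2_hidl::Status>(transResult));
        }
    };
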
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
similarity index 91%
rename from media/codec2/hidl/client/include/codec2/hidl/client.h
rename to media/codec2/hal/client/include/codec2/hidl/client.h
index 49d9b28..11f5911 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -83,6 +83,13 @@
 struct IComponentStore;
 }  // namespace android::hardware::media::c2::V1_2
 
+namespace aidl::android::hardware::media::c2 {
+class IComponent;
+class IComponentInterface;
+class IComponentStore;
+class IConfigurable;
+}  // namespace aidl::android::hardware::media::c2
+
 namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
 }  // namespace android::hardware::media::bufferpool::V2_0
@@ -105,7 +112,36 @@
 // declaration of an inner class is not possible.
 struct Codec2ConfigurableClient {
 
-    typedef ::android::hardware::media::c2::V1_0::IConfigurable Base;
+    typedef ::android::hardware::media::c2::V1_0::IConfigurable HidlBase;
+    typedef ::aidl::android::hardware::media::c2::IConfigurable AidlBase;
+
+    struct ImplBase {
+        virtual ~ImplBase() = default;
+
+        virtual const C2String& getName() const = 0;
+
+        virtual c2_status_t query(
+                const std::vector<C2Param*>& stackParams,
+                const std::vector<C2Param::Index> &heapParamIndices,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
+
+        virtual c2_status_t config(
+                const std::vector<C2Param*> &params,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+        virtual c2_status_t querySupportedParams(
+                std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+                ) const = 0;
+
+        virtual c2_status_t querySupportedValues(
+                std::vector<C2FieldSupportedValuesQuery>& fields,
+                c2_blocking_t mayBlock) const = 0;
+    };
+
+    explicit Codec2ConfigurableClient(const sp<HidlBase> &hidlBase);
+    explicit Codec2ConfigurableClient(const std::shared_ptr<AidlBase> &aidlBase);
 
     const C2String& getName() const;
 
@@ -127,15 +163,11 @@
     c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const;
+private:
+    struct HidlImpl;
+    struct AidlImpl;
 
-    // base cannot be null.
-    Codec2ConfigurableClient(const sp<Base>& base);
-
-protected:
-    sp<Base> mBase;
-    C2String mName;
-
-    friend struct Codec2Client;
+    const std::unique_ptr<ImplBase> mImpl;
 };
 
 struct Codec2Client : public Codec2ConfigurableClient {
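
The header hunk above replaces the single HIDL base pointer with a private pimpl: the class now holds a const std::unique_ptr<ImplBase>, and the two new constructors select either HidlImpl or AidlImpl. A minimal sketch of the forwarding this enables, assuming the public method signatures mirror ImplBase exactly as shown (the real HidlImpl/AidlImpl bodies live in client.cpp and are outside this excerpt):

    // Sketch only: pimpl forwarding implied by the header above; not the
    // actual client.cpp definitions.
    #include <codec2/hidl/client.h>   // the header renamed/updated above

    namespace android {

    const C2String& Codec2ConfigurableClient::getName() const {
        return mImpl->getName();
    }

    c2_status_t Codec2ConfigurableClient::query(
            const std::vector<C2Param*>& stackParams,
            const std::vector<C2Param::Index>& heapParamIndices,
            c2_blocking_t mayBlock,
            std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
        // Each public entry point delegates to whichever backend (HIDL or
        // AIDL) the constructor selected.
        return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams);
    }

    }  // namespace android
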
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
similarity index 100%
rename from media/codec2/hidl/client/include/codec2/hidl/output.h
rename to media/codec2/hal/client/include/codec2/hidl/output.h
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hal/client/output.cpp
similarity index 100%
rename from media/codec2/hidl/client/output.cpp
rename to media/codec2/hal/client/output.cpp
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hal/hidl/1.0/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Android.bp
rename to media/codec2/hal/hidl/1.0/utils/Android.bp
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Component.cpp
rename to media/codec2/hal/hidl/1.0/utils/Component.cpp
diff --git a/media/codec2/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.0/utils/Configurable.cpp b/media/codec2/hal/hidl/1.0/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.0/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.0/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.0/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hal/hidl/1.0/utils/types.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/types.cpp
rename to media/codec2/hal/hidl/1.0/utils/types.cpp
diff --git a/media/codec2/hidl/1.0/vts/OWNERS b/media/codec2/hal/hidl/1.0/vts/OWNERS
similarity index 100%
rename from media/codec2/hidl/1.0/vts/OWNERS
rename to media/codec2/hal/hidl/1.0/vts/OWNERS
diff --git a/media/codec2/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/common/README.md b/media/codec2/hal/hidl/1.0/vts/functional/common/README.md
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/README.md
rename to media/codec2/hal/hidl/1.0/vts/functional/common/README.md
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
rename to media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
diff --git a/media/codec2/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/component/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/master/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb b/media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
rename to media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info b/media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
rename to media/codec2/hal/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hal/hidl/1.1/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Android.bp
rename to media/codec2/hal/hidl/1.1/utils/Android.bp
diff --git a/media/codec2/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Component.cpp
rename to media/codec2/hal/hidl/1.1/utils/Component.cpp
diff --git a/media/codec2/hidl/1.1/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.1/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.1/utils/Configurable.cpp b/media/codec2/hal/hidl/1.1/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.1/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.1/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.1/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.1/utils/InputSurfaceConnection.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputSurfaceConnection.cpp
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/types.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
diff --git a/media/codec2/hidl/1.1/utils/types.cpp b/media/codec2/hal/hidl/1.1/utils/types.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/types.cpp
rename to media/codec2/hal/hidl/1.1/utils/types.cpp
diff --git a/media/codec2/hidl/1.2/utils/Android.bp b/media/codec2/hal/hidl/1.2/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Android.bp
rename to media/codec2/hal/hidl/1.2/utils/Android.bp
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Component.cpp
rename to media/codec2/hal/hidl/1.2/utils/Component.cpp
diff --git a/media/codec2/hidl/1.2/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.2/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.2/utils/Configurable.cpp b/media/codec2/hal/hidl/1.2/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.2/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.2/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.2/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.2/utils/InputSurfaceConnection.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputSurfaceConnection.cpp
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
diff --git a/media/codec2/hidl/1.2/utils/types.cpp b/media/codec2/hal/hidl/1.2/utils/types.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/types.cpp
rename to media/codec2/hal/hidl/1.2/utils/types.cpp
diff --git a/media/codec2/hidl/plugin/Android.bp b/media/codec2/hal/plugin/Android.bp
similarity index 100%
rename from media/codec2/hidl/plugin/Android.bp
rename to media/codec2/hal/plugin/Android.bp
diff --git a/media/codec2/hidl/plugin/DefaultFilterPlugin.cpp b/media/codec2/hal/plugin/DefaultFilterPlugin.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/DefaultFilterPlugin.cpp
rename to media/codec2/hal/plugin/DefaultFilterPlugin.cpp
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/FilterWrapper.cpp
rename to media/codec2/hal/plugin/FilterWrapper.cpp
diff --git a/media/codec2/hidl/plugin/FilterWrapperStub.cpp b/media/codec2/hal/plugin/FilterWrapperStub.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/FilterWrapperStub.cpp
rename to media/codec2/hal/plugin/FilterWrapperStub.cpp
diff --git a/media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h b/media/codec2/hal/plugin/include/codec2/hidl/plugin/FilterPlugin.h
similarity index 100%
rename from media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h
rename to media/codec2/hal/plugin/include/codec2/hidl/plugin/FilterPlugin.h
diff --git a/media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h b/media/codec2/hal/plugin/internal/DefaultFilterPlugin.h
similarity index 100%
rename from media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h
rename to media/codec2/hal/plugin/internal/DefaultFilterPlugin.h
diff --git a/media/codec2/hidl/plugin/internal/FilterWrapper.h b/media/codec2/hal/plugin/internal/FilterWrapper.h
similarity index 100%
rename from media/codec2/hidl/plugin/internal/FilterWrapper.h
rename to media/codec2/hal/plugin/internal/FilterWrapper.h
diff --git a/media/codec2/hidl/plugin/samples/Android.bp b/media/codec2/hal/plugin/samples/Android.bp
similarity index 100%
rename from media/codec2/hidl/plugin/samples/Android.bp
rename to media/codec2/hal/plugin/samples/Android.bp
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
similarity index 99%
rename from media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
rename to media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index c8997bb..c77eb22 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -810,8 +810,6 @@
     // affectedParams
     {
         C2StreamHdrStaticInfo::output::PARAM_TYPE,
-        C2StreamHdr10PlusInfo::output::PARAM_TYPE,  // will be deprecated
-        C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
         C2StreamColorAspectsInfo::output::PARAM_TYPE,
     },
 };
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hal/services/Android.bp
similarity index 100%
rename from media/codec2/hidl/services/Android.bp
rename to media/codec2/hal/services/Android.bp
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hal/services/android.hardware.media.c2@1.2-default-service.rc
similarity index 100%
rename from media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
rename to media/codec2/hal/services/android.hardware.media.c2@1.2-default-service.rc
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_0_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_0_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_0_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_0_default.xml
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_1_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_1_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_1_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_1_default.xml
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_2_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_2_default.xml
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hal/services/vendor.cpp
similarity index 100%
rename from media/codec2/hidl/services/vendor.cpp
rename to media/codec2/hal/services/vendor.cpp
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4bf8dce..b54d35d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -226,6 +226,9 @@
     if (buffer->meta()->findInt32("tunnel-first-frame", &tmp) && tmp) {
         tunnelFirstFrame = true;
     }
+    if (buffer->meta()->findInt32("decode-only", &tmp) && tmp) {
+        flags |= C2FrameData::FLAG_DROP_FRAME;
+    }
     ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
     std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
@@ -1995,6 +1998,12 @@
         drop = true;
     }
 
+    // Workaround: if the HAL does not handle C2FrameData::FLAG_DROP_FRAME,
+    // mark the corresponding output buffer as decode-only so the framework
+    // can drop it.
+    if (work->input.flags & C2FrameData::FLAG_DROP_FRAME) {
+        flags |= BUFFER_FLAG_DECODE_ONLY;
+    }
+
     if (notifyClient && !buffer && !flags) {
         if (mTunneled && drop && outputFormat) {
             ALOGV("[%s] onWorkDone: Keep tunneled, drop frame with format change (%lld)",
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 5208be6..cfadc95 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -389,7 +389,7 @@
             // read back always as int
             float value;
             if (v.get(&value)) {
-                return (int32_t)value;
+                return (int32_t) (value + 0.5);
             }
             return C2Value();
         }));
@@ -955,7 +955,7 @@
         .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
     add(ConfigMapper(KEY_FLAC_COMPRESSION_LEVEL, C2_PARAMKEY_COMPLEXITY, "value")
         .limitTo(D::AUDIO & D::ENCODER));
-    add(ConfigMapper("complexity", C2_PARAMKEY_COMPLEXITY, "value")
+    add(ConfigMapper(KEY_COMPLEXITY, C2_PARAMKEY_COMPLEXITY, "value")
         .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(KEY_GRID_COLUMNS, C2_PARAMKEY_TILE_LAYOUT, "columns")
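The first CCodecConfig.cpp hunk above changes the float-to-int read-back from a plain cast, which truncates, to (int32_t)(value + 0.5), which rounds half up for the non-negative values this path normally carries. A small stand-alone illustration of the difference:

#include <cstdint>
#include <cstdio>

int32_t truncateCast(float v) { return (int32_t)v; }
int32_t roundHalfUp(float v)  { return (int32_t)(v + 0.5f); }

int main() {
    // A 29.97-style value shows where truncation and rounding diverge.
    printf("%d %d\n", truncateCast(29.97f), roundHalfUp(29.97f)); // 29 30
    printf("%d %d\n", truncateCast(30.0f),  roundHalfUp(30.0f));  // 30 30
    return 0;
}
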
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 807841e..9004bcf 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -313,6 +313,28 @@
             && layout.planes[layout.PLANE_V].rowSampling == 2);
 }
 
+bool IsYUV420_10bit(const C2GraphicView &view) {
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.numPlanes == 3
+            && layout.type == C2PlanarLayout::TYPE_YUV
+            && layout.planes[layout.PLANE_Y].channel == C2PlaneInfo::CHANNEL_Y
+            && layout.planes[layout.PLANE_Y].allocatedDepth == 16
+            && layout.planes[layout.PLANE_Y].bitDepth == 10
+            && layout.planes[layout.PLANE_Y].colSampling == 1
+            && layout.planes[layout.PLANE_Y].rowSampling == 1
+            && layout.planes[layout.PLANE_U].channel == C2PlaneInfo::CHANNEL_CB
+            && layout.planes[layout.PLANE_U].allocatedDepth == 16
+            && layout.planes[layout.PLANE_U].bitDepth == 10
+            && layout.planes[layout.PLANE_U].colSampling == 2
+            && layout.planes[layout.PLANE_U].rowSampling == 2
+            && layout.planes[layout.PLANE_V].channel == C2PlaneInfo::CHANNEL_CR
+            && layout.planes[layout.PLANE_V].allocatedDepth == 16
+            && layout.planes[layout.PLANE_V].bitDepth == 10
+            && layout.planes[layout.PLANE_V].colSampling == 2
+            && layout.planes[layout.PLANE_V].rowSampling == 2);
+}
+
 bool IsNV12(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -327,6 +349,24 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsP010(const C2GraphicView &view) {
+    if (!IsYUV420_10bit(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 4
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_U].offset == 0
+            && layout.planes[layout.PLANE_V].colInc == 4
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_V].offset == 2
+            && layout.planes[layout.PLANE_Y].rightShift == 6
+            && layout.planes[layout.PLANE_U].rightShift == 6
+            && layout.planes[layout.PLANE_V].rightShift == 6);
+}
+
 bool IsNV21(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
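IsYUV420_10bit() and IsP010() above describe a semi-planar 10-bit layout: 16-bit samples with the 10 significant bits stored in the high bits (rightShift == 6), a full-resolution Y plane, and an interleaved CbCr plane sampled 2x2 with a 4-byte column increment. A minimal sketch of reading one pixel from such a buffer follows; the yStride/uvStride byte strides are hypothetical parameters standing in for the real C2PlanarLayout accessors.

#include <cstddef>
#include <cstdint>

struct Yuv10 { uint16_t y, cb, cr; };

// Extract the 10-bit Y, Cb, Cr values at (x, y) from a P010-style buffer.
// yPlane/uvPlane point to the start of each plane; strides are in bytes.
Yuv10 readP010(const uint8_t *yPlane, size_t yStride,
               const uint8_t *uvPlane, size_t uvStride,
               int x, int y) {
    auto load16 = [](const uint8_t *p) {
        return (uint16_t)(p[0] | (p[1] << 8));  // little-endian 16-bit sample
    };
    // Y: one 16-bit sample per pixel, value stored in the top 10 bits.
    const uint8_t *yp = yPlane + (size_t)y * yStride + (size_t)x * 2;
    // CbCr: interleaved 16-bit pairs, one pair per 2x2 block (colInc == 4).
    const uint8_t *uvp = uvPlane + (size_t)(y / 2) * uvStride + (size_t)(x / 2) * 4;
    Yuv10 out;
    out.y  = load16(yp)      >> 6;   // rightShift == 6
    out.cb = load16(uvp)     >> 6;   // Cb at byte offset 0 within the pair
    out.cr = load16(uvp + 2) >> 6;   // Cr at byte offset 2 within the pair
    return out;
}
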
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 9fa642d..6b0ba7f 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -93,11 +93,21 @@
 bool IsYUV420(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a YUV 420 10-10-10 layout.
+ */
+bool IsYUV420_10bit(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV12 layout.
  */
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a P010 layout.
+ */
+bool IsP010(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV21 layout.
  */
 bool IsNV21(const C2GraphicView &view);
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index dfdd84d..d7a9764 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1081,6 +1081,7 @@
     emplace("libcodec2_soft_amrwbenc.so");
     //emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
     emplace("libcodec2_soft_av1dec_gav1.so");
+    emplace("libcodec2_soft_av1enc.so");
     emplace("libcodec2_soft_avcdec.so");
     emplace("libcodec2_soft_avcenc.so");
     emplace("libcodec2_soft_flacdec.so");
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 270bbf4..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,6 +432,10 @@
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
+            if (bufferNeedsReallocation) {
+                mBuffers[slot].clear();
+            }
+
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 99bccac..bf4ca32 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
     }
 
     HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    if (o->size() < sizeof(C2SyncVariables)) {
+        android_errorWriteLog(0x534e4554, "240140929");
+        return nullptr;
+    }
+
     void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
 
     if (ptr == MAP_FAILED) {
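The C2SurfaceSyncObj.cpp hunk above rejects a shared-memory handle that is smaller than the structure it is expected to hold before calling mmap(), closing a potential out-of-bounds access; the android_errorWriteLog call records the associated security bug ID. Below is a generic, stand-alone sketch of the same validate-before-map pattern, using fstat() on an arbitrary descriptor instead of the HandleSyncMem helper, with a placeholder payload struct.

#include <sys/mman.h>
#include <sys/stat.h>
#include <cstddef>
#include <cstdint>

struct SharedState { int32_t futexValue; int32_t flags; };  // placeholder payload

// Map a shared-memory fd only if it is large enough to hold SharedState.
void *mapSharedState(int fd) {
    struct stat st;
    if (fstat(fd, &st) != 0 || (size_t)st.st_size < sizeof(SharedState)) {
        return nullptr;  // reject undersized (possibly hostile) handles
    }
    void *ptr = mmap(nullptr, st.st_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
    return ptr == MAP_FAILED ? nullptr : ptr;
}
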
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaaudio/TEST_MAPPING
+++ b/media/libaaudio/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaaudio/examples/loopback/README.md b/media/libaaudio/examples/loopback/README.md
new file mode 100644
index 0000000..0da751f
--- /dev/null
+++ b/media/libaaudio/examples/loopback/README.md
@@ -0,0 +1,7 @@
+# to run the loopback test from the command line
+{cd to top of the repo}
+mmma frameworks/av/media/libaaudio/examples/
+adb root
+adb remount -R
+adb push $OUT/data/nativetest/aaudio_loopback/aaudio_loopback /data/aaudio_loopback
+adb shell /data/aaudio_loopback -?
diff --git a/media/libaaudio/examples/write_sine/README.md b/media/libaaudio/examples/write_sine/README.md
index b150471..73e6fc9 100644
--- a/media/libaaudio/examples/write_sine/README.md
+++ b/media/libaaudio/examples/write_sine/README.md
@@ -1,7 +1,10 @@
-# cd to this directory
-mkdir -p jni/include/aaudio
-ln -s $PLATFORM/frameworks/av/media/liboboe/include/aaudio/*.h jni/include/aaudio
-ln -s $PLATFORM/out/target/product/$TARGET_PRODUCT/symbols/out/soong/ndk/platforms/android-current/arch-arm64/usr/lib/liboboe.so jni
-$NDK/ndk-build
-adb push libs/arm64-v8a/write_sine_threaded /data
-adb shell /data/write_sine_threaded
+# to run write_sine examples from the command line
+{cd to top of the repo}
+mmma frameworks/av/media/libaaudio/examples/
+adb root
+adb remount -R
+adb push $OUT/data/nativetest/write_sine/write_sine /data/write_sine
+adb shell /data/write_sine -?
+
+adb push $OUT/data/nativetest/write_sine_callback/write_sine_callback /data/write_sine_callback
+adb shell /data/write_sine_callback -?
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 0c4a8f7..6c364c9 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -103,7 +103,23 @@
      *
      * Available since API level 31.
      */
-    AAUDIO_FORMAT_PCM_I32
+    AAUDIO_FORMAT_PCM_I32,
+
+    /**
+     * This format is used for compressed audio wrapped in IEC61937 for HDMI
+     * or S/PDIF passthrough.
+     *
+     * Unlike PCM playback, the Android framework is not able to do format
+     * conversion for IEC61937. When IEC61937 is requested, the sampling rate and
+     * the channel count or channel mask must be specified; otherwise, opening the
+     * stream may fail. Apps can get the correct configuration for the playback
+     * by calling
+     * <a href="/reference/android/media/AudioManager#getDevices(int)">
+     *   AudioManager#getDevices(int)</a>.
+     *
+     * Available since API level 34.
+     */
+    AAUDIO_FORMAT_IEC61937
 
 };
 typedef int32_t aaudio_format_t;
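Per the new documentation, an IEC61937 stream must be opened with an explicit sample rate and channel count (or mask), because the framework cannot convert the bitstream. A minimal sketch using the public AAudio C API, assuming API level 34 and a 48 kHz stereo transmission format (in practice the values should come from the device, e.g. via AudioManager#getDevices); error handling is elided.

#include <aaudio/AAudio.h>

// Open a stereo 48 kHz IEC61937 passthrough output stream.
AAudioStream *openIec61937Stream() {
    AAudioStreamBuilder *builder = nullptr;
    if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return nullptr;
    AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_IEC61937);
    AAudioStreamBuilder_setSampleRate(builder, 48000);   // must be specified
    AAudioStreamBuilder_setChannelCount(builder, 2);     // or AAudioStreamBuilder_setChannelMask()
    AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_OUTPUT);
    AAudioStream *stream = nullptr;
    AAudioStreamBuilder_openStream(builder, &stream);    // may fail if the device cannot pass through
    AAudioStreamBuilder_delete(builder);
    return stream;
}
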
@@ -261,6 +277,7 @@
     AAUDIO_STREAM_STATE_CLOSED,
     /**
      * The stream is disconnected from audio device.
+     * @deprecated
      */
     AAUDIO_STREAM_STATE_DISCONNECTED
 };
@@ -870,6 +887,12 @@
  * will be respected if both this function and {@link AAudioStreamBuilder_setChannelMask} are
  * called.
  *
+ * Note that if the channel count is two, the stream may be mixed down to mono when the device
+ * only supports one channel. If the channel count is greater than two but the device supports
+ * fewer channels than requested, the channels above the device's channel count will be dropped.
+ * If higher channels should be mixed or spatialized instead, use
+ * {@link AAudioStreamBuilder_setChannelMask}.
+ *
  * Available since API level 26.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
@@ -1157,7 +1180,10 @@
  * in the streams current data format to the audioData buffer.
  *
  * For an input stream, this function should read and process numFrames of data
- * from the audioData buffer.
+ * from the audioData buffer. The data in the audioData buffer must not be modified
+ * directly. Instead, it should be copied to another buffer before doing any modification.
+ * In many cases, writing to the audioData buffer of an input stream will result in a
+ * native exception.
  *
  * The audio data is passed through the buffer. So do NOT call AAudioStream_read() or
  * AAudioStream_write() on the stream that is making the callback.
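The clarified callback documentation above says an input stream's audioData must be treated as read-only and copied out before any processing. A minimal sketch of such a data callback, assuming float PCM and a caller-provided scratch buffer pre-sized for at least numFrames * channelCount samples:

#include <aaudio/AAudio.h>
#include <cstring>
#include <vector>

struct RecorderState {
    std::vector<float> scratch;   // pre-sized to maxFrames * channelCount
    int32_t channelCount = 2;
};

// Data callback for an input (recording) stream: copy first, then process the copy.
aaudio_data_callback_result_t onAudioReady(AAudioStream * /*stream*/,
                                           void *userData,
                                           void *audioData,
                                           int32_t numFrames) {
    auto *state = static_cast<RecorderState *>(userData);
    const size_t samples = (size_t)numFrames * state->channelCount;
    // Do NOT write to audioData; copy it into our own buffer instead.
    std::memcpy(state->scratch.data(), audioData, samples * sizeof(float));
    // ... process state->scratch here ...
    return AAUDIO_CALLBACK_RESULT_CONTINUE;
}
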
@@ -1441,7 +1467,10 @@
 /**
  * Asynchronous request for the stream to flush.
  * Flushing will discard any pending data.
- * This call only works if the stream is pausing or paused. TODO review
+ * This call only works if the stream is OPEN, PAUSED, STOPPED, or FLUSHED.
+ * Calling this function when in other states,
+ * or calling from an AAudio callback function,
+ * will have no effect and an error will be returned.
  * Frame counters are not reset by a flush. They may be advanced.
  * After this call the state will be in {@link #AAUDIO_STREAM_STATE_FLUSHING} or
  * {@link #AAUDIO_STREAM_STATE_FLUSHED}.
@@ -1672,22 +1701,55 @@
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual sample rate
+ * @return actual sample rate of the stream
  */
 AAUDIO_API int32_t AAudioStream_getSampleRate(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
+ * There may be sample rate conversions in the Audio framework.
+ * The sample rate set in the stream builder may not be the actual sample rate used by the hardware.
+ *
+ * This returns the sample rate used by the hardware.
+ *
+ * If AAudioStreamBuilder_openStream() returned AAUDIO_OK, the result should always be valid.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual sample rate of the underlying hardware
+ */
+AAUDIO_API int32_t AAudioStream_getHardwareSampleRate(AAudioStream* stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
+
+/**
  * A stream has one or more channels of data.
  * A frame will contain one sample for each channel.
  *
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual number of channels
+ * @return actual number of channels of the stream
  */
 AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
+ * There may be channel conversions in the Audio framework.
+ * The channel count or channel mask set in the stream builder may not be the actual number of
+ * channels used by the hardware.
+ *
+ * This returns the channel count used by the hardware.
+ *
+ * If AAudioStreamBuilder_openStream() returned AAUDIO_OK, the result should always be valid.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of channels of the underlying hardware
+ */
+AAUDIO_API int32_t AAudioStream_getHardwareChannelCount(AAudioStream* stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
+
+/**
  * Identical to AAudioStream_getChannelCount().
  *
  * Available since API level 26.
@@ -1709,11 +1771,27 @@
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual data format
+ * @return actual data format of the stream
  */
 AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
+ * There may be data format conversions in the Audio framework.
+ * The data format set in the stream builder may not be the actual format used by the hardware.
+ *
+ * This returns the audio format used by the hardware.
+ * AUDIO_FORMAT_PCM_8_24_BIT is currently not supported in AAudio, but the hardware may use it.
+ * If the hardware uses AUDIO_FORMAT_PCM_8_24_BIT, this function returns AAUDIO_FORMAT_PCM_I24_PACKED.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual data format of the underlying hardware.
+ */
+AAUDIO_API aaudio_format_t AAudioStream_getHardwareFormat(AAudioStream* stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
+
+/**
  * Provide actual sharing mode.
  *
  * Available since API level 26.
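The new getHardware* accessors introduced above expose what the hardware is actually running, as opposed to the stream-level values that may have been converted by the framework. A small sketch comparing the two after a successful open, assuming API level 34 (__ANDROID_API_U__):

#include <aaudio/AAudio.h>
#include <cstdio>

// Log stream vs. hardware configuration for an already-opened stream.
void logStreamVsHardware(AAudioStream *stream) {
    printf("stream:   %d Hz, %d ch, format %d\n",
           AAudioStream_getSampleRate(stream),
           AAudioStream_getChannelCount(stream),
           AAudioStream_getFormat(stream));
    printf("hardware: %d Hz, %d ch, format %d\n",
           AAudioStream_getHardwareSampleRate(stream),
           AAudioStream_getHardwareChannelCount(stream),
           AAudioStream_getHardwareFormat(stream));
}
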
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 0f2d7a2..01d97b6 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -87,7 +87,7 @@
  * @note This is only for testing. Do not use this in an application.
  * It may change or be removed at any time.
  *
- * @return true if the stream uses ther MMAP data path
+ * @return true if the stream uses the MMAP data path
  */
 AAUDIO_API bool AAudioStream_isMMapUsed(AAudioStream* stream);
 
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 4c5fc71..30f451a 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -212,6 +212,7 @@
         "flowgraph/ChannelCountConverter.cpp",
         "flowgraph/ClipToRange.cpp",
         "flowgraph/FlowGraphNode.cpp",
+        "flowgraph/Limiter.cpp",
         "flowgraph/ManyToMultiConverter.cpp",
         "flowgraph/MonoBlend.cpp",
         "flowgraph/MonoToMultiConverter.cpp",
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index b60bac2..c4692ce 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -40,6 +40,10 @@
     auto convFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
             parcelable.audioFormat);
     setFormat(convFormat.ok() ? convFormat.value() : AUDIO_FORMAT_INVALID);
+    if (!convFormat.ok()) {
+        ALOGE("audioFormat (%s) aidl2legacy conversion failed",
+              parcelable.audioFormat.toString().c_str());
+    }
     static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
     setDirection(parcelable.direction);
     static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
@@ -52,7 +56,6 @@
     setSpatializationBehavior(parcelable.spatializationBehavior);
     setIsContentSpatialized(parcelable.isContentSpatialized);
 
-
     static_assert(sizeof(aaudio_input_preset_t) == sizeof(parcelable.inputPreset));
     setInputPreset(parcelable.inputPreset);
     setBufferCapacity(parcelable.bufferCapacity);
@@ -62,6 +65,15 @@
     static_assert(sizeof(aaudio_session_id_t) == sizeof(parcelable.sessionId));
     setSessionId(parcelable.sessionId);
     setPrivacySensitive(parcelable.isPrivacySensitive);
+    setHardwareSamplesPerFrame(parcelable.hardwareSamplesPerFrame);
+    setHardwareSampleRate(parcelable.hardwareSampleRate);
+    auto convHardwareFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
+            parcelable.hardwareAudioFormat);
+    setHardwareFormat(convHardwareFormat.ok() ? convHardwareFormat.value() : AUDIO_FORMAT_INVALID);
+    if (!convHardwareFormat.ok()) {
+        ALOGE("hardwareAudioFormat (%s) aidl2legacy conversion failed",
+              parcelable.hardwareAudioFormat.toString().c_str());
+    }
 }
 
 AAudioStreamConfiguration&
@@ -82,6 +94,8 @@
     if (convAudioFormat.ok()) {
         result.audioFormat = convAudioFormat.value();
     } else {
+        ALOGE("audioFormat (%s) legacy2aidl conversion failed",
+              audio_format_to_string(getFormat()));
         result.audioFormat = AudioFormatDescription{};
         result.audioFormat.type =
                 android::media::audio::common::AudioFormatType::SYS_RESERVED_INVALID;
@@ -92,6 +106,10 @@
     result.usage = getUsage();
     static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
     result.contentType = getContentType();
+    static_assert(
+            sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
+    result.spatializationBehavior = getSpatializationBehavior();
+    result.isContentSpatialized = isContentSpatialized();
     static_assert(sizeof(aaudio_input_preset_t) == sizeof(result.inputPreset));
     result.inputPreset = getInputPreset();
     result.bufferCapacity = getBufferCapacity();
@@ -100,5 +118,18 @@
     static_assert(sizeof(aaudio_session_id_t) == sizeof(result.sessionId));
     result.sessionId = getSessionId();
     result.isPrivacySensitive = isPrivacySensitive();
+    result.hardwareSamplesPerFrame = getHardwareSamplesPerFrame();
+    result.hardwareSampleRate = getHardwareSampleRate();
+    auto convHardwareAudioFormat = android::legacy2aidl_audio_format_t_AudioFormatDescription(
+            getHardwareFormat());
+    if (convHardwareAudioFormat.ok()) {
+        result.hardwareAudioFormat = convHardwareAudioFormat.value();
+    } else {
+        ALOGE("hardwareAudioFormat (%s) legacy2aidl conversion failed",
+              audio_format_to_string(getHardwareFormat()));
+        result.hardwareAudioFormat = AudioFormatDescription{};
+        result.hardwareAudioFormat.type =
+                android::media::audio::common::AudioFormatType::SYS_RESERVED_INVALID;
+    }
     return result;
 }
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index 983e193..fa46e0d 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -34,4 +34,7 @@
     int /* aaudio_allowed_capture_policy_t */ allowedCapturePolicy;  // = AAUDIO_UNSPECIFIED;
     int /* aaudio_session_id_t */             sessionId;  //            = AAUDIO_SESSION_ID_NONE;
     boolean                                   isPrivacySensitive;  //   = false;
+    int                                       hardwareSamplesPerFrame;//= AAUDIO_UNSPECIFIED;
+    int                                       hardwareSampleRate;  //   = AAUDIO_UNSPECIFIED;
+    AudioFormatDescription                    hardwareAudioFormat;  //  = AUDIO_FORMAT_DEFAULT;
 }
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 2ed3e3c..5444565 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -20,7 +20,7 @@
 
 #include "AAudioFlowGraph.h"
 
-#include <flowgraph/ClipToRange.h>
+#include <flowgraph/Limiter.h>
 #include <flowgraph/ManyToMultiConverter.h>
 #include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
@@ -78,11 +78,11 @@
     }
 
     // For a pure float graph, there is a chance that the data range may be very large.
-    // So we should clip to a reasonable value that allows a little headroom.
+    // So we should limit to a reasonable value that allows a little headroom.
     if (sourceFormat == AUDIO_FORMAT_PCM_FLOAT && sinkFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        mClipper = std::make_unique<ClipToRange>(sourceChannelCount);
-        lastOutput->connect(&mClipper->input);
-        lastOutput = &mClipper->output;
+        mLimiter = std::make_unique<Limiter>(sourceChannelCount);
+        lastOutput->connect(&mLimiter->input);
+        lastOutput = &mLimiter->output;
     }
 
     // Expand the number of channels if required.
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 602c17f..35fef37 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -24,7 +24,7 @@
 
 #include <aaudio/AAudio.h>
 #include <audio_utils/Balance.h>
-#include <flowgraph/ClipToRange.h>
+#include <flowgraph/Limiter.h>
 #include <flowgraph/ManyToMultiConverter.h>
 #include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
@@ -74,7 +74,7 @@
 private:
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
-    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ClipToRange> mClipper;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::Limiter> mLimiter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ManyToMultiConverter>
             mManyToMultiConverter;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 9f0564f..8fe8569 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -131,6 +131,10 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
+    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
+    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
+    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
+
     mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
 
     mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
@@ -192,6 +196,10 @@
     // Save device format so we can do format conversion and volume scaling together.
     setDeviceFormat(configurationOutput.getFormat());
 
+    setHardwareSamplesPerFrame(configurationOutput.getHardwareSamplesPerFrame());
+    setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
+    setHardwareFormat(configurationOutput.getHardwareFormat());
+
     result = mServiceInterface.getStreamDescription(mServiceStreamHandle, mEndPointParcelable);
     if (result != AAUDIO_OK) {
         goto error;
@@ -315,11 +323,10 @@
     aaudio_result_t result = AAUDIO_OK;
     ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
     if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
-        aaudio_stream_state_t currentState = getState();
         // Don't release a stream while it is running. Stop it first.
         // If DISCONNECTED then we should still try to stop in case the
         // error callback is still running.
-        if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        if (isActive() || isDisconnected()) {
             requestStop_l();
         }
 
@@ -432,11 +439,11 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_stream_state_t originalState = getState();
-    if (originalState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGD("requestStart() but DISCONNECTED");
         return AAUDIO_ERROR_DISCONNECTED;
     }
+    aaudio_stream_state_t originalState = getState();
     setState(AAUDIO_STREAM_STATE_STARTING);
 
     // Clear any stale timestamps from the previous run.
@@ -456,7 +463,7 @@
         ALOGD("%s() error = %d, stream was probably stolen", __func__, result);
         // Stealing was added in R. Coerce result to improve backward compatibility.
         result = AAUDIO_ERROR_DISCONNECTED;
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected();
     }
 
     startTime = AudioClock::getNanoseconds();
@@ -473,7 +480,6 @@
         result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
     }
     if (result != AAUDIO_OK) {
-        // TODO(b/214607638): Do we want to roll back to original state or keep as disconnected?
         setState(originalState);
     }
     return result;
@@ -499,8 +505,7 @@
 // This must be called under mStreamLock.
 aaudio_result_t AudioStreamInternal::stopCallback_l()
 {
-    if (isDataCallbackSet()
-            && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+    if (isDataCallbackSet() && (isActive() || isDisconnected())) {
         mCallbackEnabled.store(false);
         aaudio_result_t result = joinThread_l(nullptr); // may temporarily unlock mStreamLock
         if (result == AAUDIO_ERROR_INVALID_HANDLE) {
@@ -525,7 +530,7 @@
     // and the callback may have stopped the stream.
     // Check to make sure the stream still needs to be stopped.
     // See also AudioStream::safeStop_l().
-    if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+    if (!(isActive() || isDisconnected())) {
         ALOGD("%s() returning early, not active or disconnected", __func__);
         return AAUDIO_OK;
     }
@@ -605,13 +610,6 @@
     return AAUDIO_ERROR_INVALID_STATE;
 }
 
-aaudio_result_t AudioStreamInternal::updateStateMachine() {
-    if (isDataCallbackActive()) {
-        return AAUDIO_OK; // state is getting updated by the callback thread read/write call
-    }
-    return processCommands();
-}
-
 void AudioStreamInternal::logTimestamp(AAudioServiceMessage &command) {
     static int64_t oldPosition = 0;
     static int64_t oldTime = 0;
@@ -654,6 +652,8 @@
             if (getState() == AAUDIO_STREAM_STATE_STARTING) {
                 setState(AAUDIO_STREAM_STATE_STARTED);
             }
+            mPlayerBase->triggerPortIdUpdate(static_cast<audio_port_handle_t>(
+                                                 message->event.dataLong));
             break;
         case AAUDIO_SERVICE_EVENT_PAUSED:
             ALOGD("%s - got AAUDIO_SERVICE_EVENT_PAUSED", __func__);
@@ -680,7 +680,7 @@
                 mAudioEndpoint->eraseDataMemory();
             }
             result = AAUDIO_ERROR_DISCONNECTED;
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             ALOGW("%s - AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared", __func__);
             break;
         case AAUDIO_SERVICE_EVENT_VOLUME:
@@ -814,7 +814,6 @@
 
             if (wakeTimeNanos > deadlineNanos) {
                 // If we time out, just return the framesWritten so far.
-                // TODO remove after we fix the deadline bug
                 ALOGW("processData(): entered at %lld nanos, currently %lld",
                       (long long) entryTimeNanos, (long long) currentTimeNanos);
                 ALOGW("processData(): TIMEOUT after %lld nanos",
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 2367572..4ea61d2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -48,7 +48,7 @@
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
 
-    virtual aaudio_result_t updateStateMachine() override;
+    virtual aaudio_result_t processCommands() override;
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
@@ -110,8 +110,6 @@
 
     aaudio_result_t drainTimestampsFromService();
 
-    aaudio_result_t processCommands();
-
     aaudio_result_t stopCallback_l();
 
     virtual void prepareBuffersForStart() {}
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 938079b..8a13a6f 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -418,12 +418,24 @@
     return audioStream->getSampleRate();
 }
 
+AAUDIO_API int32_t AAudioStream_getHardwareSampleRate(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getHardwareSampleRate();
+}
+
 AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
     return audioStream->getSamplesPerFrame();
 }
 
+AAUDIO_API int32_t AAudioStream_getHardwareChannelCount(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getHardwareSamplesPerFrame();
+}
+
 AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
 {
     return AAudioStream_getChannelCount(stream);
@@ -432,7 +444,7 @@
 AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getState();
+    return audioStream->getStateExternal();
 }
 
 AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream)
@@ -443,6 +455,14 @@
     return AAudioConvert_androidToAAudioDataFormat(internalFormat);
 }
 
+AAUDIO_API aaudio_format_t AAudioStream_getHardwareFormat(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    // Use audio_format_t internally.
+    audio_format_t internalFormat = audioStream->getHardwareFormat();
+    return AAudioConvert_androidToNearestAAudioDataFormat(internalFormat);
+}
+
 AAUDIO_API aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* stream,
                                                 int32_t requestedFrames)
 {
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 31fd011..c8461cc 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -49,6 +49,9 @@
     mOpPackageName        = other.mOpPackageName;
     mAttributionTag       = other.mAttributionTag;
     mChannelMask          = other.mChannelMask;
+    mHardwareSamplesPerFrame = other.mHardwareSamplesPerFrame;
+    mHardwareSampleRate   = other.mHardwareSampleRate;
+    mHardwareAudioFormat  = other.mHardwareAudioFormat;
 }
 
 static aaudio_result_t isFormatValid(audio_format_t format) {
@@ -58,6 +61,7 @@
         case AUDIO_FORMAT_PCM_32_BIT:
         case AUDIO_FORMAT_PCM_FLOAT:
         case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        case AUDIO_FORMAT_IEC61937:
             break; // valid
         default:
             ALOGD("audioFormat not valid, audio_format_t = 0x%08x", format);
@@ -310,4 +314,7 @@
         "(null)" : mOpPackageName.value().c_str());
     ALOGD("mAttributionTag       = %s", !mAttributionTag.has_value() ?
         "(null)" : mAttributionTag.value().c_str());
-}
+    ALOGD("mHardwareSamplesPerFrame = %6d", mHardwareSamplesPerFrame);
+    ALOGD("mHardwareSampleRate   = %6d", mHardwareSampleRate);
+    ALOGD("mHardwareAudioFormat  = %6d", (int)mHardwareAudioFormat);
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index cb998bf..565d54c 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -171,6 +171,30 @@
         mSamplesPerFrame = AAudioConvert_channelMaskToCount(channelMask);
     }
 
+    int32_t getHardwareSamplesPerFrame() const {
+        return mHardwareSamplesPerFrame;
+    }
+
+    void setHardwareSamplesPerFrame(int32_t hardwareSamplesPerFrame) {
+        mHardwareSamplesPerFrame = hardwareSamplesPerFrame;
+    }
+
+    int32_t getHardwareSampleRate() const {
+        return mHardwareSampleRate;
+    }
+
+    void setHardwareSampleRate(int32_t hardwareSampleRate) {
+        mHardwareSampleRate = hardwareSampleRate;
+    }
+
+    audio_format_t getHardwareFormat() const {
+        return mHardwareAudioFormat;
+    }
+
+    void setHardwareFormat(audio_format_t hardwareAudioFormat) {
+        mHardwareAudioFormat = hardwareAudioFormat;
+    }
+
     /**
      * @return bytes per frame of getFormat()
      */
@@ -210,6 +234,10 @@
     std::optional<std::string>      mOpPackageName        = {};
     std::optional<std::string>      mAttributionTag       = {};
     aaudio_channel_mask_t           mChannelMask          = AAUDIO_UNSPECIFIED;
+    int                             mHardwareSamplesPerFrame
+                                                          = AAUDIO_UNSPECIFIED;
+    int                             mHardwareSampleRate   = AAUDIO_UNSPECIFIED;
+    audio_format_t                  mHardwareAudioFormat  = AUDIO_FORMAT_DEFAULT;
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 0e5b8be..30f9677 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -82,6 +82,7 @@
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_FLOAT);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I24_PACKED);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I32);
+        AAUDIO_CASE_ENUM(AAUDIO_FORMAT_IEC61937);
     }
     return "Unrecognized";
 }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 06f05b0..c31947f 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -21,7 +21,9 @@
 #include <atomic>
 #include <stdint.h>
 
+#include <linux/futex.h>
 #include <media/MediaMetricsItem.h>
+#include <sys/syscall.h>
 
 #include <aaudio/AAudio.h>
 
@@ -59,10 +61,9 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                          || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                          || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
-                        "~AudioStream() - still in use, state = %s",
-                        AudioGlobal_convertStreamStateToText(getState()));
+                          || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
+                        "~AudioStream() - still in use, state = %s disconnected = %d",
+                        AudioGlobal_convertStreamStateToText(getState()), isDisconnected());
 }
 
 aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -156,6 +157,11 @@
 
     std::lock_guard<std::mutex> lock(mStreamLock);
 
+    if (isDisconnected()) {
+        ALOGW("%s() stream is disconnected", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+
     switch (getState()) {
         // Is this a good time to start?
         case AAUDIO_STREAM_STATE_OPEN:
@@ -174,8 +180,13 @@
                   AudioGlobal_convertStreamStateToText(getState()));
             return AAUDIO_ERROR_INVALID_STATE;
 
-        // Don't start when the stream is dead!
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen now that AAUDIO_STREAM_STATE_DISCONNECTED is deprecated;
+            // attempting to start will eventually return AAUDIO_ERROR_DISCONNECTED.
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
+            return AAUDIO_ERROR_INTERNAL;
+
+        // Don't start when the stream is dead!
         case AAUDIO_STREAM_STATE_CLOSING:
         case AAUDIO_STREAM_STATE_CLOSED:
         default:
@@ -208,7 +219,11 @@
         // Proceed with pausing.
         case AAUDIO_STREAM_STATE_STARTING:
         case AAUDIO_STREAM_STATE_STARTED:
+            break;
+
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen after deprecating AAUDIO_STREAM_STATE_DISCONNECTED
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
             break;
 
             // Transition from one inactive state to another.
@@ -287,7 +302,10 @@
         // Proceed with stopping.
         case AAUDIO_STREAM_STATE_STARTING:
         case AAUDIO_STREAM_STATE_STARTED:
+            break;
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen after deprecating AAUDIO_STREAM_STATE_DISCONNECTED
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
             break;
 
         // Transition from one inactive state to another.
@@ -362,37 +380,46 @@
 }
 
 void AudioStream::setState(aaudio_stream_state_t state) {
-    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
-    if (state == mState) {
+    aaudio_stream_state_t oldState = mState.load();
+    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), oldState, state);
+    if (state == oldState) {
         return; // no change
     }
-    // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
-        android::mediametrics::LogItem(mMetricsId)
-                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
-                .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
-                .record();
-    }
+    LOG_ALWAYS_FATAL_IF(state == AAUDIO_STREAM_STATE_DISCONNECTED,
+                        "Disconnected state must be separated from mState");
     // CLOSED is a final state
-    if (mState == AAUDIO_STREAM_STATE_CLOSED) {
+    if (oldState == AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_CLOSING
+    } else if (oldState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
-    // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
-               && !(state == AAUDIO_STREAM_STATE_CLOSING
-                   || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
-
     } else {
-        mState = state;
+        mState.store(state);
+        // Wake up a wakeForStateChange thread if it exists.
+        syscall(SYS_futex, &mState, FUTEX_WAKE_PRIVATE, INT_MAX, NULL, NULL, 0);
     }
 }
 
+void AudioStream::setDisconnected() {
+    const bool old = isDisconnected();
+    ALOGD("%s setting disconnected, current disconnected: %d, current state: %d",
+          __func__, old, getState());
+    if (old) {
+        return; // no change, the stream is already disconnected
+    }
+    mDisconnected.store(true);
+    // Wake up a wakeForStateChange thread if it exists.
+    syscall(SYS_futex, &mState, FUTEX_WAKE_PRIVATE, INT_MAX, NULL, NULL, 0);
+    // Track transition to DISCONNECTED state.
+    android::mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
+            .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
+            .record();
+}
+
 aaudio_result_t AudioStream::waitForStateChange(aaudio_stream_state_t currentState,
                                                 aaudio_stream_state_t *nextState,
                                                 int64_t timeoutNanoseconds)
@@ -403,20 +430,26 @@
     }
 
     int64_t durationNanos = 20 * AAUDIO_NANOS_PER_MILLISECOND; // arbitrary
-    aaudio_stream_state_t state = getState();
+    aaudio_stream_state_t state = getStateExternal();
     while (state == currentState && timeoutNanoseconds > 0) {
         if (durationNanos > timeoutNanoseconds) {
             durationNanos = timeoutNanoseconds;
         }
-        AudioClock::sleepForNanos(durationNanos);
-        timeoutNanoseconds -= durationNanos;
+        struct timespec time;
+        time.tv_sec = durationNanos / AAUDIO_NANOS_PER_SECOND;
+        // Add the fractional nanoseconds.
+        time.tv_nsec = durationNanos - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
 
+        // Sleep for durationNanos. If mState changes from the callback
+        // thread, this thread will wake up earlier.
+        syscall(SYS_futex, &mState, FUTEX_WAIT_PRIVATE, currentState, &time, NULL, 0);
+        timeoutNanoseconds -= durationNanos;
         aaudio_result_t result = updateStateMachine();
         if (result != AAUDIO_OK) {
             return result;
         }
 
-        state = getState();
+        state = getStateExternal();
     }
     if (nextState != nullptr) {
         *nextState = state;
@@ -607,6 +640,13 @@
     doSetVolume(); // apply this change
 }
 
+aaudio_stream_state_t AudioStream::getStateExternal() const {
+    if (isDisconnected()) {
+        return AAUDIO_STREAM_STATE_DISCONNECTED;
+    }
+    return getState();
+}
+
 void AudioStream::MyPlayerBase::registerWithAudioManager(const android::sp<AudioStream>& parent) {
     std::lock_guard<std::mutex> lock(mParentLock);
     mParent = parent;
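In the AudioStream.cpp changes above, setState() and setDisconnected() store into an atomic and then issue FUTEX_WAKE_PRIVATE on mState, while waitForStateChange() replaces the fixed sleep with FUTEX_WAIT_PRIVATE so a waiter wakes as soon as the state changes instead of polling for up to 20 ms. A stripped-down, stand-alone sketch of that pattern on Linux (raw syscall, 32-bit futex word); this is an illustration of the mechanism, not the AAudio code itself.

#include <atomic>
#include <climits>
#include <cstdint>
#include <ctime>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

std::atomic<int32_t> gState{0};  // the futex word must be a 32-bit int

// Waiter: sleep until gState differs from 'expected' or the timeout expires.
void waitForChange(int32_t expected, int64_t timeoutNanos) {
    timespec ts;
    ts.tv_sec  = timeoutNanos / 1000000000;
    ts.tv_nsec = timeoutNanos % 1000000000;
    // Returns immediately if gState != expected; otherwise blocks until woken or timed out.
    syscall(SYS_futex, &gState, FUTEX_WAIT_PRIVATE, expected, &ts, nullptr, 0);
}

// Setter: publish the new state, then wake every waiter.
void setStateAndWake(int32_t newState) {
    gState.store(newState);
    syscall(SYS_futex, &gState, FUTEX_WAKE_PRIVATE, INT_MAX, nullptr, 0);
}
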
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 5fb4528..9b4b734 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -100,10 +100,17 @@
                                        int64_t *timeNanoseconds) = 0;
 
     /**
-     * Update state machine.()
-     * @return
+     * Update state machine.
+     * @return result of the operation.
      */
-    virtual aaudio_result_t updateStateMachine() = 0;
+    aaudio_result_t updateStateMachine() {
+        if (isDataCallbackActive()) {
+            return AAUDIO_OK; // state is getting updated by the callback thread read/write call
+        }
+        return processCommands();
+    }
+
+    virtual aaudio_result_t processCommands() = 0;
 
     // =========== End ABSTRACT methods ===========================
 
@@ -184,9 +191,11 @@
     // ============== Queries ===========================
 
     aaudio_stream_state_t getState() const {
-        return mState;
+        return mState.load();
     }
 
+    aaudio_stream_state_t getStateExternal() const;
+
     virtual int32_t getBufferSize() const {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
@@ -215,14 +224,26 @@
         return mSampleRate;
     }
 
+    aaudio_result_t getHardwareSampleRate() const {
+        return mHardwareSampleRate;
+    }
+
     audio_format_t getFormat()  const {
         return mFormat;
     }
 
+    audio_format_t getHardwareFormat()  const {
+        return mHardwareFormat;
+    }
+
     aaudio_result_t getSamplesPerFrame() const {
         return mSamplesPerFrame;
     }
 
+    aaudio_result_t getHardwareSamplesPerFrame() const {
+        return mHardwareSamplesPerFrame;
+    }
+
     virtual int32_t getPerformanceMode() const {
         return mPerformanceMode;
     }
@@ -403,7 +424,7 @@
      * This should only be called for client streams and not for streams
      * that run in the service.
      */
-    void registerPlayerBase() {
+    virtual void registerPlayerBase() {
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             mPlayerBase->registerWithAudioManager(this);
         }
@@ -521,6 +542,11 @@
     }
 
     // This should not be called after the open() call.
+    void setHardwareSampleRate(int32_t hardwareSampleRate) {
+        mHardwareSampleRate = hardwareSampleRate;
+    }
+
+    // This should not be called after the open() call.
     void setFramesPerBurst(int32_t framesPerBurst) {
         mFramesPerBurst = framesPerBurst;
     }
@@ -541,6 +567,16 @@
     }
 
     // This should not be called after the open() call.
+    void setHardwareFormat(audio_format_t format) {
+        mHardwareFormat = format;
+    }
+
+    // This should not be called after the open() call.
+    void setHardwareSamplesPerFrame(int32_t hardwareSamplesPerFrame) {
+        mHardwareSamplesPerFrame = hardwareSamplesPerFrame;
+    }
+
+    // This should not be called after the open() call.
     void setDeviceFormat(audio_format_t format) {
         mDeviceFormat = format;
     }
@@ -551,6 +587,11 @@
 
     void setState(aaudio_stream_state_t state);
 
+    bool isDisconnected() const {
+        return mDisconnected.load();
+    }
+    void setDisconnected();
+
     void setDeviceId(int32_t deviceId) {
         mDeviceId = deviceId;
     }
@@ -657,6 +698,8 @@
 
     std::mutex                 mStreamLock;
 
+    const android::sp<MyPlayerBase>   mPlayerBase;
+
 private:
 
     aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
@@ -672,17 +715,21 @@
         close_l();
     }
 
-    const android::sp<MyPlayerBase>   mPlayerBase;
+    std::atomic<aaudio_stream_state_t>          mState{AAUDIO_STREAM_STATE_UNINITIALIZED};
+
+    std::atomic_bool            mDisconnected{false};
 
     // These do not change after open().
     int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+    int32_t                     mHardwareSamplesPerFrame = AAUDIO_UNSPECIFIED;
     aaudio_channel_mask_t       mChannelMask = AAUDIO_UNSPECIFIED;
     int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
+    int32_t                     mHardwareSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
     bool                        mSharingModeMatchRequired = false; // must match sharing mode requested
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
-    aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+    audio_format_t              mHardwareFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
     int32_t                     mFramesPerBurst = 0;
     int32_t                     mBufferCapacity = 0;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index a100aa9..ac4e2b3 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,7 +24,6 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
-#include <android/media/audio/common/AudioMMapPolicy.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/AudioSystem.h>
@@ -37,10 +36,10 @@
 #include "core/AudioStreamBuilder.h"
 #include "legacy/AudioStreamRecord.h"
 #include "legacy/AudioStreamTrack.h"
+#include "utility/AAudioUtilities.h"
 
 using namespace aaudio;
 
-using android::media::audio::common::AudioMMapPolicy;
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
@@ -95,37 +94,6 @@
     return result;
 }
 
-namespace {
-
-aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
-    switch (aidl) {
-        case AudioMMapPolicy::NEVER:
-            return AAUDIO_POLICY_NEVER;
-        case AudioMMapPolicy::AUTO:
-            return AAUDIO_POLICY_AUTO;
-        case AudioMMapPolicy::ALWAYS:
-            return AAUDIO_POLICY_ALWAYS;
-        case AudioMMapPolicy::UNSPECIFIED:
-        default:
-            return AAUDIO_UNSPECIFIED;
-    }
-}
-
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
-aaudio_policy_t getAAudioPolicy(
-        const std::vector<AudioMMapPolicyInfo>& policyInfos) {
-    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
-    for (size_t i = 1; i < policyInfos.size(); ++i) {
-        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
-            return AAUDIO_POLICY_AUTO;
-        }
-    }
-    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
-}
-
-} // namespace
-
 // Try to open using MMAP path if that is allowed.
 // Fall back to Legacy path if MMAP not available.
 // Exact behavior is controlled by MMapPolicy.
@@ -145,12 +113,28 @@
     }
 
     std::vector<AudioMMapPolicyInfo> policyInfos;
-    // The API setting is the highest priority.
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
-    // If not specified then get from a system property.
-    if (mmapPolicy == AAUDIO_UNSPECIFIED && android::AudioSystem::getMmapPolicyInfo(
-                AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
-        mmapPolicy = getAAudioPolicy(policyInfos);
+    if (android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
+        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(policyInfos);
+        if (mmapPolicy == AAUDIO_POLICY_ALWAYS && systemMmapPolicy == AAUDIO_POLICY_NEVER) {
+            // No need to try as AAudioService is not created and the client only wants MMAP path.
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+        // Use the system property for the mmap policy if
+        //    1. the API setting does not specify an mmap policy, or
+        //    2. the system property specifies the MMAP policy as NEVER. In this case,
+        //       AAudioService will not be started, so there is no need to try the MMAP path.
+        if (mmapPolicy == AAUDIO_UNSPECIFIED || systemMmapPolicy == AAUDIO_POLICY_NEVER) {
+            mmapPolicy = systemMmapPolicy;
+        }
+    } else {
+        // If querying the mmap policy info fails, the AAudioService has most likely not been
+        // created. In this case, do not try the MMAP path.
+        if (mmapPolicy == AAUDIO_POLICY_ALWAYS) {
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+        mmapPolicy = AAUDIO_POLICY_NEVER;
     }
     // If still not specified then use the default.
     if (mmapPolicy == AAUDIO_UNSPECIFIED) {
@@ -161,7 +145,7 @@
     aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
-        mmapExclusivePolicy = getAAudioPolicy(policyInfos);
+        mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
     }
     if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
@@ -192,6 +176,11 @@
         allowMMap = false;
     }
 
+    if (getFormat() == AUDIO_FORMAT_IEC61937) {
+        ALOGD("%s IEC61937 format is selected, do not allow MMAP in this case.", __func__);
+        allowMMap = false;
+    }
+
     if (!allowMMap && !allowLegacy) {
         ALOGE("%s() no backend available: neither MMAP nor legacy path are allowed", __func__);
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
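The rewritten builder logic above resolves the effective MMAP policy from two inputs: the per-process API setting and the system-wide policy reported by AudioSystem. A compact sketch of that decision using the public AAudio policy constants; resolveMmapPolicy() is a hypothetical helper, and the real code additionally consults the EXCLUSIVE policy, applies a default when the result is still unspecified, and checks other stream properties.

#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
#include <cstdint>

// Returns the effective policy, or a negative AAudio error code.
int32_t resolveMmapPolicy(aaudio_policy_t apiPolicy, bool haveSystemPolicy,
                          aaudio_policy_t systemPolicy) {
    if (!haveSystemPolicy) {
        // Could not query the system: AAudioService is probably not running.
        return (apiPolicy == AAUDIO_POLICY_ALWAYS) ? AAUDIO_ERROR_NO_SERVICE
                                                   : AAUDIO_POLICY_NEVER;
    }
    if (apiPolicy == AAUDIO_POLICY_ALWAYS && systemPolicy == AAUDIO_POLICY_NEVER) {
        return AAUDIO_ERROR_NO_SERVICE;  // MMAP required but globally disabled
    }
    if (apiPolicy == AAUDIO_UNSPECIFIED || systemPolicy == AAUDIO_POLICY_NEVER) {
        return systemPolicy;             // fall back to the system-wide setting
    }
    return apiPolicy;                    // otherwise the API setting wins
}
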
diff --git a/media/libaaudio/src/flowgraph/Limiter.cpp b/media/libaaudio/src/flowgraph/Limiter.cpp
new file mode 100644
index 0000000..def905a
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/Limiter.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <math.h>
+#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "Limiter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+Limiter::Limiter(int32_t channelCount)
+        : FlowGraphFilter(channelCount) {
+}
+
+int32_t Limiter::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
+
+    int32_t numSamples = numFrames * output.getSamplesPerFrame();
+
+    // Cache the last valid output to reduce memory read/write
+    float lastValidOutput = mLastValidOutput;
+
+    for (int32_t i = 0; i < numSamples; i++) {
+        // Use the previous output if the input is NaN
+        if (!isnan(*inputBuffer)) {
+            lastValidOutput = processFloat(*inputBuffer);
+        }
+        inputBuffer++;
+        *outputBuffer++ = lastValidOutput;
+    }
+    mLastValidOutput = lastValidOutput;
+
+    return numFrames;
+}
+
+float Limiter::processFloat(float in)
+{
+    float in_abs = fabsf(in);
+    if (in_abs <= 1) {
+        return in;
+    }
+    float out;
+    if (in_abs < kXWhenYis3Decibels) {
+        out = (kPolynomialSplineA * in_abs + kPolynomialSplineB) * in_abs + kPolynomialSplineC;
+    } else {
+        out = M_SQRT2;
+    }
+    if (in < 0) {
+        out = -out;
+    }
+    return out;
+}
diff --git a/media/libaaudio/src/flowgraph/Limiter.h b/media/libaaudio/src/flowgraph/Limiter.h
new file mode 100644
index 0000000..393a7bf
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/Limiter.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_LIMITER_H
+#define FLOWGRAPH_LIMITER_H
+
+#include <atomic>
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+class Limiter : public FlowGraphFilter {
+public:
+    explicit Limiter(int32_t channelCount);
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "Limiter";
+    }
+
+private:
+    // These coefficients define a quadratic spline segment, Ax^2 + Bx + C.
+    // The range extends up to 3 dB (10^(3/20)) to match AudioTrack for float data.
+    static constexpr float kPolynomialSplineA = -0.6035533905; // -(1+sqrt(2))/4
+    static constexpr float kPolynomialSplineB = 2.2071067811; // (3+sqrt(2))/2
+    static constexpr float kPolynomialSplineC = -0.6035533905; // -(1+sqrt(2))/4
+    static constexpr float kXWhenYis3Decibels = 1.8284271247; // -1+2sqrt(2)
+
+    /**
+     * Process an input based on the following:
+     * If between -1 and 1, return the input value.
+     * If above kXWhenYis3Decibels, return sqrt(2).
+     * If below -kXWhenYis3Decibels, return -sqrt(2).
+     * If between 1 and kXWhenYis3Decibels, use a quadratic spline (Ax^2 + Bx + C).
+     * If between -kXWhenYis3Decibels and -1, use the absolute value for the spline and flip it.
+     * The derivative of the spline is 1 at 1 and 0 at kXWhenYis3Decibels.
+     * This way, the graph is both continuous and differentiable.
+     */
+    float processFloat(float in);
+
+    // Use the previous valid output for NaN inputs
+    float mLastValidOutput = 0.0f;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_LIMITER_H
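The spline coefficients above are chosen so that the limiter's transfer curve stays continuous and has matching slopes at both ends of the knee, as the class comment states. A small standalone check, not part of the patch, evaluates the polynomial and its derivative at x = 1 and x = kXWhenYis3Decibels to confirm those properties numerically.

// Standalone numeric check of the Limiter spline constants (illustration only).
#include <cmath>
#include <cstdio>

int main() {
    const double A = -0.6035533905;    // -(1 + sqrt(2)) / 4
    const double B =  2.2071067811;    //  (3 + sqrt(2)) / 2
    const double C = -0.6035533905;    // -(1 + sqrt(2)) / 4
    const double x3dB = 1.8284271247;  // -1 + 2 * sqrt(2)

    auto value = [&](double x) { return (A * x + B) * x + C; };  // Ax^2 + Bx + C
    auto slope = [&](double x) { return 2.0 * A * x + B; };      // derivative

    // Continuity with the pass-through region and the sqrt(2) ceiling.
    printf("value(1)    = %.6f (expect 1)\n", value(1.0));
    printf("value(x3dB) = %.6f (expect %.6f)\n", value(x3dB), std::sqrt(2.0));
    // Matching slopes: 1 where the knee starts, 0 where the curve flattens.
    printf("slope(1)    = %.6f (expect 1)\n", slope(1.0));
    printf("slope(x3dB) = %.6f (expect 0)\n", slope(x3dB));
    return 0;
}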
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd11169..8595308 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -85,7 +85,7 @@
     // AudioRecord::Buffer
     // TODO define our own AudioBuffer and pass it from the subclasses.
     size_t written = buffer.size();
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGW("%s() data, stream disconnected", __func__);
         // This will kill the stream and prevent it from being restarted.
         // That is OK because the stream is disconnected.
@@ -127,7 +127,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
@@ -150,7 +150,7 @@
     // AudioRecord::Buffer
     // TODO define our own AudioBuffer and pass it from the subclasses.
     size_t written = buffer.size();
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGW("%s() data, stream disconnected", __func__);
         // This will kill the stream and prevent it from being restarted.
         // That is OK because the stream is disconnected.
@@ -192,7 +192,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
@@ -214,11 +214,11 @@
 
 void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
     // There is no need to disconnect if already in these states.
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+    if (!isDisconnected()
             && getState() != AAUDIO_STREAM_STATE_CLOSING
             && getState() != AAUDIO_STREAM_STATE_CLOSED
             ) {
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected();
         if (errorCallbackEnabled) {
             maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
         }
@@ -268,7 +268,7 @@
     ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
     if (getDeviceId() != AAUDIO_UNSPECIFIED
             && getDeviceId() != deviceId
-            && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            && !isDisconnected()
             ) {
         // Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
         // If we have a data callback and the stream is active, then ask the data callback
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 1e39e0f..e760dab 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -208,6 +208,10 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    setHardwareSamplesPerFrame(mAudioRecord->getHalChannelCount());
+    setHardwareSampleRate(mAudioRecord->getHalSampleRate());
+    setHardwareFormat(mAudioRecord->getHalFormat());
+
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
         // The block adapter runs before the format conversion.
@@ -364,8 +368,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamRecord::updateStateMachine()
-{
+aaudio_result_t AudioStreamRecord::processCommands() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_wrapping_frames_t position;
     status_t err;
@@ -404,7 +407,7 @@
         return result;
     }
 
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         return AAUDIO_ERROR_DISCONNECTED;
     }
 
@@ -447,7 +450,7 @@
         // In this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
         // AudioRecord invalidation.
         if (bytesActuallyRead == DEAD_OBJECT) {
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             return AAUDIO_ERROR_DISCONNECTED;
         }
         return AAudioConvert_androidToAAudioResult(bytesActuallyRead);
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 5ce73f9..252ff3c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -58,7 +58,7 @@
 
     int64_t getFramesWritten() override;
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     aaudio_direction_t getDirection() const override {
         return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 6f1dc92..fc8ba9e 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -202,6 +202,10 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    setHardwareSamplesPerFrame(mAudioTrack->getHalChannelCount());
+    setHardwareSampleRate(mAudioTrack->getHalSampleRate());
+    setHardwareFormat(mAudioTrack->getHalFormat());
+
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
         // This may need to change if we add format conversion before
@@ -248,7 +252,7 @@
 
     if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
         ALOGE("%s - Open canceled since state = %d", __func__, getState());
-        if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED)
+        if (isDisconnected())
         {
             ALOGE("%s - Opening while state is disconnected", __func__);
             safeReleaseClose();
@@ -378,8 +382,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamTrack::updateStateMachine()
-{
+aaudio_result_t AudioStreamTrack::processCommands() {
     status_t err;
     aaudio_wrapping_frames_t position;
     switch (getState()) {
@@ -407,7 +410,6 @@
             if (err != OK) {
                 return AAudioConvert_androidToAAudioResult(err);
             } else if (position == 0) {
-                // TODO Advance frames read to match written.
                 setState(AAUDIO_STREAM_STATE_FLUSHED);
             }
         }
@@ -434,7 +436,7 @@
         return result;
     }
 
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         return AAUDIO_ERROR_DISCONNECTED;
     }
 
@@ -448,7 +450,7 @@
         // in this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
         // AudioTrack invalidation
         if (bytesWritten == DEAD_OBJECT) {
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             return AAUDIO_ERROR_DISCONNECTED;
         }
         return AAudioConvert_androidToAAudioResult(bytesWritten);
@@ -553,6 +555,16 @@
     return status;
 }
 
+void AudioStreamTrack::registerPlayerBase() {
+    AudioStream::registerPlayerBase();
+
+    if (mAudioTrack == nullptr) {
+        ALOGW("%s: cannot set piid, AudioTrack is null", __func__);
+        return;
+    }
+    mAudioTrack->setPlayerIId(mPlayerBase->getPlayerIId());
+}
+
 #if AAUDIO_USE_VOLUME_SHAPER
 
 using namespace android::media::VolumeShaper;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 0f4d72b..05609c4 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -79,7 +79,7 @@
         return AAUDIO_DIRECTION_OUTPUT;
     }
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     int64_t incrementClientFrameCounter(int32_t frames) override {
         return incrementFramesWritten(frames);
@@ -87,6 +87,8 @@
 
     android::status_t doSetVolume() override;
 
+    void registerPlayerBase() override;
+
 #if AAUDIO_USE_VOLUME_SHAPER
     virtual android::binder::Status applyVolumeShaper(
             const android::media::VolumeShaper::Configuration& configuration,
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index f45b816..e28dcb4 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -67,6 +67,9 @@
     AAudioStream_getChannelMask;  # introduced=32
     AAudioStream_getSpatializationBehavior;  # introduced=32
     AAudioStream_isContentSpatialized;       # introduced=32
+    AAudioStream_getHardwareChannelCount; # introduced=UpsideDownCake
+    AAudioStream_getHardwareFormat;       # introduced=UpsideDownCake
+    AAudioStream_getHardwareSampleRate;   # introduced=UpsideDownCake
   local:
     *;
 };
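The three symbols added to libaaudio.map.txt expose the HAL-side stream configuration that this change now records through setHardwareSampleRate(), setHardwareSamplesPerFrame(), and setHardwareFormat(). A minimal client-side usage sketch follows; the exact NDK signatures (two int32_t getters and an aaudio_format_t getter) are assumed rather than shown in this diff.

// Illustrative use of the newly exported getters; signatures are assumed, not
// taken from this diff. Call only on a stream that has been opened successfully.
#include <aaudio/AAudio.h>
#include <cstdio>

void logHardwareConfig(AAudioStream* stream) {
    // Reports what the HAL is actually running, which may differ from the
    // client-visible sample rate / channel count / format after conversion.
    int32_t hwRate = AAudioStream_getHardwareSampleRate(stream);
    int32_t hwChannels = AAudioStream_getHardwareChannelCount(stream);
    aaudio_format_t hwFormat = AAudioStream_getHardwareFormat(stream);
    printf("HAL: %d Hz, %d channels, format %d\n", hwRate, hwChannels, (int)hwFormat);
}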
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 872faca..8920e53 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -16,24 +16,28 @@
 
 #define LOG_TAG "AAudio"
 //#define LOG_NDEBUG 0
-#include <utils/Log.h>
 
-#include <cutils/properties.h>
+#include <assert.h>
+#include <math.h>
 #include <stdint.h>
+
+#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <cutils/properties.h>
 #include <sys/types.h>
+#include <system/audio.h>
 #include <utils/Errors.h>
+#include <utils/Log.h>
 
 #include "aaudio/AAudio.h"
 #include "core/AudioGlobal.h"
-#include <aaudio/AAudioTesting.h>
-#include <math.h>
-#include <system/audio.h>
-#include <assert.h>
-
 #include "utility/AAudioUtilities.h"
 
 using namespace android;
 
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+
 status_t AAudioConvert_aaudioToAndroidStatus(aaudio_result_t result) {
     // This covers the case for AAUDIO_OK and for positive results.
     if (result >= 0) {
@@ -140,6 +144,9 @@
     case AAUDIO_FORMAT_PCM_I32:
         androidFormat = AUDIO_FORMAT_PCM_32_BIT;
         break;
+    case AAUDIO_FORMAT_IEC61937:
+        androidFormat = AUDIO_FORMAT_IEC61937;
+        break;
     default:
         androidFormat = AUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, aaudioFormat);
@@ -166,6 +173,9 @@
     case AUDIO_FORMAT_PCM_32_BIT:
         aaudioFormat = AAUDIO_FORMAT_PCM_I32;
         break;
+    case AUDIO_FORMAT_IEC61937:
+        aaudioFormat = AAUDIO_FORMAT_IEC61937;
+        break;
     default:
         aaudioFormat = AAUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, androidFormat);
@@ -174,6 +184,17 @@
     return aaudioFormat;
 }
 
+aaudio_format_t AAudioConvert_androidToNearestAAudioDataFormat(audio_format_t androidFormat) {
+    // Special-case AUDIO_FORMAT_PCM_8_24_BIT because this function is used to find the nearest
+    // resolution of the data format. AUDIO_FORMAT_PCM_8_24_BIT cannot be requested directly
+    // through AAudio, but hardware may use it under the hood.
+    if (androidFormat == AUDIO_FORMAT_PCM_8_24_BIT) {
+        ALOGD("%s() converting 8.24 to 24 bit packed", __func__);
+        return AAUDIO_FORMAT_PCM_I24_PACKED;
+    }
+    return AAudioConvert_androidToAAudioDataFormat(androidFormat);
+}
+
 // Make a message string from the condition.
 #define STATIC_ASSERT(condition) static_assert(condition, #condition)
 
@@ -632,3 +653,31 @@
     }
     return result;
 }
+
+namespace {
+
+aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
+    switch (aidl) {
+        case AudioMMapPolicy::NEVER:
+            return AAUDIO_POLICY_NEVER;
+        case AudioMMapPolicy::AUTO:
+            return AAUDIO_POLICY_AUTO;
+        case AudioMMapPolicy::ALWAYS:
+            return AAUDIO_POLICY_ALWAYS;
+        case AudioMMapPolicy::UNSPECIFIED:
+        default:
+            return AAUDIO_UNSPECIFIED;
+    }
+}
+
+} // namespace
+
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
+    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
+    for (size_t i = 1; i < policyInfos.size(); ++i) {
+        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+            return AAUDIO_POLICY_AUTO;
+        }
+    }
+    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+}
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index b59ce1c..d8e5b00 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -19,14 +19,17 @@
 
 #include <algorithm>
 #include <functional>
+#include <vector>
 #include <stdint.h>
 #include <sys/types.h>
 #include <unistd.h>
 
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <utils/Errors.h>
 #include <system/audio.h>
 
 #include "aaudio/AAudio.h"
+#include "aaudio/AAudioTesting.h"
 
 /**
  * Convert an AAudio result into the closest matching Android status.
@@ -62,6 +65,7 @@
 
 aaudio_format_t AAudioConvert_androidToAAudioDataFormat(audio_format_t format);
 
+aaudio_format_t AAudioConvert_androidToNearestAAudioDataFormat(audio_format_t format);
 
 /**
  * Note that this function does not validate the passed in value.
@@ -343,4 +347,9 @@
     AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
 };
 
+// The AAudio policy will be ALWAYS, NEVER, or UNSPECIFIED only when all policy infos agree on
+// that value. Otherwise, the AAudio policy will be AUTO.
+aaudio_policy_t AAudio_getAAudioPolicy(
+        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+
 #endif //UTILITY_AAUDIO_UTILITIES_H
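AAudio_getAAudioPolicy() collapses the per-device policy list into a single AAudio policy according to the rule in the comment above. A short sketch of what callers see, using the field and enum names that appear elsewhere in this patch (mmapPolicy, AudioMMapPolicy); for illustration only, not part of the patch.

// Illustration of the aggregation rule; not part of the patch.
#include <vector>
#include <android/media/audio/common/AudioMMapPolicy.h>
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include "utility/AAudioUtilities.h"

using android::media::audio::common::AudioMMapPolicy;
using android::media::audio::common::AudioMMapPolicyInfo;

void demo() {
    std::vector<AudioMMapPolicyInfo> infos(2);

    infos[0].mmapPolicy = AudioMMapPolicy::ALWAYS;
    infos[1].mmapPolicy = AudioMMapPolicy::ALWAYS;
    // Every entry agrees, so the common value maps through: AAUDIO_POLICY_ALWAYS.
    aaudio_policy_t agreed = AAudio_getAAudioPolicy(infos);

    infos[1].mmapPolicy = AudioMMapPolicy::NEVER;
    // Entries disagree (an empty list behaves the same way): AAUDIO_POLICY_AUTO.
    aaudio_policy_t mixed = AAudio_getAAudioPolicy(infos);

    (void)agreed; (void)mixed;
}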
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 4b45909..438be0a 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -214,3 +214,17 @@
     srcs: ["test_disconnect_race.cpp"],
     shared_libs: ["libaaudio"],
 }
+
+cc_test {
+    name: "aaudio_test_mmap_path",
+    defaults: [
+        "libaaudio_tests_defaults",
+    ],
+    srcs: ["test_mmap_path.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libaaudio_internal",
+        "libaudioclient",
+        "liblog",
+    ],
+}
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 66b77eb..6f75f5a 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -26,6 +26,7 @@
 #include <gtest/gtest.h>
 
 #include "flowgraph/ClipToRange.h"
+#include "flowgraph/Limiter.h"
 #include "flowgraph/MonoBlend.h"
 #include "flowgraph/MonoToMultiConverter.h"
 #include "flowgraph/SourceFloat.h"
@@ -319,3 +320,77 @@
     }
 }
 
+TEST(test_flowgraph, module_limiter) {
+    constexpr int kNumSamples = 101;
+    constexpr float kLastSample = 3.0f;
+    constexpr float kFirstSample = -kLastSample;
+    constexpr float kDeltaBetweenSamples = (kLastSample - kFirstSample) / (kNumSamples - 1);
+    constexpr float kTolerance = 0.00001f;
+
+    float input[kNumSamples];
+    float output[kNumSamples];
+    SourceFloat sourceFloat{1};
+    Limiter limiter{1};
+    SinkFloat sinkFloat{1};
+
+    for (int i = 0; i < kNumSamples; i++) {
+        input[i] = kFirstSample + i * kDeltaBetweenSamples;
+    }
+
+    const int numInputFrames = std::size(input);
+    sourceFloat.setData(input, numInputFrames);
+
+    sourceFloat.output.connect(&limiter.input);
+    limiter.output.connect(&sinkFloat.input);
+
+    const int numOutputFrames = std::size(output);
+    int32_t numRead = sinkFloat.read(output, numOutputFrames);
+    ASSERT_EQ(numInputFrames, numRead);
+
+    for (int i = 0; i < numRead; i++) {
+        // limiter must be symmetric wrt 0.
+        EXPECT_NEAR(output[i], -output[kNumSamples - i - 1], kTolerance);
+        if (i > 0) {
+            EXPECT_GE(output[i], output[i - 1]); // limiter must be monotonic
+        }
+        if (input[i] == 0.f) {
+            EXPECT_EQ(0.f, output[i]);
+        } else if (input[i] > 0.0f) {
+            EXPECT_GE(output[i], 0.0f);
+            EXPECT_LE(output[i], M_SQRT2); // limiter actually limits
+            EXPECT_LE(output[i], input[i]); // a limiter, gain <= 1
+        } else {
+            EXPECT_LE(output[i], 0.0f);
+            EXPECT_GE(output[i], -M_SQRT2); // limiter actually limits
+            EXPECT_GE(output[i], input[i]); // a limiter, gain <= 1
+        }
+        if (-1.f <= input[i] && input[i] <= 1.f) {
+            EXPECT_EQ(input[i], output[i]);
+        }
+    }
+}
+
+TEST(test_flowgraph, module_limiter_nan) {
+    constexpr int kArbitraryOutputSize = 100;
+    static const float input[] = {NAN, 0.5f, NAN, NAN, -10.0f, NAN};
+    static const float expected[] = {0.0f, 0.5f, 0.5f, 0.5f, -M_SQRT2, -M_SQRT2};
+    constexpr float tolerance = 0.00001f;
+    float output[kArbitraryOutputSize];
+    SourceFloat sourceFloat{1};
+    Limiter limiter{1};
+    SinkFloat sinkFloat{1};
+
+    const int numInputFrames = std::size(input);
+    sourceFloat.setData(input, numInputFrames);
+
+    sourceFloat.output.connect(&limiter.input);
+    limiter.output.connect(&sinkFloat.input);
+
+    const int numOutputFrames = std::size(output);
+    int32_t numRead = sinkFloat.read(output, numOutputFrames);
+    ASSERT_EQ(numInputFrames, numRead);
+
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_NEAR(expected[i], output[i], tolerance);
+    }
+}
diff --git a/media/libaaudio/tests/test_mmap_path.cpp b/media/libaaudio/tests/test_mmap_path.cpp
new file mode 100644
index 0000000..c8376f6
--- /dev/null
+++ b/media/libaaudio/tests/test_mmap_path.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_mmap_path"
+
+#include <vector>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+#include <android/log.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AudioSystem.h>
+
+#include <gtest/gtest.h>
+
+#include "utility/AAudioUtilities.h"
+
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
+/**
+ * Open a stream via the AAudio API with the performance mode set to LOW_LATENCY. When MMAP is
+ * supported, the stream is expected to use the MMAP path instead of the legacy path. This is
+ * guaranteed on Pixel devices, but may not be guaranteed on other vendor devices.
+ * @param direction the direction for the stream
+ */
+static void openStreamAndVerify(aaudio_direction_t direction) {
+    std::vector<AudioMMapPolicyInfo> policyInfos;
+    ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::DEFAULT, &policyInfos));
+    if (AAudio_getAAudioPolicy(policyInfos) == AAUDIO_POLICY_NEVER) {
+        // Query the system MMAP policy; if it is NEVER, MMAP is not supported at all.
+        // In that case, there is no need to run the test. The query is added to avoid
+        // accidentally running the test on a device that doesn't support MMAP,
+        // such as cuttlefish.
+        ALOGD("Skip test as mmap is not supported");
+        return;
+    }
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    EXPECT_EQ(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAudioStream_getPerformanceMode(aaudioStream));
+    EXPECT_TRUE(AAudioStream_isMMapUsed(aaudioStream));
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+TEST(test_mmap_path, input) {
+    openStreamAndVerify(AAUDIO_DIRECTION_INPUT);
+}
+
+TEST(test_mmap_path, output) {
+    openStreamAndVerify(AAUDIO_DIRECTION_OUTPUT);
+}
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 9fb0290..5ea4926 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -417,7 +417,7 @@
 }
 
 ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
-        const media::AudioPortConfig& aidl) {
+        const media::AudioPortConfigFw& aidl) {
     audio_port_config legacy{};
     legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
     legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
@@ -457,9 +457,9 @@
     return legacy;
 }
 
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfig(
         const audio_port_config& legacy) {
-    media::AudioPortConfig aidl;
+    media::AudioPortConfigFw aidl;
     aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
     aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
     aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
@@ -492,7 +492,7 @@
 }
 
 ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
-        const media::AudioPatch& aidl) {
+        const media::AudioPatchFw& aidl) {
     struct audio_patch legacy;
     legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
     legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
@@ -514,9 +514,9 @@
     return legacy;
 }
 
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatch(
         const struct audio_patch& legacy) {
-    media::AudioPatch aidl;
+    media::AudioPatchFw aidl;
     aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
 
     if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
@@ -930,7 +930,7 @@
 }
 
 ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
+aidl2legacy_AudioPort_audio_port_v7(const media::AudioPortFw& aidl) {
     audio_port_v7 legacy;
     legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
     legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
@@ -975,9 +975,9 @@
     return legacy;
 }
 
-ConversionResult<media::AudioPort>
+ConversionResult<media::AudioPortFw>
 legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
-    media::AudioPort aidl;
+    media::AudioPortFw aidl;
     aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
     aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
     aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index c0abb70..4679731 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -48,7 +48,7 @@
 cc_library {
     name: "libaudiopolicy",
     srcs: [
-        "AudioAttributes.cpp",
+        "VolumeGroupAttributes.cpp",
         "AudioPolicy.cpp",
         "AudioProductStrategy.cpp",
         "AudioVolumeGroup.cpp",
@@ -287,11 +287,11 @@
         "aidl/android/media/AudioHalVersion.aidl",
         "aidl/android/media/AudioIoConfigEvent.aidl",
         "aidl/android/media/AudioIoDescriptor.aidl",
-        "aidl/android/media/AudioPatch.aidl",
+        "aidl/android/media/AudioPatchFw.aidl",
         "aidl/android/media/AudioPlaybackRate.aidl",
-        "aidl/android/media/AudioPort.aidl",
+        "aidl/android/media/AudioPortFw.aidl",
         "aidl/android/media/AudioPortSys.aidl",
-        "aidl/android/media/AudioPortConfig.aidl",
+        "aidl/android/media/AudioPortConfigFw.aidl",
         "aidl/android/media/AudioPortConfigSys.aidl",
         "aidl/android/media/AudioPortDeviceExtSys.aidl",
         "aidl/android/media/AudioPortExtSys.aidl",
@@ -333,6 +333,8 @@
     srcs: [
         "aidl/android/media/AudioAttributesEx.aidl",
         "aidl/android/media/AudioMix.aidl",
+        "aidl/android/media/AudioMixerAttributesInternal.aidl",
+        "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioMixCallbackFlag.aidl",
         "aidl/android/media/AudioMixMatchCriterion.aidl",
         "aidl/android/media/AudioMixMatchCriterionValue.aidl",
@@ -392,6 +394,10 @@
         "aidl/android/media/IAudioRecord.aidl",
         "aidl/android/media/IAudioTrack.aidl",
         "aidl/android/media/IAudioTrackCallback.aidl",
+
+        "aidl/android/media/ISoundDose.aidl",
+        "aidl/android/media/ISoundDoseCallback.aidl",
+        "aidl/android/media/SoundDoseRecord.aidl",
     ],
     imports: [
         "android.media.audio.common.types-V2",
@@ -483,3 +489,21 @@
         },
     },
 }
+
+aidl_interface {
+    name: "sounddose-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/ISoundDose.aidl",
+        "aidl/android/media/ISoundDoseCallback.aidl",
+        "aidl/android/media/SoundDoseRecord.aidl",
+    ],
+
+    double_loadable: true,
+    backend: {
+        java: {
+            sdk_version: "module_current",
+        },
+    },
+}
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index 4d2b6b1..6bb0cbe 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -57,6 +57,10 @@
     case RULE_EXCLUDE_USERID:
         mValue.mUserId = (int) parcel->readInt32();
         break;
+    case RULE_MATCH_AUDIO_SESSION_ID:
+    case RULE_EXCLUDE_AUDIO_SESSION_ID:
+        mValue.mAudioSessionId = (audio_session_t) parcel->readInt32();
+        break;
     default:
         ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
         return BAD_VALUE;
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index ecd423a..381faf6 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -18,7 +18,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 #include <media/AudioProductStrategy.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
@@ -42,8 +42,8 @@
     aidl.name = legacy.getName();
     aidl.audioAttributes = VALUE_OR_RETURN(
             convertContainer<std::vector<media::AudioAttributesEx>>(
-                    legacy.getAudioAttributes(),
-                    legacy2aidl_AudioAttributes_AudioAttributesEx));
+                    legacy.getVolumeGroupAttributes(),
+                    legacy2aidl_VolumeGroupAttributes_AudioAttributesEx));
     aidl.id = VALUE_OR_RETURN(legacy2aidl_product_strategy_t_int32_t(legacy.getId()));
     return aidl;
 }
@@ -53,9 +53,9 @@
     return AudioProductStrategy(
             aidl.name,
             VALUE_OR_RETURN(
-                    convertContainer<std::vector<AudioAttributes>>(
+                    convertContainer<std::vector<VolumeGroupAttributes>>(
                             aidl.audioAttributes,
-                            aidl2legacy_AudioAttributesEx_AudioAttributes)),
+                            aidl2legacy_AudioAttributesEx_VolumeGroupAttributes)),
             VALUE_OR_RETURN(aidl2legacy_int32_t_product_strategy_t(aidl.id)));
 }
 
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 69d73ad..12bcec6 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -560,6 +560,21 @@
     return NO_ERROR;
 }
 
+uint32_t AudioRecord::getHalSampleRate() const
+{
+    return mHalSampleRate;
+}
+
+uint32_t AudioRecord::getHalChannelCount() const
+{
+    return mHalChannelCount;
+}
+
+audio_format_t AudioRecord::getHalFormat() const
+{
+    return mHalFormat;
+}
+
 status_t AudioRecord::getMarkerPosition(uint32_t *marker) const
 {
     if (marker == NULL) {
@@ -878,6 +893,9 @@
     mServerFrameSize = audio_bytes_per_frame(
             audio_channel_count_from_in_mask(mServerConfig.channel_mask), mServerConfig.format);
     mServerSampleSize = audio_bytes_per_sample(mServerConfig.format);
+    mHalSampleRate = output.halConfig.sample_rate;
+    mHalChannelCount = audio_channel_count_from_in_mask(output.halConfig.channel_mask);
+    mHalFormat = output.halConfig.format;
 
     if (output.cblk == 0) {
         errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 141e1e3..5faa8f3 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -34,6 +34,7 @@
 
 #include <system/audio.h>
 #include <android/media/GetInputForAttrResponse.h>
+#include <android/media/AudioMixerAttributesInternal.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -1015,12 +1016,13 @@
                                        audio_session_t session,
                                        audio_stream_type_t* stream,
                                        const AttributionSourceState& attributionSource,
-                                       const audio_config_t* config,
+                                       audio_config_t* config,
                                        audio_output_flags_t flags,
                                        audio_port_handle_t* selectedDeviceId,
                                        audio_port_handle_t* portId,
                                        std::vector<audio_io_handle_t>* secondaryOutputs,
-                                       bool *isSpatialized) {
+                                       bool *isSpatialized,
+                                       bool *isBitPerfect) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1057,9 +1059,18 @@
 
     media::GetOutputForAttrResponse responseAidl;
 
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+    status_t status = statusTFromBinderStatus(
             aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
-                                  selectedDeviceIdAidl, &responseAidl)));
+                                  selectedDeviceIdAidl, &responseAidl));
+    if (status != NO_ERROR) {
+        config->format = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
+        config->channel_mask = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    responseAidl.configBase.channelMask, false /*isInput*/));
+        config->sample_rate = responseAidl.configBase.sampleRate;
+        return status;
+    }
 
     *output = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_io_handle_t(responseAidl.output));
@@ -1074,6 +1085,9 @@
     *secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
             responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
     *isSpatialized = responseAidl.isSpatialized;
+    *isBitPerfect = responseAidl.isBitPerfect;
+    *attr = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(responseAidl.attr));
 
     return OK;
 }
@@ -1114,7 +1128,7 @@
                                       audio_unique_id_t riid,
                                       audio_session_t session,
                                       const AttributionSourceState &attributionSource,
-                                      const audio_config_base_t* config,
+                                      audio_config_base_t* config,
                                       audio_input_flags_t flags,
                                       audio_port_handle_t* selectedDeviceId,
                                       audio_port_handle_t* portId) {
@@ -1151,9 +1165,14 @@
 
     media::GetInputForAttrResponse response;
 
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+    status_t status = statusTFromBinderStatus(
             aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, attributionSource,
-                configAidl, flagsAidl, selectedDeviceIdAidl, &response)));
+                configAidl, flagsAidl, selectedDeviceIdAidl, &response));
+    if (status != NO_ERROR) {
+        *config = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioConfigBase_audio_config_base_t(response.config, true /*isInput*/));
+        return status;
+    }
 
     *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
     *selectedDeviceId = VALUE_OR_RETURN_STATUS(
@@ -1322,7 +1341,7 @@
     return result.value_or(PRODUCT_STRATEGY_NONE);
 }
 
-status_t AudioSystem::getDevicesForAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getDevicesForAttributes(const audio_attributes_t& aa,
                                               AudioDeviceTypeAddrVector* devices,
                                               bool forVolume) {
     if (devices == nullptr) {
@@ -1331,8 +1350,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+             legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     std::vector<AudioDevice> retAidl;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, forVolume, &retAidl)));
@@ -1532,7 +1551,7 @@
             legacy2aidl_audio_port_type_t_AudioPortType(type));
     Int numPortsAidl;
     numPortsAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_ports));
-    std::vector<media::AudioPort> portsAidl;
+    std::vector<media::AudioPortFw> portsAidl;
     int32_t generationAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1551,7 +1570,7 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPort portAidl;
+    media::AudioPortFw portAidl;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->getAudioPort(port->id, &portAidl)));
     *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(portAidl));
@@ -1567,7 +1586,7 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(
+    media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_patch_AudioPatch(*patch));
     int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
     RETURN_STATUS_IF_ERROR(
@@ -1598,7 +1617,7 @@
 
     Int numPatchesAidl;
     numPatchesAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
-    std::vector<media::AudioPatch> patchesAidl;
+    std::vector<media::AudioPatchFw> patchesAidl;
     int32_t generationAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1618,7 +1637,7 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
+    media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_config_AudioPortConfig(*config));
     return statusTFromBinderStatus(aps->setAudioPortConfig(configAidl));
 }
@@ -1839,7 +1858,7 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPortConfig sourceAidl = VALUE_OR_RETURN_STATUS(
+    media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_config_AudioPortConfig(*source));
     media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attributes));
@@ -2079,7 +2098,7 @@
     AudioProductStrategyVector strategies;
     listAudioProductStrategies(strategies);
     for (const auto& strategy : strategies) {
-        auto attrVect = strategy.getAudioAttributes();
+        auto attrVect = strategy.getVolumeGroupAttributes();
         auto iter = std::find_if(begin(attrVect), end(attrVect), [&stream](const auto& attributes) {
             return attributes.getStreamType() == stream;
         });
@@ -2093,7 +2112,7 @@
 
 audio_stream_type_t AudioSystem::attributesToStreamType(const audio_attributes_t& attr) {
     product_strategy_t psId;
-    status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(AudioAttributes(attr), psId);
+    status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(attr, psId);
     if (ret != NO_ERROR) {
         ALOGE("no strategy found for attributes %s", toString(attr).c_str());
         return AUDIO_STREAM_MUSIC;
@@ -2102,7 +2121,7 @@
     listAudioProductStrategies(strategies);
     for (const auto& strategy : strategies) {
         if (strategy.getId() == psId) {
-            auto attrVect = strategy.getAudioAttributes();
+            auto attrVect = strategy.getVolumeGroupAttributes();
             auto iter = std::find_if(begin(attrVect), end(attrVect), [&attr](const auto& refAttr) {
                 return AudioProductStrategy::attributesMatches(
                         refAttr.getAttributes(), attr);
@@ -2123,14 +2142,14 @@
     return AUDIO_STREAM_MUSIC;
 }
 
-status_t AudioSystem::getProductStrategyFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
                                                             product_strategy_t& productStrategy,
                                                             bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     int32_t productStrategyAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2153,14 +2172,14 @@
     return OK;
 }
 
-status_t AudioSystem::getVolumeGroupFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
                                                         volume_group_t& volumeGroup,
                                                         bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     int32_t volumeGroupAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getVolumeGroupFromAudioAttributes(aaAidl, fallbackOnDefault, &volumeGroupAidl)));
@@ -2360,6 +2379,20 @@
     return OK;
 }
 
+status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                            sp<media::ISoundDose>* soundDose) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    if (soundDose == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(af->getSoundDoseInterface(callback, soundDose));
+    return OK;
+}
+
 status_t AudioSystem::getDirectPlaybackSupport(const audio_attributes_t *attr,
                                                const audio_config_t *config,
                                                audio_direct_mode_t* directMode) {
@@ -2426,6 +2459,23 @@
     return af->getSupportedLatencyModes(output, modes);
 }
 
+status_t AudioSystem::setBluetoothLatencyModesEnabled(bool enabled) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setBluetoothLatencyModesEnabled(enabled);
+}
+
+status_t AudioSystem::supportsBluetoothLatencyModes(
+        bool *support) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->supportsBluetoothLatencyModes(support);
+}
+
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
 public:
@@ -2515,6 +2565,84 @@
     return af->getAAudioHardwareBurstMinUsec();
 }
 
+status_t AudioSystem::getSupportedMixerAttributes(
+        audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    std::vector<media::AudioMixerAttributesInternal> _aidlReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getSupportedMixerAttributes(portIdAidl, &_aidlReturn)));
+    *mixerAttrs = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<audio_mixer_attributes_t>>(
+                    _aidlReturn,
+                    aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t));
+    return OK;
+}
+
+status_t AudioSystem::setPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                  audio_port_handle_t portId,
+                                                  uid_t uid,
+                                                  const audio_mixer_attributes_t *mixerAttr) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::AudioMixerAttributesInternal mixerAttrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(*mixerAttr));
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+
+    return statusTFromBinderStatus(
+            aps->setPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl, mixerAttrAidl));
+}
+
+status_t AudioSystem::getPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        std::optional<audio_mixer_attributes_t> *mixerAttr) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    std::optional<media::AudioMixerAttributesInternal> _aidlReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getPreferredMixerAttributes(attrAidl, portIdAidl, &_aidlReturn)));
+
+    if (_aidlReturn.has_value()) {
+         *mixerAttr = VALUE_OR_RETURN_STATUS(
+                 aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+                         _aidlReturn.value()));
+    }
+    return NO_ERROR;
+}
+
+status_t AudioSystem::clearPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                    audio_port_handle_t portId,
+                                                    uid_t uid) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(
+            aps->clearPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl));
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 96fc544..22bd2a3 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -29,6 +29,7 @@
 #include <audio_utils/clock.h>
 #include <audio_utils/primitives.h>
 #include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
 #include <media/AudioTrack.h>
 #include <utils/Log.h>
 #include <private/media/AudioTrackShared.h>
@@ -42,7 +43,9 @@
 
 #define WAIT_PERIOD_MS                  10
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
+
 static const int kMaxLoopCountNotifications = 32;
+static constexpr char kAudioServiceName[] = "audio";
 
 using ::android::aidl_utils::statusTFromBinderStatus;
 using ::android::base::StringPrintf;
@@ -1230,6 +1233,21 @@
     return mOriginalSampleRate;
 }
 
+uint32_t AudioTrack::getHalSampleRate() const
+{
+    return mAfSampleRate;
+}
+
+uint32_t AudioTrack::getHalChannelCount() const
+{
+    return mAfChannelCount;
+}
+
+audio_format_t AudioTrack::getHalFormat() const
+{
+    return mAfFormat;
+}
+
 status_t AudioTrack::setDualMonoMode(audio_dual_mono_mode_t mode)
 {
     AutoMutex lock(mLock);
@@ -1885,6 +1903,8 @@
 
     mAfFrameCount = output.afFrameCount;
     mAfSampleRate = output.afSampleRate;
+    mAfChannelCount = audio_channel_count_from_out_mask(output.afChannelMask);
+    mAfFormat = output.afFormat;
     mAfLatency = output.afLatencyMs;
 
     mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
@@ -1949,6 +1969,9 @@
     }
 
     mPortId = output.portId;
+    // notify the upper layers about the new portId
+    triggerPortIdUpdate_l();
+
     // We retain a copy of the I/O handle, but don't own the reference
     mOutput = output.outputId;
     mRefreshRemaining = true;
@@ -3511,12 +3534,34 @@
     if (mPlayerIId == playerIId) return;
 
     mPlayerIId = playerIId;
+    triggerPortIdUpdate_l();
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID)
         .set(AMEDIAMETRICS_PROP_PLAYERIID, playerIId)
         .record();
 }
 
+void AudioTrack::triggerPortIdUpdate_l() {
+    if (mAudioManager == nullptr) {
+        // use checkService() to avoid blocking if audio service is not up yet
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16(kAudioServiceName));
+        if (binder == nullptr) {
+            ALOGE("%s(%d): binding to audio service failed.",
+                  __func__,
+                  mPlayerIId);
+            return;
+        }
+
+        mAudioManager = interface_cast<IAudioManager>(binder);
+    }
+
+    // The first time the track is created we do not yet have a valid piid.
+    if (mPlayerIId != PLAYER_PIID_INVALID) {
+        mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, mPortId);
+    }
+}
+
 status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
 {
 
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index ab95246..978599e 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -23,7 +23,6 @@
 
 #include <media/AidlConversion.h>
 #include <media/AudioVolumeGroup.h>
-#include <media/AudioAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index f290453..ba3e703 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -112,6 +112,10 @@
     aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
     aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
     aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
+    aidl.afChannelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(afChannelMask, false /*isInput*/));
+    aidl.afFormat = VALUE_OR_RETURN(
+            legacy2aidl_audio_format_t_AudioFormatDescription(afFormat));
     aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
     aidl.audioTrack = audioTrack;
@@ -135,6 +139,11 @@
     legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
     legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
     legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
+    legacy.afChannelMask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.afChannelMask,
+                                                                false /*isInput*/));
+    legacy.afFormat = VALUE_OR_RETURN(
+            aidl2legacy_AudioFormatDescription_audio_format_t(aidl.afFormat));
     legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
     legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.audioTrack = aidl.audioTrack;
@@ -199,6 +208,8 @@
     aidl.audioRecord = audioRecord;
     aidl.serverConfig = VALUE_OR_RETURN(
             legacy2aidl_audio_config_base_t_AudioConfigBase(serverConfig, true /*isInput*/));
+    aidl.halConfig = VALUE_OR_RETURN(
+        legacy2aidl_audio_config_base_t_AudioConfigBase(halConfig, true /*isInput*/));
     return aidl;
 }
 
@@ -221,6 +232,8 @@
     legacy.audioRecord = aidl.audioRecord;
     legacy.serverConfig = VALUE_OR_RETURN(
             aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.serverConfig, true /*isInput*/));
+    legacy.halConfig = VALUE_OR_RETURN(
+        aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.halConfig, true /*isInput*/));
     return legacy;
 }
 
@@ -480,12 +493,6 @@
     return statusTFromBinderStatus(mDelegate->closeInput(inputAidl));
 }
 
-status_t AudioFlingerClientAdapter::invalidateStream(audio_stream_type_t stream) {
-    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-    return statusTFromBinderStatus(mDelegate->invalidateStream(streamAidl));
-}
-
 status_t AudioFlingerClientAdapter::setVoiceVolume(float volume) {
     return statusTFromBinderStatus(mDelegate->setVoiceVolume(volume));
 }
@@ -666,8 +673,9 @@
 }
 
 status_t AudioFlingerClientAdapter::getAudioPort(struct audio_port_v7* port) {
-    media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
-    media::AudioPort aidlRet;
+    media::AudioPortFw portAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPort(*port));
+    media::AudioPortFw aidlRet;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mDelegate->getAudioPort(portAidl, &aidlRet)));
     *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(aidlRet));
@@ -676,7 +684,8 @@
 
 status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
                                                      audio_patch_handle_t* handle) {
-    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
+    media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_patch_AudioPatch(*patch));
     int32_t aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(
                     AUDIO_PATCH_HANDLE_NONE));
     if (handle != nullptr) {
@@ -697,7 +706,7 @@
 
 status_t AudioFlingerClientAdapter::listAudioPatches(unsigned int* num_patches,
                                                      struct audio_patch* patches) {
-    std::vector<media::AudioPatch> aidlRet;
+    std::vector<media::AudioPatchFw> aidlRet;
     int32_t maxPatches = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mDelegate->listAudioPatches(maxPatches, &aidlRet)));
@@ -707,7 +716,7 @@
 }
 
 status_t AudioFlingerClientAdapter::setAudioPortConfig(const struct audio_port_config* config) {
-    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
+    media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_config_AudioPortConfig(*config));
     return statusTFromBinderStatus(mDelegate->setAudioPortConfig(configAidl));
 }
@@ -805,7 +814,7 @@
 
 status_t AudioFlingerClientAdapter::setDeviceConnectedState(
         const struct audio_port_v7 *port, bool connected) {
-    media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+    media::AudioPortFw aidlPort = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_v7_AudioPort(*port));
     return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
 }
@@ -837,6 +846,35 @@
     return NO_ERROR;
 }
 
+status_t AudioFlingerClientAdapter::setBluetoothLatencyModesEnabled(bool enabled) {
+    return statusTFromBinderStatus(mDelegate->setBluetoothLatencyModesEnabled(enabled));
+}
+
+status_t AudioFlingerClientAdapter::supportsBluetoothLatencyModes(bool* support) {
+    if (support == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->supportsBluetoothLatencyModes(support)));
+
+    return NO_ERROR;
+}
+
+status_t AudioFlingerClientAdapter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback> &callback,
+        sp<media::ISoundDose>* soundDose) {
+    return statusTFromBinderStatus(mDelegate->getSoundDoseInterface(callback, soundDose));
+}
+
+status_t AudioFlingerClientAdapter::invalidateTracks(
+        const std::vector<audio_port_handle_t>& portIds) {
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    return statusTFromBinderStatus(mDelegate->invalidateTracks(portIdsAidl));
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1069,12 +1107,6 @@
     return Status::fromStatusT(mDelegate->closeInput(inputLegacy));
 }
 
-Status AudioFlingerServerAdapter::invalidateStream(AudioStreamType stream) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    return Status::fromStatusT(mDelegate->invalidateStream(streamLegacy));
-}
-
 Status AudioFlingerServerAdapter::setVoiceVolume(float volume) {
     return Status::fromStatusT(mDelegate->setVoiceVolume(volume));
 }
@@ -1211,15 +1243,15 @@
     return Status::fromStatusT(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
 }
 
-Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPort& port,
-                                               media::AudioPort* _aidl_return) {
+Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPortFw& port,
+                                               media::AudioPortFw* _aidl_return) {
     audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
     RETURN_BINDER_IF_ERROR(mDelegate->getAudioPort(&portLegacy));
     *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPort(portLegacy));
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
+Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatchFw& patch,
                                                    int32_t* _aidl_return) {
     audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
     audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
@@ -1236,7 +1268,7 @@
 }
 
 Status AudioFlingerServerAdapter::listAudioPatches(int32_t maxCount,
-                            std::vector<media::AudioPatch>* _aidl_return) {
+                            std::vector<media::AudioPatchFw>* _aidl_return) {
     unsigned int count = VALUE_OR_RETURN_BINDER(convertIntegral<unsigned int>(maxCount));
     count = std::min(count, static_cast<unsigned int>(MAX_ITEMS_PER_LIST));
     std::unique_ptr<audio_patch[]> patchesLegacy(new audio_patch[count]);
@@ -1248,7 +1280,7 @@
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfig& config) {
+Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfigFw& config) {
     audio_port_config configLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioPortConfig_audio_port_config(config));
     return Status::fromStatusT(mDelegate->setAudioPortConfig(&configLegacy));
@@ -1328,7 +1360,7 @@
 }
 
 Status AudioFlingerServerAdapter::setDeviceConnectedState(
-        const media::AudioPort& port, bool connected) {
+        const media::AudioPortFw& port, bool connected) {
     audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
     return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
 }
@@ -1357,4 +1389,27 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setBluetoothLatencyModesEnabled(bool enabled) {
+    return Status::fromStatusT(mDelegate->setBluetoothLatencyModesEnabled(enabled));
+}
+
+Status AudioFlingerServerAdapter::supportsBluetoothLatencyModes(bool *support) {
+    return Status::fromStatusT(mDelegate->supportsBluetoothLatencyModes(support));
+}
+
+Status AudioFlingerServerAdapter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback,
+        sp<media::ISoundDose>* soundDose)
+{
+    return Status::fromStatusT(mDelegate->getSoundDoseInterface(callback, soundDose));
+}
+
+Status AudioFlingerServerAdapter::invalidateTracks(const std::vector<int32_t>& portIds) {
+    std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<audio_port_handle_t>>(
+                    portIds, aidl2legacy_int32_t_audio_port_handle_t));
+    RETURN_BINDER_IF_ERROR(mDelegate->invalidateTracks(portIdsLegacy));
+    return Status::ok();
+}
+
 } // namespace android
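
A minimal usage sketch of the new track-invalidation entry point exposed through the client adapter above. It assumes the caller reaches the service via AudioSystem::get_audio_flinger() (part of libaudioclient, not shown in this change); the adapter converts the legacy handles to int32_t and forwards the call over binder.

#include <vector>

#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>
#include <utils/Errors.h>

namespace android {

// Invalidate every track attached to the given port ids.
status_t invalidateClientTracks(const std::vector<audio_port_handle_t>& portIds) {
    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
    if (af == nullptr) {
        return NO_INIT;  // audio server not available
    }
    return af->invalidateTracks(portIds);
}

}  // namespace android
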
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index 446a58c..651255a 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -58,6 +58,20 @@
     }
 }
 
+void PlayerBase::triggerPortIdUpdate(audio_port_handle_t portId) const {
+    if (mAudioManager == nullptr) {
+        ALOGE("%s: no audio service, player %d will not update portId %d",
+              __func__,
+              mPIId,
+              portId);
+        return;
+    }
+
+    if (mPIId != PLAYER_PIID_INVALID && portId != AUDIO_PORT_HANDLE_NONE) {
+        mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, portId);
+    }
+}
+
 void PlayerBase::baseDestroy() {
     serviceReleasePlayer();
     if (mAudioManager != 0) {

diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 520f09c..60b08fa 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -158,6 +158,11 @@
                     convertIntegral<int>(UNION_GET(aidl, userId).value()));
             *rule |= RULE_MATCH_USERID;
             return legacy;
+        case media::AudioMixMatchCriterionValue::audioSessionId:
+            legacy.mAudioSessionId = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_session_t(UNION_GET(aidl, audioSessionId).value()));
+            *rule |= RULE_MATCH_AUDIO_SESSION_ID;
+            return legacy;
     }
     return unexpected(BAD_VALUE);
 }
@@ -185,7 +190,10 @@
         case RULE_MATCH_USERID:
             UNION_SET(aidl, userId, VALUE_OR_RETURN(convertReinterpret<uint32_t>(legacy.mUserId)));
             break;
-
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            UNION_SET(aidl, audioSessionId,
+                VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.mAudioSessionId)));
+            break;
         default:
             return unexpected(BAD_VALUE);
     }
@@ -464,4 +472,51 @@
     return unexpected(BAD_VALUE);
 }
 
+ConversionResult<audio_mixer_behavior_t>
+aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(media::AudioMixerBehavior aidl) {
+    switch (aidl) {
+        case media::AudioMixerBehavior::DEFAULT:
+            return AUDIO_MIXER_BEHAVIOR_DEFAULT;
+        case media::AudioMixerBehavior::BIT_PERFECT:
+            return AUDIO_MIXER_BEHAVIOR_BIT_PERFECT;
+        case media::AudioMixerBehavior::INVALID:
+            return AUDIO_MIXER_BEHAVIOR_INVALID;
+    }
+    return unexpected(BAD_VALUE);
+}
+ConversionResult<media::AudioMixerBehavior>
+legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(audio_mixer_behavior_t legacy) {
+    switch (legacy) {
+        case AUDIO_MIXER_BEHAVIOR_DEFAULT:
+            return media::AudioMixerBehavior::DEFAULT;
+        case AUDIO_MIXER_BEHAVIOR_BIT_PERFECT:
+            return media::AudioMixerBehavior::BIT_PERFECT;
+        case AUDIO_MIXER_BEHAVIOR_INVALID:
+            return media::AudioMixerBehavior::INVALID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_mixer_attributes_t>
+aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+        const media::AudioMixerAttributesInternal& aidl) {
+    audio_mixer_attributes_t legacy = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+    legacy.config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config, false /*isInput*/));
+    legacy.mixer_behavior = VALUE_OR_RETURN(
+            aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(aidl.mixerBehavior));
+    return legacy;
+}
+ConversionResult<media::AudioMixerAttributesInternal>
+legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(
+        const audio_mixer_attributes& legacy) {
+    media::AudioMixerAttributesInternal aidl;
+    aidl.config = VALUE_OR_RETURN(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(legacy.config, false /*isInput*/));
+    aidl.mixerBehavior = VALUE_OR_RETURN(
+            legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(legacy.mixer_behavior));
+    return aidl;
+}
+
 }  // namespace android
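
A minimal round-trip sketch of the mixer-attribute conversions added above, assuming the matching declarations exist in PolicyAidlConversion.h and that ConversionResult exposes ok()/value(), as already relied on by this file's VALUE_OR_RETURN macros.

#include <media/PolicyAidlConversion.h>
#include <system/audio.h>

namespace android {

// Convert a legacy audio_mixer_attributes_t to AIDL and back, checking that the
// BIT_PERFECT behavior survives the round trip.
bool mixerAttributesRoundTrip() {
    audio_mixer_attributes_t legacy = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
    legacy.mixer_behavior = AUDIO_MIXER_BEHAVIOR_BIT_PERFECT;

    auto aidl = legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(legacy);
    if (!aidl.ok()) return false;

    auto roundTrip = aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
            aidl.value());
    return roundTrip.ok()
            && roundTrip.value().mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT;
}

}  // namespace android
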
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 10f9d9b..60bb4f0 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -7,14 +7,6 @@
       "name": "audio_aidl_status_tests"
     },
     {
-      "name": "CtsNativeMediaAAudioTestCases",
-      "options" : [
-        {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
-        }
-      ]
-    },
-    {
       "name": "audiorecord_tests"
     },
     {
@@ -31,6 +23,23 @@
     },
     {
       "name": "audiosystem_tests"
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
+        }
+      ]
     }
   ],
   "postsubmit": [
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/VolumeGroupAttributes.cpp
similarity index 73%
rename from media/libaudioclient/AudioAttributes.cpp
rename to media/libaudioclient/VolumeGroupAttributes.cpp
index 260c06c..2de4667 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/VolumeGroupAttributes.cpp
@@ -14,33 +14,33 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioAttributes"
+#define LOG_TAG "VolumeGroupAttributes"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #include <binder/Parcel.h>
 
 #include <media/AidlConversion.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
 
-status_t AudioAttributes::readFromParcel(const Parcel* parcel) {
+status_t VolumeGroupAttributes::readFromParcel(const Parcel* parcel) {
     media::AudioAttributesEx aidl;
     RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
-    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_AudioAttributes(aidl));
+    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(aidl));
     return OK;
 }
 
-status_t AudioAttributes::writeToParcel(Parcel* parcel) const {
+status_t VolumeGroupAttributes::writeToParcel(Parcel* parcel) const {
     media::AudioAttributesEx aidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(*this));
+            legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(*this));
     return aidl.writeToParcel(parcel);
 }
 
 ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy) {
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy) {
     media::AudioAttributesEx aidl;
     aidl.attributes = VALUE_OR_RETURN(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(legacy.getAttributes()));
@@ -50,9 +50,9 @@
     return aidl;
 }
 
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl) {
-    return AudioAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl) {
+    return VolumeGroupAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
                            VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
                                    aidl.streamType)),
                            VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
index b01f902..5dd898c 100644
--- a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
 
@@ -26,7 +26,7 @@
 parcelable AudioIoDescriptor {
     /** Interpreted as audio_io_handle_t. */
     int ioHandle;
-    AudioPatch patch;
+    AudioPatchFw patch;
     boolean isInput;
     int samplingRate;
     AudioFormatDescription format;
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
index 921a93a..0f373a2 100644
--- a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
@@ -28,4 +28,6 @@
     /** Interpreted as uid_t. */
     int uid;
     int userId;
+    /** Interpreted as audio_session_t. */
+    int audioSessionId;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl
new file mode 100644
index 0000000..ed25060
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMixerBehavior;
+import android.media.audio.common.AudioConfigBase;
+
+/**
+ * This class is used to contain information about the audio mixer.
+ * The "Internal" suffix of this type name is to disambiguate it from the
+ * android.media.AudioMixerAttributes SDK type.
+ *
+ * {@hide}
+ */
+parcelable AudioMixerAttributesInternal {
+    AudioConfigBase config;
+    AudioMixerBehavior mixerBehavior;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl b/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl
new file mode 100644
index 0000000..38f50d6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Defines the mixer behavior that can be used when setting mixer attributes.
+ */
+@Backing(type="int")
+enum AudioMixerBehavior {
+    /**
+     * The mixer behavior is invalid.
+     */
+    INVALID = -1,
+    /**
+     * The mixer behavior that follows the platform default, which is to mix audio from
+     * different sources.
+     */
+    DEFAULT = 0,
+    /**
+     * The audio data in the mixer will be bit-perfect as long as possible.
+     */
+    BIT_PERFECT = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
similarity index 74%
rename from media/libaudioclient/aidl/android/media/AudioPatch.aidl
rename to media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
index 8519faf..9ec3fa9 100644
--- a/media/libaudioclient/aidl/android/media/AudioPatch.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
@@ -16,17 +16,19 @@
 
 package android.media;
 
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
 
 /**
  * {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
+ * Its sources and sinks carry the framework version of AudioPortConfig (AudioPortConfigFw).
  */
-parcelable AudioPatch {
+parcelable AudioPatchFw {
     /**
      * Patch unique ID.
      * Interpreted as audio_patch_handle_t.
      */
     int id;
-    AudioPortConfig[] sources;
-    AudioPortConfig[] sinks;
+    AudioPortConfigFw[] sources;
+    AudioPortConfigFw[] sinks;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
similarity index 89%
rename from media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
index 3a4ca31..e7565d7 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
@@ -21,8 +21,9 @@
 
 /**
  * {@hide}
+ * Suffixed with Fw to avoid a name conflict with an SDK class.
  */
-parcelable AudioPortConfig {
+parcelable AudioPortConfigFw {
     AudioPortConfig hal;
     AudioPortConfigSys sys;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
similarity index 88%
rename from media/libaudioclient/aidl/android/media/AudioPort.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortFw.aidl
index ff177c0..5580e35 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
@@ -21,8 +21,9 @@
 
 /**
  * {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
  */
-parcelable AudioPort {
+parcelable AudioPortFw {
     AudioPort hal;
     AudioPortSys sys;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
index f3b5c19..756c469 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
@@ -17,7 +17,7 @@
 package android.media;
 
 import android.media.AudioGainSys;
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
 import android.media.AudioPortExtSys;
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
@@ -36,7 +36,7 @@
     /** System-only parameters for each AudioGain from 'port.gains'. */
     AudioGainSys[] gains;
     /** Current audio port configuration. */
-    AudioPortConfig activeConfig;
+    AudioPortConfigFw activeConfig;
     /** System-only extra parameters for 'port.ext'. */
     AudioPortExtSys ext;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
index 7d159d0..5f1e288 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -45,4 +45,5 @@
     /** The newly created record. */
     @nullable IAudioRecord audioRecord;
     AudioConfigBase serverConfig;
+    AudioConfigBase halConfig;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index da6f454..42e0bb4 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.audio.common.AudioChannelLayout;
+import android.media.audio.common.AudioFormatDescription;
 import android.media.audio.common.AudioStreamType;
 import android.media.IAudioTrack;
 
@@ -38,6 +40,8 @@
     AudioStreamType streamType;
     long afFrameCount;
     int afSampleRate;
+    AudioChannelLayout afChannelMask;
+    AudioFormatDescription afFormat;
     int afLatencyMs;
     /** Interpreted as audio_io_handle_t. */
     int outputId;
diff --git a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
index 9696124..347bf79 100644
--- a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.audio.common.AudioConfigBase;
+
 /**
  * {@hide}
  */
@@ -26,4 +28,6 @@
     int selectedDeviceId;
     /** Interpreted as audio_port_handle_t. */
     int portId;
+    /** The suggested config if the request to get an input fails. **/
+    AudioConfigBase config;
 }
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index f1848b6..9d44bb0 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -16,8 +16,9 @@
 
 package android.media;
 
+import android.media.audio.common.AudioConfigBase;
 import android.media.audio.common.AudioStreamType;
-
+import android.media.AudioAttributesInternal;
 /**
  * {@hide}
  */
@@ -33,4 +34,9 @@
     int[] secondaryOutputs;
     /** True if the track is connected to a spatializer mixer and actually spatialized */
     boolean isSpatialized;
+    /** The suggested audio config if the request to get an output fails. **/
+    AudioConfigBase configBase;
+    boolean isBitPerfect;
+    /** The corrected audio attributes. **/
+    AudioAttributesInternal attr;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 9b8a843..6497447 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -16,9 +16,9 @@
 
 package android.media;
 
-import android.media.AudioPatch;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPatchFw;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
 import android.media.AudioUniqueIdUse;
 import android.media.AudioVibratorInfo;
 import android.media.CreateEffectRequest;
@@ -35,6 +35,8 @@
 import android.media.IAudioFlingerClient;
 import android.media.IAudioRecord;
 import android.media.IAudioTrack;
+import android.media.ISoundDose;
+import android.media.ISoundDoseCallback;
 import android.media.LatencyMode;
 import android.media.MicrophoneInfoData;
 import android.media.RenderPosition;
@@ -132,8 +134,6 @@
     OpenInputResponse openInput(in OpenInputRequest request);
     void closeInput(int /* audio_io_handle_t */ input);
 
-    void invalidateStream(AudioStreamType stream);
-
     void setVoiceVolume(float volume);
 
     RenderPosition getRenderPosition(int /* audio_io_handle_t */ output);
@@ -182,18 +182,18 @@
     void setLowRamDevice(boolean isLowRamDevice, long totalMemory);
 
     /* Get attributes for a given audio port */
-    AudioPort getAudioPort(in AudioPort port);
+    AudioPortFw getAudioPort(in AudioPortFw port);
 
     /* Create an audio patch between several source and sink ports */
-    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch);
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch);
 
     /* Release an audio patch */
     void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
 
     /* List existing audio patches */
-    AudioPatch[] listAudioPatches(int maxCount);
+    AudioPatchFw[] listAudioPatches(int maxCount);
     /* Set audio port configuration */
-    void setAudioPortConfig(in AudioPortConfig config);
+    void setAudioPortConfig(in AudioPortConfigFw config);
 
     /* Get the HW synchronization source used for an audio session */
     int /* audio_hw_sync_t */ getAudioHwSyncForSession(int /* audio_session_t */ sessionId);
@@ -227,7 +227,7 @@
 
     int getAAudioHardwareBurstMinUsec();
 
-    void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+    void setDeviceConnectedState(in AudioPortFw devicePort, boolean connected);
 
     /**
      * Requests a given latency mode (See LatencyMode.aidl) on an output stream.
@@ -246,6 +246,32 @@
      */
     LatencyMode[] getSupportedLatencyModes(int output);
 
+    /**
+     * Queries whether the implementation supports controlling the latency modes
+     * over the Bluetooth A2DP or LE Audio links. If it does, the
+     * setRequestedLatencyMode() and getSupportedLatencyModes() APIs can also be used
+     * for streams routed to Bluetooth and not just for the spatializer output.
+     */
+    boolean supportsBluetoothLatencyModes();
+
+    /**
+     * Enables or disables the variable Bluetooth latency control mechanism in the
+     * audio framework and the audio HAL. This does not apply to the latency mode control
+     * on the spatializer output, which is a built-in feature.
+     */
+    void setBluetoothLatencyModesEnabled(boolean enabled);
+
+    /**
+     * Registers the sound dose callback and returns the interface for executing
+     * sound dose methods on the audio server.
+     */
+    ISoundDose getSoundDoseInterface(in ISoundDoseCallback callback);
+
+    /**
+     * Invalidate all tracks with given port ids.
+     */
+    void invalidateTracks(in int[] /* audio_port_handle_t[] */ portIds);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
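
A small client-side sketch of the Bluetooth latency-mode controls added to this interface, written against the C++ adapter methods introduced in IAudioFlinger.cpp earlier in this change; the sp<IAudioFlinger> is assumed to be already connected.

#include <media/IAudioFlinger.h>
#include <utils/Errors.h>

namespace android {

// Enable variable Bluetooth latency control only when the implementation reports support.
status_t enableBtLatencyModesIfSupported(const sp<IAudioFlinger>& af) {
    bool supported = false;
    const status_t status = af->supportsBluetoothLatencyModes(&supported);
    if (status != NO_ERROR || !supported) {
        return status;  // query failed, or latency modes remain limited to the spatializer output
    }
    return af->setBluetoothLatencyModesEnabled(true);
}

}  // namespace android
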
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 8ac89a8..ec5097a 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -22,13 +22,14 @@
 import android.media.AudioAttributesInternal;
 import android.media.AudioDirectMode;
 import android.media.AudioMix;
+import android.media.AudioMixerAttributesInternal;
 import android.media.AudioOffloadMode;
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
 import android.media.AudioPolicyDeviceState;
 import android.media.AudioPolicyForcedConfig;
 import android.media.AudioPolicyForceUse;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
 import android.media.AudioProductStrategy;
@@ -137,7 +138,7 @@
 
     int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
 
-    AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr, boolean forVolume);
+    AudioDevice[] getDevicesForAttributes(in AudioAttributesInternal attr, boolean forVolume);
 
     int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
 
@@ -212,16 +213,16 @@
     int listAudioPorts(AudioPortRole role,
                        AudioPortType type,
                        inout Int count,
-                       out AudioPort[] ports);
+                       out AudioPortFw[] ports);
 
     /** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
-    AudioPort getAudioPort(int /* audio_port_handle_t */ portId);
+    AudioPortFw getAudioPort(int /* audio_port_handle_t */ portId);
 
     /**
      * Create an audio patch between several source and sink ports.
      * The handle argument is used when updating an existing patch.
      */
-    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch, int handle);
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch, int handle);
 
     /** Release an audio patch. */
     void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
@@ -234,10 +235,10 @@
      * Passing '0' on input and inspecting the value on output is a common way of determining the
      * number of elements without actually retrieving them.
      */
-    int listAudioPatches(inout Int count, out AudioPatch[] patches);
+    int listAudioPatches(inout Int count, out AudioPatchFw[] patches);
 
     /** Set audio port configuration. */
-    void setAudioPortConfig(in AudioPortConfig config);
+    void setAudioPortConfig(in AudioPortConfigFw config);
 
     void registerClient(IAudioPolicyServiceClient client);
 
@@ -261,7 +262,7 @@
 
     void removeUserIdDeviceAffinities(int userId);
 
-    int /* audio_port_handle_t */ startAudioSource(in AudioPortConfig source,
+    int /* audio_port_handle_t */ startAudioSource(in AudioPortConfigFw source,
                                                    in AudioAttributesInternal attributes);
 
     void stopAudioSource(int /* audio_port_handle_t */ portId);
@@ -313,11 +314,11 @@
     boolean isUltrasoundSupported();
 
     AudioProductStrategy[] listAudioProductStrategies();
-    int /* product_strategy_t */ getProductStrategyFromAudioAttributes(in AudioAttributesEx aa,
-                                                                       boolean fallbackOnDefault);
+    int /* product_strategy_t */ getProductStrategyFromAudioAttributes(
+            in AudioAttributesInternal aa, boolean fallbackOnDefault);
 
     AudioVolumeGroup[] listAudioVolumeGroups();
-    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesEx aa,
+    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesInternal aa,
                                                                boolean fallbackOnDefault);
 
     void setRttEnabled(boolean enabled);
@@ -391,6 +392,60 @@
      */
     AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
 
+    /**
+     * Return a list of AudioMixerAttributes that can be used to set preferred mixer attributes
+     * for the given device.
+     */
+    AudioMixerAttributesInternal[] getSupportedMixerAttributes(
+            int /* audio_port_handle_t */ portId);
+
+    /**
+     * Set preferred mixer attributes for a given device and audio attributes.
+     * When conflicting requests are received, the last request will be honored.
+     * The preferred mixer attributes can only be set when 1) the usage is media, 2) the
+     * given device is currently available, 3) the given device is a USB device, and 4) the
+     * given mixer attributes are supported by the given device.
+     *
+     * @param attr the audio attributes whose mixer attributes should be set.
+     * @param portId the port id of the device to be routed.
+     * @param uid the uid of the requesting client. The uid is used to track ownership of
+     *            the preferred mixer attributes. All playback with the same audio attributes from
+     *            the same uid will be attached to the mixer with the preferred attributes if the
+     *            playback is routed to the given device.
+     * @param mixerAttr the preferred mixer attributes.
+     */
+    void setPreferredMixerAttributes(in AudioAttributesInternal attr,
+                                     int /* audio_port_handle_t */ portId,
+                                     int /* uid_t */ uid,
+                                     in AudioMixerAttributesInternal mixerAttr);
+
+    /**
+     * Get preferred mixer attributes for a given device and audio attributes.
+     * Null will be returned if there are no preferred mixer attributes set or they have
+     * been cleared.
+     *
+     * @param attr the audio attributes whose mixer attributes should be retrieved.
+     * @param portId the port id of the device to be routed.
+     */
+    @nullable AudioMixerAttributesInternal getPreferredMixerAttributes(
+            in AudioAttributesInternal attr,
+            int /* audio_port_handle_t */ portId);
+
+    /**
+     * Clear preferred mixer attributes for a given device and audio attributes that
+     * were previously set via setPreferredMixerAttributes.
+     *
+     * @param attr the audio attributes whose mixer attributes should be cleared.
+     * @param portId the port id of the device to be routed.
+     * @param uid the uid of the requesting client. The uid is used to identify ownership of the
+     *            preferred mixer attributes. The preferred mixer attributes will only be cleared
+     *            if the uid is the same as the owner of the current preferred mixer attributes.
+     */
+    void clearPreferredMixerAttributes(in AudioAttributesInternal attr,
+                                       int /* audio_port_handle_t */ portId,
+                                       int /* uid_t */ uid);
+
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
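
A hedged sketch of the preferred-mixer-attributes flow documented above, written against the generated C++ binder proxy. It assumes the standard AIDL C++ backend mapping (parcelables passed by const reference, @nullable results returned as std::optional) and an already-connected sp<media::IAudioPolicyService>.

#include <optional>

#include <android/media/IAudioPolicyService.h>

using android::binder::Status;
using android::media::AudioAttributesInternal;
using android::media::AudioMixerAttributesInternal;
using android::media::IAudioPolicyService;

// Set the preferred (e.g. bit-perfect) mixer attributes for a device and read them back.
Status configurePreferredMixer(const android::sp<IAudioPolicyService>& aps,
                               const AudioAttributesInternal& attr,
                               int32_t portId, int32_t uid,
                               const AudioMixerAttributesInternal& mixerAttr) {
    // Only honored when the usage is media and the port belongs to an available USB device.
    Status status = aps->setPreferredMixerAttributes(attr, portId, uid, mixerAttr);
    if (!status.isOk()) {
        return status;
    }
    // std::nullopt here means nothing is currently set (or it has been cleared).
    std::optional<AudioMixerAttributesInternal> current;
    return aps->getPreferredMixerAttributes(attr, portId, &current);
}
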
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
new file mode 100644
index 0000000..f31f091
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SoundDoseRecord;
+
+/**
+ * Interface used to push sound dose-related information from the
+ * AudioService#SoundDoseHelper to the audio server.
+ */
+oneway interface ISoundDose {
+    /** Set a new RS2 value used for momentary exposure warnings. */
+    void setOutputRs2(float rs2Value);
+
+    /**
+     * Resets the native CSD values. This can happen after a crash in the
+     * audio server or after booting when restoring the previous state.
+     * 'currentCsd' represents the restored CSD value and 'records' contains the
+     * dosage values and MELs together with their timestamps that led to this
+     * CSD.
+     */
+    void resetCsd(float currentCsd, in SoundDoseRecord[] records);
+}
diff --git a/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl b/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl
new file mode 100644
index 0000000..7e59409
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SoundDoseRecord;
+
+/**
+ * Interface used to push sound dose-related information from the audio
+ * server to the AudioService#SoundDoseHelper.
+ */
+interface ISoundDoseCallback {
+    /** Called whenever the momentary exposure exceeds the RS2 value. */
+    oneway void onMomentaryExposure(float currentMel, int deviceId);
+
+    /**
+     * Notifies that the CSD value has changed. The currentCsd is normalized so
+     * that a value of 1 represents 100% of the sound dose. SoundDoseRecord represents
+     * the newest record that led to the new currentCsd.
+     */
+    oneway void onNewCsdValue(float currentCsd, in SoundDoseRecord[] records);
+}
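
A minimal sketch of the receiving side of these two interfaces: implement the generated BnSoundDoseCallback and register it through the getSoundDoseInterface() adapter added earlier in this change. Generated class names and method signatures are assumed from the standard AIDL C++ backend.

#define LOG_TAG "SoundDoseExample"

#include <vector>

#include <android/media/BnSoundDoseCallback.h>
#include <media/IAudioFlinger.h>
#include <utils/Log.h>

namespace android {

// Receives momentary exposure warnings and CSD updates pushed by the audio server.
class SoundDoseListener : public media::BnSoundDoseCallback {
public:
    binder::Status onMomentaryExposure(float currentMel, int32_t deviceId) override {
        ALOGW("momentary exposure: MEL %.2f on device %d", currentMel, deviceId);
        return binder::Status::ok();
    }
    binder::Status onNewCsdValue(float currentCsd,
                                 const std::vector<media::SoundDoseRecord>& records) override {
        ALOGI("CSD is now %.1f%% (%zu records)", currentCsd * 100.f, records.size());
        return binder::Status::ok();
    }
};

// Registers the callback; the returned ISoundDose handle is what would later be used to
// push restored state back to the audio server via resetCsd().
status_t registerSoundDoseListener(const sp<IAudioFlinger>& af, sp<media::ISoundDose>* out) {
    sp<SoundDoseListener> listener = sp<SoundDoseListener>::make();
    return af->getSoundDoseInterface(listener, out);
}

}  // namespace android
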
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index 90e7ea6..ddda8bb 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioPort;
+import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
 
@@ -29,7 +29,7 @@
     AudioConfig halConfig;
     AudioConfigBase mixerConfig;
     /** Type must be DEVICE. */
-    AudioPort device;
+    AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
 }
diff --git a/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl b/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl
new file mode 100644
index 0000000..94b8ce2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/** Record containing information about the computed sound dose. */
+parcelable SoundDoseRecord {
+    /**
+     * Corresponds to the time in seconds from which the CSD value is calculated.
+     * Values should be consistent and referenced from the same clock (e.g.: monotonic)
+     */
+    long timestamp;
+    /** Corresponds to the duration that leads to the CSD value. */
+    int duration;
+    /** The actual contribution to the CSD computation, normalized: 1.f is 100% CSD. */
+    float value;
+    /** The average MEL value in this time frame that led to this CSD value. */
+    float averageMel;
+}
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 036e72e..47fe0f6 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -84,13 +84,15 @@
 };
 
 template <typename T, typename X, typename FUNC>
-std::vector<T> getFlags(const xsdc_enum_range<X> &range, const FUNC &func,
-                        const std::string &findString = {}) {
+std::vector<T> getFlags(const xsdc_enum_range<X>& range, const FUNC& func,
+                        const std::string& findString = {},
+                        const std::set<X>& excludedValues = {}) {
     std::vector<T> vec;
     for (const auto &xsdEnumVal : range) {
         T enumVal;
         std::string enumString = toString(xsdEnumVal);
         if (enumString.find(findString) != std::string::npos &&
+            (excludedValues.find(xsdEnumVal) == excludedValues.end()) &&
             func(enumString.c_str(), &enumVal)) {
             vec.push_back(enumVal);
         }
@@ -102,13 +104,29 @@
     getFlags<audio_stream_type_t, xsd::AudioStreamType, decltype(audio_stream_type_from_string)>(
         xsdc_enum_range<xsd::AudioStreamType>{}, audio_stream_type_from_string);
 
+/**
+ * AudioFormat - AUDIO_FORMAT_HE_AAC_V1 and AUDIO_FORMAT_HE_AAC_V2
+ * are excluded from kFormats[] in order to avoid the abort triggered
+ * for these two types of AudioFormat in
+ * AidlConversion::legacy2aidl_audio_format_t_AudioFormatDescription()
+ */
 static const std::vector<audio_format_t> kFormats =
-    getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
-        xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string);
+        getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
+                xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string, {},
+                {xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V1,
+                 xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V2});
 
+/**
+ * AudioChannelMask - AUDIO_CHANNEL_IN_6
+ * is excluded from kChannelMasks[] in order to avoid the abort triggered
+ * for this type of AudioChannelMask in
+ * AidlConversion::legacy2aidl_audio_channel_mask_t_AudioChannelLayout()
+ */
 static const std::vector<audio_channel_mask_t> kChannelMasks =
-    getFlags<audio_channel_mask_t, xsd::AudioChannelMask, decltype(audio_channel_mask_from_string)>(
-        xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string);
+        getFlags<audio_channel_mask_t, xsd::AudioChannelMask,
+                 decltype(audio_channel_mask_from_string)>(
+                xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string, {},
+                {xsd::AudioChannelMask::AUDIO_CHANNEL_IN_6});
 
 static const std::vector<audio_usage_t> kUsages =
     getFlags<audio_usage_t, xsd::AudioUsage, decltype(audio_usage_from_string)>(
@@ -126,9 +144,17 @@
     getFlags<audio_gain_mode_t, xsd::AudioGainMode, decltype(audio_gain_mode_from_string)>(
         xsdc_enum_range<xsd::AudioGainMode>{}, audio_gain_mode_from_string);
 
+/**
+ * AudioDevice - AUDIO_DEVICE_IN_AMBIENT and AUDIO_DEVICE_IN_COMMUNICATION
+ * are excluded from kDevices[] in order to avoid the abort triggered
+ * for these two types of AudioDevice in
+ * AidlConversion::aidl2legacy_AudioDeviceDescription_audio_devices_t()
+ */
 static const std::vector<audio_devices_t> kDevices =
-    getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
-        xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string);
+        getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
+                xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string, {},
+                {xsd::AudioDevice::AUDIO_DEVICE_IN_AMBIENT,
+                 xsd::AudioDevice::AUDIO_DEVICE_IN_COMMUNICATION});
 
 static const std::vector<audio_input_flags_t> kInputFlags =
     getFlags<audio_input_flags_t, xsd::AudioInOutFlag, decltype(audio_input_flag_from_string)>(
@@ -558,7 +584,12 @@
 
     float balance = mFdp.ConsumeFloatingPoint<float>();
     af->getMasterBalance(&balance);
-    af->invalidateStream(static_cast<audio_stream_type_t>(mFdp.ConsumeIntegral<uint32_t>()));
+
+    std::vector<audio_port_handle_t> tracks;
+    for (int i = 0; i < mFdp.ConsumeIntegralInRange<int32_t>(0, MAX_ARRAY_LENGTH); ++i) {
+        tracks.push_back(static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>()));
+    }
+    af->invalidateTracks(tracks);
 }
 
 status_t AudioFlingerFuzzer::invokeAudioInputDevice() {
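
The exclusion lists above work because getFlags() applies three filters in sequence: a substring match on the enum's string name, membership in the excludedValues set, and a successful string-to-legacy conversion. A standalone sketch of that same selection logic, where Color, colorFromString and friends are hypothetical stand-ins and not part of this change:

#include <set>
#include <string>
#include <vector>

enum class Color { RED, GREEN, DARK_RED };

static std::string toString(Color c) {
    switch (c) {
        case Color::RED: return "RED";
        case Color::GREEN: return "GREEN";
        case Color::DARK_RED: return "DARK_RED";
    }
    return "";
}

// Stand-in for the audio_*_from_string() converters: returns true and fills 'out' only for
// names it recognizes.
static bool colorFromString(const std::string& s, int* out) {
    if (s == "RED") { *out = 0; return true; }
    if (s == "DARK_RED") { *out = 1; return true; }
    return false;  // GREEN fails conversion, like an enum with no legacy equivalent
}

static std::vector<int> selectColors() {
    const std::vector<Color> range = {Color::RED, Color::GREEN, Color::DARK_RED};
    const std::set<Color> excluded = {Color::DARK_RED};  // would abort downstream, so skip it
    std::vector<int> vec;
    for (Color c : range) {
        int legacy;
        const std::string name = toString(c);
        if (name.find("RED") != std::string::npos &&   // findString filter
            excluded.find(c) == excluded.end() &&      // excludedValues filter
            colorFromString(name, &legacy)) {          // conversion must succeed
            vec.push_back(legacy);                     // only RED survives all three checks
        }
    }
    return vec;
}
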
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index afcb61a..ff3598f 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -29,8 +29,8 @@
 #include <android/media/AudioIoConfigEvent.h>
 #include <android/media/AudioIoDescriptor.h>
 #include <android/media/AudioPlaybackRate.h>
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
 #include <android/media/AudioPortDeviceExtSys.h>
 #include <android/media/AudioTimestampInternal.h>
 #include <android/media/AudioUniqueIdUse.h>
@@ -92,13 +92,13 @@
         const audio_port_config_session_ext& legacy);
 
 ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
-        const media::AudioPortConfig& aidl);
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const media::AudioPortConfigFw& aidl);
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfig(
         const audio_port_config& legacy);
 
 ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
-        const media::AudioPatch& aidl);
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const media::AudioPatchFw& aidl);
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatch(
         const struct audio_patch& legacy);
 
 ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
@@ -170,8 +170,8 @@
 legacy2aidl_audio_port_session_ext_int32_t(const audio_port_session_ext& legacy);
 
 ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
+aidl2legacy_AudioPort_audio_port_v7(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
 legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
 
 ConversionResult<audio_unique_id_use_t>
diff --git a/media/libaudioclient/include/media/AudioCommonTypes.h b/media/libaudioclient/include/media/AudioCommonTypes.h
index 862a0f9..2567542 100644
--- a/media/libaudioclient/include/media/AudioCommonTypes.h
+++ b/media/libaudioclient/include/media/AudioCommonTypes.h
@@ -94,6 +94,7 @@
 
 using AttributesVector = std::vector<audio_attributes_t>;
 using StreamTypeVector = std::vector<audio_stream_type_t>;
+using PortHandleVector = std::vector<audio_port_handle_t>;
 
 using TrackSecondaryOutputsMap = std::map<audio_port_handle_t, std::vector<audio_io_handle_t>>;
 
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index cab476e..61f2069 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -34,11 +34,13 @@
 #define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
 #define RULE_MATCH_UID                      (0x1 << 2)
 #define RULE_MATCH_USERID                   (0x1 << 3)
+#define RULE_MATCH_AUDIO_SESSION_ID         (0x1 << 4)
 #define RULE_EXCLUDE_ATTRIBUTE_USAGE  (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
 #define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
                                       (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
 #define RULE_EXCLUDE_UID              (RULE_EXCLUSION_MASK|RULE_MATCH_UID)
 #define RULE_EXCLUDE_USERID           (RULE_EXCLUSION_MASK|RULE_MATCH_USERID)
+#define RULE_EXCLUDE_AUDIO_SESSION_ID       (RULE_EXCLUSION_MASK|RULE_MATCH_AUDIO_SESSION_ID)
 
 #define MIX_TYPE_INVALID (-1)
 #define MIX_TYPE_PLAYERS 0
@@ -78,6 +80,7 @@
         audio_source_t  mSource;
         uid_t           mUid;
         int        mUserId;
+        audio_session_t  mAudioSessionId;
     } mValue;
     uint32_t        mRule;
 };
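
A minimal sketch of populating the new session-based matching rule from C++. The field and macro names come from this header; the enclosing class name (AudioMixMatchCriterion) and its default constructor are assumed from the rest of libaudioclient.

#include <media/AudioPolicy.h>
#include <system/audio.h>

namespace android {

// Build a criterion that matches (or, with exclude == true, excludes) a specific audio session.
AudioMixMatchCriterion makeSessionCriterion(audio_session_t session, bool exclude) {
    AudioMixMatchCriterion criterion;
    criterion.mValue.mAudioSessionId = session;
    criterion.mRule = exclude ? RULE_EXCLUDE_AUDIO_SESSION_ID : RULE_MATCH_AUDIO_SESSION_ID;
    return criterion;
}

}  // namespace android
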
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index b55b506..7bcb5aa 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -20,7 +20,7 @@
 #include <android/media/AudioProductStrategy.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioCommonTypes.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <binder/Parcelable.h>
@@ -31,12 +31,15 @@
 {
 public:
     AudioProductStrategy() {}
-    AudioProductStrategy(const std::string &name, const std::vector<AudioAttributes> &attributes,
+    AudioProductStrategy(const std::string &name,
+                         const std::vector<VolumeGroupAttributes> &attributes,
                          product_strategy_t id) :
-        mName(name), mAudioAttributes(attributes), mId(id) {}
+        mName(name), mVolumeGroupAttributes(attributes), mId(id) {}
 
     const std::string &getName() const { return mName; }
-    std::vector<AudioAttributes> getAudioAttributes() const { return mAudioAttributes; }
+    std::vector<VolumeGroupAttributes> getVolumeGroupAttributes() const {
+        return mVolumeGroupAttributes;
+    }
     product_strategy_t getId() const { return mId; }
 
     status_t readFromParcel(const Parcel *parcel) override;
@@ -58,7 +61,7 @@
                                   const audio_attributes_t clientAttritubes);
 private:
     std::string mName;
-    std::vector<AudioAttributes> mAudioAttributes;
+    std::vector<VolumeGroupAttributes> mVolumeGroupAttributes;
     product_strategy_t mId;
 };
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 5a1ff65..3f33d7a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -326,6 +326,15 @@
      */
             uint32_t    getSampleRate() const   { return mSampleRate; }
 
+    /* Return the sample rate from the AudioFlinger input thread. */
+            uint32_t    getHalSampleRate() const;
+
+    /* Return the channel count from the AudioFlinger input thread. */
+            uint32_t    getHalChannelCount() const;
+
+    /* Return the HAL format from the AudioFlinger input thread. */
+            audio_format_t    getHalFormat() const;
+
     /* Sets marker position. When record reaches the number of frames specified,
      * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
      * with marker == 0 cancels marker notification callback.
@@ -770,6 +779,9 @@
     size_t                  mServerSampleSize;
     std::unique_ptr<uint8_t[]> mFormatConversionBufRaw;
     Buffer                  mFormatConversionBuffer;
+    uint32_t                mHalSampleRate;          // AudioFlinger thread sample rate
+    uint32_t                mHalChannelCount;        // AudioFlinger thread channel count
+    audio_format_t          mHalFormat;              // AudioFlinger thread format
 
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
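
A small usage sketch of the new HAL-side accessors declared above, assuming the AudioRecord has already been successfully initialized.

#include <media/AudioRecord.h>

namespace android {

// True when AudioFlinger's input thread runs at a different rate than the client requested,
// i.e. the framework is resampling the capture stream for this record.
bool halResamplesInput(const sp<AudioRecord>& record) {
    return record->getHalSampleRate() != record->getSampleRate();
}

}  // namespace android
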
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 9411f46..e2806c7 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -26,8 +26,13 @@
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
+#include <android/media/EffectDescriptor.h>
 #include <android/media/INativeSpatializerCallback.h>
+#include <android/media/ISoundDose.h>
+#include <android/media/ISoundDoseCallback.h>
 #include <android/media/ISpatializer.h>
+#include <android/media/RecordClientInfo.h>
+#include <android/media/audio/common/AudioConfigBase.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <android/media/audio/common/AudioPort.h>
@@ -279,29 +284,67 @@
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
 
+    /**
+     * Get an output stream for the given parameters.
+     *
+     * @param[in] attr the requested audio attributes
+     * @param[in|out] output the io handle of the output used for playback. It is specified
+     *                       when starting an mmap thread.
+     * @param[in] session the session id for the client
+     * @param[in|out] stream the stream type used for the playback
+     * @param[in] attributionSource the attribution source used to check access to
+     *                              permission-protected data
+     * @param[in|out] config the configuration requested by the client; a suggested
+     *                       configuration is returned if no suitable output is found for the
+     *                       requested configuration
+     * @param[in] flags the output flags requested by the client
+     * @param[in|out] selectedDeviceId the requested device id for playback; the actual device
+     *                                 id used for playback will be returned
+     * @param[out] portId the generated port id to identify the client
+     * @param[out] secondaryOutputs collection of io handles for secondary outputs
+     * @param[out] isSpatialized true if the playback will be spatialized
+     * @param[out] isBitPerfect true if the playback will be bit-perfect
+     * @return whether the call was successful
+     */
     static status_t getOutputForAttr(audio_attributes_t *attr,
                                      audio_io_handle_t *output,
                                      audio_session_t session,
                                      audio_stream_type_t *stream,
                                      const AttributionSourceState& attributionSource,
-                                     const audio_config_t *config,
+                                     audio_config_t *config,
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      audio_port_handle_t *portId,
                                      std::vector<audio_io_handle_t> *secondaryOutputs,
-                                     bool *isSpatialized);
+                                     bool *isSpatialized,
+                                     bool *isBitPerfect);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via createRecord(),
-    // or release it with releaseInput().
+    /**
+     * Get an input stream for the given parameters.
+     * The client must successfully hand off the handle reference to AudioFlinger via
+     * createRecord(), or release it with releaseInput().
+     *
+     * @param[in] attr the requested audio attributes
+     * @param[in|out] input the io handle of the input for the capture; it is only specified
+     *                      by the caller when starting an mmap thread
+     * @param[in] riid a unique id identifying the record client
+     * @param[in] session the session id for the client
+     * @param[in] attributionSource the attribution source used for permission checks
+     * @param[in|out] config the configuration requested by the client; the suggested
+     *                       configuration is returned if no suitable input is found for it
+     * @param[in] flags the input flags requested by the client
+     * @param[in|out] selectedDeviceId the device id requested for capture; the device id
+     *                                 actually used for capture is returned
+     * @param[out] portId the generated port id identifying the client
+     * @return whether the call was successful
+     */
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
                                     audio_unique_id_t riid,
                                     audio_session_t session,
-                                     const AttributionSourceState& attributionSource,
-                                    const audio_config_base_t *config,
+                                    const AttributionSourceState& attributionSource,
+                                    audio_config_base_t *config,
                                     audio_input_flags_t flags,
                                     audio_port_handle_t *selectedDeviceId,
                                     audio_port_handle_t *portId);
@@ -331,7 +374,7 @@
     static status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
     static product_strategy_t getStrategyForStream(audio_stream_type_t stream);
-    static status_t getDevicesForAttributes(const AudioAttributes &aa,
+    static status_t getDevicesForAttributes(const audio_attributes_t &aa,
                                             AudioDeviceTypeAddrVector *devices,
                                             bool forVolume);
 
@@ -455,7 +498,7 @@
 
     static status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
     static status_t getProductStrategyFromAudioAttributes(
-            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            const audio_attributes_t &aa, product_strategy_t &productStrategy,
             bool fallbackOnDefault = true);
 
     static audio_attributes_t streamTypeToAttributes(audio_stream_type_t stream);
@@ -464,7 +507,8 @@
     static status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups);
 
     static status_t getVolumeGroupFromAudioAttributes(
-            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault = true);
+            const audio_attributes_t &aa, volume_group_t &volumeGroup,
+            bool fallbackOnDefault = true);
 
     static status_t setRttEnabled(bool enabled);
 
@@ -546,6 +590,16 @@
                                      bool *canBeSpatialized);
 
     /**
+     * Registers the sound dose callback with the audio server and returns the ISoundDose
+     * interface.
+     *
+     * \param callback binder used by the audio server to send messages to the AudioService
+     * \param soundDose out parameter receiving the ISoundDose binder used to message the audio server
+     **/
+    static status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                          sp<media::ISoundDose>* soundDose);
+
+    /**
      * Query how the direct playback is currently supported on the device.
      * @param attr audio attributes describing the playback use case
      * @param config audio configuration for the playback
@@ -575,6 +629,23 @@
     static status_t getSupportedLatencyModes(audio_io_handle_t output,
             std::vector<audio_latency_mode_t>* modes);
 
+    static status_t setBluetoothLatencyModesEnabled(bool enabled);
+
+    static status_t supportsBluetoothLatencyModes(bool *support);
+
+    static status_t getSupportedMixerAttributes(audio_port_handle_t portId,
+                                                std::vector<audio_mixer_attributes_t> *mixerAttrs);
+    static status_t setPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                audio_port_handle_t portId,
+                                                uid_t uid,
+                                                const audio_mixer_attributes_t *mixerAttr);
+    static status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                audio_port_handle_t portId,
+                                                std::optional<audio_mixer_attributes_t>* mixerAttr);
+    static status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                  audio_port_handle_t portId,
+                                                  uid_t uid);
+
     // A listener for capture state changes.
     class CaptureStateListener : public virtual RefBase {
     public:
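A minimal sketch, not part of this change, of a call through the updated getOutputForAttr() signature above, where config is now in/out and isBitPerfect is a new out parameter; the attributionSource value is assumed to be provided by the caller.

audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
attr.usage = AUDIO_USAGE_MEDIA;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
audio_config_t config = AUDIO_CONFIG_INITIALIZER;   // may come back with a suggested config
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized = false;
bool isBitPerfect = false;
status_t status = AudioSystem::getOutputForAttr(&attr, &output, AUDIO_SESSION_ALLOCATE, &stream,
        attributionSource /* AttributionSourceState from the caller */, &config,
        AUDIO_OUTPUT_FLAG_NONE, &selectedDeviceId, &portId, &secondaryOutputs,
        &isSpatialized, &isBitPerfect);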
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index b6ee483..31f81be 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIOTRACK_H
 #define ANDROID_AUDIOTRACK_H
 
+#include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
 #include <cutils/sched_policy.h>
 #include <media/AudioSystem.h>
@@ -654,6 +655,15 @@
      */
             uint32_t    getOriginalSampleRate() const;
 
+    /* Return the sample rate from the AudioFlinger output thread. */
+            uint32_t    getHalSampleRate() const;
+
+    /* Return the channel count from the AudioFlinger output thread. */
+            uint32_t    getHalChannelCount() const;
+
+    /* Return the HAL format from the AudioFlinger output thread. */
+            audio_format_t    getHalFormat() const;
+
     /* Sets the Dual Mono mode presentation on the output device. */
             status_t    setDualMonoMode(audio_dual_mono_mode_t mode);
 
@@ -1145,6 +1155,8 @@
             void setAudioTrackCallback(const sp<media::IAudioTrackCallback>& callback) {
                 mAudioTrackCallback->setAudioTrackCallback(callback);
             }
+ private:
+            void triggerPortIdUpdate_l();
 
  protected:
     /* copying audio tracks is not allowed */
@@ -1267,12 +1279,14 @@
     size_t                  mReqFrameCount;         // frame count to request the first or next time
                                                     // a new IAudioTrack is needed, non-decreasing
 
-    // The following AudioFlinger server-side values are cached in createAudioTrack_l().
+    // The following AudioFlinger server-side values are cached in createTrack_l().
     // These values can be used for informational purposes until the track is invalidated,
     // whereupon restoreTrack_l() calls createTrack_l() to update the values.
     uint32_t                mAfLatency;             // AudioFlinger latency in ms
     size_t                  mAfFrameCount;          // AudioFlinger frame count
     uint32_t                mAfSampleRate;          // AudioFlinger sample rate
+    uint32_t                mAfChannelCount;        // AudioFlinger channel count
+    audio_format_t          mAfFormat;              // AudioFlinger format
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
@@ -1411,7 +1425,7 @@
 
     audio_session_t         mSessionId;
     int                     mAuxEffectId;
-    audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+    audio_port_handle_t     mPortId = AUDIO_PORT_HANDLE_NONE; // Id from Audio Policy Manager
 
     /**
      * mPlayerIId is the player id of the AudioTrack used by AudioManager.
@@ -1419,6 +1433,9 @@
      */
     int                     mPlayerIId = -1;  // AudioManager.h PLAYER_PIID_INVALID
 
+    /** Interface for interacting with the AudioService. */
+    sp<IAudioManager>       mAudioManager;
+
     /**
      * mLogSessionId is a string identifying this AudioTrack for the metrics service.
      * It may be unique or shared with other objects.  An empty string means the
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index c891ae6..57dd2c1 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -55,6 +55,8 @@
 #include "android/media/IAudioTrackCallback.h"
 #include "android/media/IEffect.h"
 #include "android/media/IEffectClient.h"
+#include "android/media/ISoundDose.h"
+#include "android/media/ISoundDoseCallback.h"
 #include "android/media/OpenInputRequest.h"
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
@@ -116,6 +118,8 @@
         size_t   afFrameCount;
         uint32_t afSampleRate;
         uint32_t afLatencyMs;
+        audio_channel_mask_t afChannelMask;
+        audio_format_t afFormat;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
         sp<media::IAudioTrack> audioTrack;
@@ -169,6 +173,7 @@
         audio_port_handle_t portId;
         sp<media::IAudioRecord> audioRecord;
         audio_config_base_t serverConfig;
+        audio_config_base_t halConfig;
 
         ConversionResult<media::CreateRecordResponse> toAidl() const;
         static ConversionResult<CreateRecordOutput>
@@ -263,8 +268,6 @@
 
     virtual status_t closeInput(audio_io_handle_t input) = 0;
 
-    virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
-
     virtual status_t setVoiceVolume(float volume) = 0;
 
     virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
@@ -367,6 +370,14 @@
     virtual status_t getSupportedLatencyModes(audio_io_handle_t output,
             std::vector<audio_latency_mode_t>* modes) = 0;
 
+    virtual status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                           sp<media::ISoundDose>* soundDose) = 0;
+
+    virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
+
+    virtual status_t setBluetoothLatencyModesEnabled(bool enabled) = 0;
+
+    virtual status_t supportsBluetoothLatencyModes(bool* support) = 0;
 };
 
 /**
@@ -421,7 +432,6 @@
     status_t openInput(const media::OpenInputRequest& request,
                        media::OpenInputResponse* response) override;
     status_t closeInput(audio_io_handle_t input) override;
-    status_t invalidateStream(audio_stream_type_t stream) override;
     status_t setVoiceVolume(float volume) override;
     status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
                                audio_io_handle_t output) const override;
@@ -473,6 +483,11 @@
             audio_latency_mode_t mode) override;
     status_t getSupportedLatencyModes(
             audio_io_handle_t output, std::vector<audio_latency_mode_t>* modes) override;
+    status_t setBluetoothLatencyModesEnabled(bool enabled) override;
+    status_t supportsBluetoothLatencyModes(bool* support) override;
+    status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                   sp<media::ISoundDose>* soundDose) override;
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -526,7 +541,6 @@
             RESTORE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_restoreOutput,
             OPEN_INPUT = media::BnAudioFlingerService::TRANSACTION_openInput,
             CLOSE_INPUT = media::BnAudioFlingerService::TRANSACTION_closeInput,
-            INVALIDATE_STREAM = media::BnAudioFlingerService::TRANSACTION_invalidateStream,
             SET_VOICE_VOLUME = media::BnAudioFlingerService::TRANSACTION_setVoiceVolume,
             GET_RENDER_POSITION = media::BnAudioFlingerService::TRANSACTION_getRenderPosition,
             GET_INPUT_FRAMES_LOST = media::BnAudioFlingerService::TRANSACTION_getInputFramesLost,
@@ -564,6 +578,12 @@
             SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
             SET_REQUESTED_LATENCY_MODE = media::BnAudioFlingerService::TRANSACTION_setRequestedLatencyMode,
             GET_SUPPORTED_LATENCY_MODES = media::BnAudioFlingerService::TRANSACTION_getSupportedLatencyModes,
+            SET_BLUETOOTH_LATENCY_MODES_ENABLED =
+                    media::BnAudioFlingerService::TRANSACTION_setBluetoothLatencyModesEnabled,
+            SUPPORTS_BLUETOOTH_LATENCY_MODES =
+                    media::BnAudioFlingerService::TRANSACTION_supportsBluetoothLatencyModes,
+            GET_SOUND_DOSE_INTERFACE = media::BnAudioFlingerService::TRANSACTION_getSoundDoseInterface,
+            INVALIDATE_TRACKS = media::BnAudioFlingerService::TRANSACTION_invalidateTracks,
         };
 
     protected:
@@ -643,7 +663,6 @@
     Status openInput(const media::OpenInputRequest& request,
                      media::OpenInputResponse* _aidl_return) override;
     Status closeInput(int32_t input) override;
-    Status invalidateStream(media::audio::common::AudioStreamType stream) override;
     Status setVoiceVolume(float volume) override;
     Status getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) override;
     Status getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) override;
@@ -664,12 +683,12 @@
     Status getPrimaryOutputSamplingRate(int32_t* _aidl_return) override;
     Status getPrimaryOutputFrameCount(int64_t* _aidl_return) override;
     Status setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
-    Status getAudioPort(const media::AudioPort& port, media::AudioPort* _aidl_return) override;
-    Status createAudioPatch(const media::AudioPatch& patch, int32_t* _aidl_return) override;
+    Status getAudioPort(const media::AudioPortFw& port, media::AudioPortFw* _aidl_return) override;
+    Status createAudioPatch(const media::AudioPatchFw& patch, int32_t* _aidl_return) override;
     Status releaseAudioPatch(int32_t handle) override;
     Status listAudioPatches(int32_t maxCount,
-                            std::vector<media::AudioPatch>* _aidl_return) override;
-    Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+                            std::vector<media::AudioPatchFw>* _aidl_return) override;
+    Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
     Status getAudioHwSyncForSession(int32_t sessionId, int32_t* _aidl_return) override;
     Status systemReady() override;
     Status audioPolicyReady() override;
@@ -684,10 +703,15 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
     Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
     Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
-    Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
+    Status setDeviceConnectedState(const media::AudioPortFw& port, bool connected) override;
     Status setRequestedLatencyMode(int output, media::LatencyMode mode) override;
     Status getSupportedLatencyModes(int output,
             std::vector<media::LatencyMode>* _aidl_return) override;
+    Status setBluetoothLatencyModesEnabled(bool enabled) override;
+    Status supportsBluetoothLatencyModes(bool* support) override;
+    Status getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                 sp<media::ISoundDose>* _aidl_return) override;
+    Status invalidateTracks(const std::vector<int32_t>& portIds) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
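A minimal sketch, not part of this change, showing the replacement pattern for the removed invalidateStream(): IAudioFlinger clients now invalidate specific tracks by port id. The audioFlinger handle and port ids are hypothetical.

std::vector<audio_port_handle_t> portIds = {portIdA, portIdB};  // hypothetical port handles
status_t status = audioFlinger->invalidateTracks(portIds);
if (status != NO_ERROR) {
    ALOGW("invalidateTracks failed: %d", status);
}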
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index 23b6bfd..5475f76 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -53,6 +53,10 @@
 
             void baseUpdateDeviceId(audio_port_handle_t deviceId);
 
+            /**
+             * Updates the mapping in the AudioService between portId and piid
+             */
+            void triggerPortIdUpdate(audio_port_handle_t portId) const;
 protected:
 
             void init(player_type_t playerType, audio_usage_t usage, audio_session_t sessionId);
@@ -74,7 +78,6 @@
     // player interface ID, uniquely identifies the player in the system
     // effectively const after PlayerBase::init().
     audio_unique_id_t mPIId;
-
 private:
             // report events to AudioService
             void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 54e778e..ed9ddd6 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -22,6 +22,8 @@
 #include <system/audio.h>
 
 #include <android/media/AudioMix.h>
+#include <android/media/AudioMixerAttributesInternal.h>
+#include <android/media/AudioMixerBehavior.h>
 #include <android/media/AudioMixCallbackFlag.h>
 #include <android/media/AudioMixRouteFlag.h>
 #include <android/media/AudioMixType.h>
@@ -102,4 +104,16 @@
 ConversionResult<media::AudioOffloadMode>
 legacy2aidl_audio_offload_mode_t_AudioOffloadMode(audio_offload_mode_t legacy);
 
+ConversionResult<audio_mixer_behavior_t>
+aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(media::AudioMixerBehavior aidl);
+ConversionResult<media::AudioMixerBehavior>
+legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(audio_mixer_behavior_t legacy);
+
+ConversionResult<audio_mixer_attributes_t>
+aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+        const media::AudioMixerAttributesInternal& aidl);
+ConversionResult<media::AudioMixerAttributesInternal>
+legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(
+        const audio_mixer_attributes_t& legacy);
+
 }  // namespace android
diff --git a/media/libaudioclient/include/media/AudioAttributes.h b/media/libaudioclient/include/media/VolumeGroupAttributes.h
similarity index 74%
rename from media/libaudioclient/include/media/AudioAttributes.h
rename to media/libaudioclient/include/media/VolumeGroupAttributes.h
index 24bd179..0859995 100644
--- a/media/libaudioclient/include/media/AudioAttributes.h
+++ b/media/libaudioclient/include/media/VolumeGroupAttributes.h
@@ -26,15 +26,20 @@
 
 namespace android {
 
-class AudioAttributes : public Parcelable
+class VolumeGroupAttributes : public Parcelable
 {
 public:
-    AudioAttributes() = default;
-    AudioAttributes(const audio_attributes_t &attributes) : mAttributes(attributes) {} // NOLINT
-    AudioAttributes(volume_group_t groupId,
+    VolumeGroupAttributes() = default;
+    VolumeGroupAttributes(const audio_attributes_t &attributes)
+        : mAttributes(attributes) {} // NOLINT
+    VolumeGroupAttributes(volume_group_t groupId,
                     audio_stream_type_t stream,
                     const audio_attributes_t &attributes) :
-         mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {}
+         mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {
+        // TODO: align the native & Java source initializers.
+        // Since this class carries attributes for a volume group, it applies only to playback.
+        mAttributes.source = AUDIO_SOURCE_INVALID;
+    }
 
     audio_attributes_t getAttributes() const { return mAttributes; }
 
@@ -61,8 +66,8 @@
 
 // AIDL conversion routines.
 ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy);
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl);
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy);
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl);
 
 } // namespace android
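A minimal sketch, not part of this change, of constructing the renamed VolumeGroupAttributes; groupId is a hypothetical volume_group_t value. The group constructor resets the capture source because volume groups only describe playback.

audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
attr.usage = AUDIO_USAGE_NOTIFICATION;
android::VolumeGroupAttributes vga(groupId, AUDIO_STREAM_NOTIFICATION, attr);
// vga.getAttributes().source is AUDIO_SOURCE_INVALID regardless of the input attributes.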
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index ef8500b..d1e3d16 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -66,16 +66,16 @@
                  decltype(audio_stream_type_from_string)>(xsdc_enum_range<xsd::AudioStreamType>{},
                                                           audio_stream_type_from_string);
 
-static const std::vector<uint32_t> kMixMatchRules = {
-        RULE_MATCH_ATTRIBUTE_USAGE,
-        RULE_EXCLUDE_ATTRIBUTE_USAGE,
-        RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET,
-        RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET,
-        RULE_MATCH_UID,
-        RULE_EXCLUDE_UID,
-        RULE_MATCH_USERID,
-        RULE_EXCLUDE_USERID,
-};
+static const std::vector<uint32_t> kMixMatchRules = {RULE_MATCH_ATTRIBUTE_USAGE,
+                                                     RULE_EXCLUDE_ATTRIBUTE_USAGE,
+                                                     RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET,
+                                                     RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET,
+                                                     RULE_MATCH_UID,
+                                                     RULE_EXCLUDE_UID,
+                                                     RULE_MATCH_USERID,
+                                                     RULE_EXCLUDE_USERID,
+                                                     RULE_MATCH_AUDIO_SESSION_ID,
+                                                     RULE_EXCLUDE_AUDIO_SESSION_ID};
 
 // Generates a random string.
 std::string CreateRandomString(size_t n) {
@@ -119,16 +119,17 @@
 TEST_F(SerializationTest, AudioProductStrategyBinderization) {
     for (int j = 0; j < 512; j++) {
         const std::string name{"Test APSBinderization for seed::" + std::to_string(mSeed)};
-        std::vector<AudioAttributes> audioattributesvector;
+        std::vector<VolumeGroupAttributes> volumeGroupAttrVector;
         for (auto i = 0; i < 16; i++) {
             audio_attributes_t attributes;
             fillAudioAttributes(attributes);
-            AudioAttributes audioattributes{static_cast<volume_group_t>(rand()),
-                                            kStreamtypes[rand() % kStreamtypes.size()], attributes};
-            audioattributesvector.push_back(audioattributes);
+            VolumeGroupAttributes volumeGroupAttr{static_cast<volume_group_t>(rand()),
+                                                  kStreamtypes[rand() % kStreamtypes.size()],
+                                                  attributes};
+            volumeGroupAttrVector.push_back(volumeGroupAttr);
         }
         product_strategy_t psId = static_cast<product_strategy_t>(rand());
-        AudioProductStrategy aps{name, audioattributesvector, psId};
+        AudioProductStrategy aps{name, volumeGroupAttrVector, psId};
 
         Parcel p;
         EXPECT_EQ(NO_ERROR, aps.writeToParcel(&p)) << name;
@@ -138,12 +139,12 @@
         EXPECT_EQ(NO_ERROR, apsCopy.readFromParcel(&p)) << name;
         EXPECT_EQ(apsCopy.getName(), name) << name;
         EXPECT_EQ(apsCopy.getId(), psId) << name;
-        auto avec = apsCopy.getAudioAttributes();
-        EXPECT_EQ(avec.size(), audioattributesvector.size()) << name;
-        for (int i = 0; i < audioattributesvector.size(); i++) {
-            EXPECT_EQ(avec[i].getGroupId(), audioattributesvector[i].getGroupId()) << name;
-            EXPECT_EQ(avec[i].getStreamType(), audioattributesvector[i].getStreamType()) << name;
-            EXPECT_TRUE(avec[i].getAttributes() == audioattributesvector[i].getAttributes())
+        auto avec = apsCopy.getVolumeGroupAttributes();
+        EXPECT_EQ(avec.size(), volumeGroupAttrVector.size()) << name;
+        for (int i = 0; i < volumeGroupAttrVector.size(); i++) {
+            EXPECT_EQ(avec[i].getGroupId(), volumeGroupAttrVector[i].getGroupId()) << name;
+            EXPECT_EQ(avec[i].getStreamType(), volumeGroupAttrVector[i].getStreamType()) << name;
+            EXPECT_TRUE(avec[i].getAttributes() == volumeGroupAttrVector[i].getAttributes())
                     << name;
         }
     }
@@ -293,17 +294,17 @@
     audio_stream_type_t stream = mAudioStream;
     audio_attributes_t attributes;
     fillAudioAttributes(attributes);
-    AudioAttributes audioattributes{groupId, stream, attributes};
+    VolumeGroupAttributes volumeGroupAttr{groupId, stream, attributes};
 
     Parcel p;
-    EXPECT_EQ(NO_ERROR, audioattributes.writeToParcel(&p)) << msg;
+    EXPECT_EQ(NO_ERROR, volumeGroupAttr.writeToParcel(&p)) << msg;
 
-    AudioAttributes audioattributesCopy;
+    VolumeGroupAttributes volumeGroupAttrCopy;
     p.setDataPosition(0);
-    EXPECT_EQ(NO_ERROR, audioattributesCopy.readFromParcel(&p)) << msg;
-    EXPECT_EQ(audioattributesCopy.getGroupId(), audioattributes.getGroupId()) << msg;
-    EXPECT_EQ(audioattributesCopy.getStreamType(), audioattributes.getStreamType()) << msg;
-    EXPECT_TRUE(audioattributesCopy.getAttributes() == attributes) << msg;
+    EXPECT_EQ(NO_ERROR, volumeGroupAttrCopy.readFromParcel(&p)) << msg;
+    EXPECT_EQ(volumeGroupAttrCopy.getGroupId(), volumeGroupAttr.getGroupId()) << msg;
+    EXPECT_EQ(volumeGroupAttrCopy.getStreamType(), volumeGroupAttr.getStreamType()) << msg;
+    EXPECT_TRUE(volumeGroupAttrCopy.getAttributes() == attributes) << msg;
 }
 
 // audioStream
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index aed847c..3dd2c95 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -332,7 +332,7 @@
 
 bool isPublicStrategy(const AudioProductStrategy& strategy) {
     bool result = true;
-    for (auto& attribute : strategy.getAudioAttributes()) {
+    for (auto& attribute : strategy.getVolumeGroupAttributes()) {
         if (attribute.getAttributes() == AUDIO_ATTRIBUTES_INITIALIZER &&
             (uint32_t(attribute.getStreamType()) >= AUDIO_STREAM_PUBLIC_CNT)) {
             result = false;
@@ -371,7 +371,7 @@
     for (const auto& strategy : strategies) {
         if (!isPublicStrategy(strategy)) continue;
 
-        for (const auto& att : strategy.getAudioAttributes()) {
+        for (const auto& att : strategy.getVolumeGroupAttributes()) {
             if (strategy.attributesMatches(att.getAttributes(), attributes)) {
                 hasStrategyForMedia = true;
                 mediaStrategy = strategy;
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 4513323..6e05abc 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -222,7 +222,7 @@
            mExtraAudioDescriptors == other->getExtraAudioDescriptors();
 }
 
-status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
+status_t AudioPort::writeToParcelable(media::AudioPortFw* parcelable) const {
     parcelable->hal.name = mName;
     parcelable->sys.type = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_type_t_AudioPortType(mType));
@@ -249,7 +249,7 @@
     return OK;
 }
 
-status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
+status_t AudioPort::readFromParcelable(const media::AudioPortFw& parcelable) {
     mName = parcelable.hal.name;
     mType = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioPortType_audio_port_type_t(parcelable.sys.type));
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 5ffbffc..c499513 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -126,7 +126,14 @@
                     "%*sEncapsulation modes: %u, metadata types: %u\n", spaces, "",
                     mEncapsulationModes, mEncapsulationMetadataTypes));
 
-    AudioPort::dump(dst, spaces, nullptr, verbose);
+    std::string portStr;
+    AudioPort::dump(&portStr, spaces, nullptr, verbose);
+    if (!portStr.empty()) {
+        if (!mName.empty()) {
+            dst->append(base::StringPrintf("%*s", spaces, ""));
+        }
+        dst->append(portStr);
+    }
 }
 
 std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
@@ -174,7 +181,7 @@
     return false;
 }
 
-status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
+status_t DeviceDescriptorBase::writeToParcelable(media::AudioPortFw* parcelable) const {
     AudioPort::writeToParcelable(parcelable);
     AudioPortConfig::writeToParcelable(&parcelable->sys.activeConfig.hal, useInputChannelMask());
     parcelable->hal.id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
@@ -196,7 +203,7 @@
     return OK;
 }
 
-status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
+status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPortFw& parcelable) {
     if (parcelable.sys.type != media::AudioPortType::DEVICE) {
         return BAD_VALUE;
     }
@@ -245,7 +252,7 @@
 }
 
 ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl) {
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl) {
     sp<DeviceDescriptorBase> result = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
     status_t status = result->readFromParcelable(aidl);
     if (status != OK) {
@@ -254,9 +261,9 @@
     return result;
 }
 
-ConversionResult<media::AudioPort>
+ConversionResult<media::AudioPortFw>
 legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy) {
-    media::AudioPort aidl;
+    media::AudioPortFw aidl;
     status_t status = legacy->writeToParcelable(&aidl);
     if (status != OK) {
         return base::unexpected(status);
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index efe8437..a4e271e 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -7,7 +7,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 6c01e29..2a14504 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -31,6 +31,7 @@
 using DeviceTypeSet = std::set<audio_devices_t>;
 using FormatSet = std::set<audio_format_t>;
 using SampleRateSet = std::set<uint32_t>;
+using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
 
 using FormatVector = std::vector<audio_format_t>;
 
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index b1235f5..77e58ed 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -19,8 +19,8 @@
 #include <string>
 #include <type_traits>
 
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
 #include <android/media/audio/common/ExtraAudioDescriptor.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
@@ -118,8 +118,8 @@
 
     bool equals(const sp<AudioPort>& other) const;
 
-    status_t writeToParcelable(media::AudioPort* parcelable) const;
-    status_t readFromParcelable(const media::AudioPort& parcelable);
+    status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
 
     AudioGains mGains; // gain controllers
     // Maximum number of input or output streams that can be simultaneously
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index dc2899a..501831d 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -18,7 +18,7 @@
 
 #include <vector>
 
-#include <android/media/AudioPort.h>
+#include <android/media/AudioPortFw.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioContainers.h>
@@ -79,8 +79,8 @@
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
-    status_t writeToParcelable(media::AudioPort* parcelable) const;
-    status_t readFromParcelable(const media::AudioPort& parcelable);
+    status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
 
 protected:
     AudioDeviceTypeAddr mDeviceTypeAddr;
@@ -116,8 +116,8 @@
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
 legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy);
 
 } // namespace android
diff --git a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
index 50d8dc8..e315858 100644
--- a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
+++ b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
@@ -117,7 +117,7 @@
     audioPort->setGains(getAudioGainsForTest());
     audioPort->setAudioProfiles(getAudioProfileVectorForTest());
 
-    media::AudioPort parcelable;
+    media::AudioPortFw parcelable;
     ASSERT_EQ(NO_ERROR, audioPort->writeToParcelable(&parcelable));
     sp<AudioPort> audioPortFromParcel = new AudioPort(
             "", AUDIO_PORT_TYPE_NONE, AUDIO_PORT_ROLE_NONE);
@@ -152,7 +152,7 @@
     ASSERT_EQ(desc->setEncapsulationMetadataTypes(
             AUDIO_ENCAPSULATION_METADATA_TYPE_ALL_POSITION_BITS), NO_ERROR);
 
-    media::AudioPort parcelable;
+    media::AudioPortFw parcelable;
     ASSERT_EQ(NO_ERROR, desc->writeToParcelable(&parcelable));
     sp<DeviceDescriptorBase> descFromParcel = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
     ASSERT_EQ(NO_ERROR, descFromParcel->readFromParcelable(parcelable));
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaudiohal/TEST_MAPPING
+++ b/media/libaudiohal/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index f6519b6..727e92f 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -132,6 +132,11 @@
         return INVALID_OPERATION;
     }
 
+    int32_t supportsBluetoothLatencyModes(bool* supports __unused) override {
+        // TODO: Implement the HAL query when moving to AIDL HAL.
+        return INVALID_OPERATION;
+    }
+
     status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
     error::Result<audio_hw_sync_t> getHwAvSync() override;
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index d27ad4c..6379521 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -128,6 +128,7 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos)  = 0;
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+    virtual int32_t supportsBluetoothLatencyModes(bool* supports) = 0;
 
     // Update the connection status of an external device.
     virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index e6fdb1d..6a39108 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -116,6 +116,9 @@
         track->mKeepContractedChannels = false;
     }
 
+    track->mInputFrameSize = audio_bytes_per_frame(
+            track->channelCount + track->mHapticChannelCount, track->mFormat);
+
     // channel masks have changed, does this track need a downmixer?
     // update to try using our desired format (if we aren't already using it)
     const status_t status = track->prepareForDownmix();
@@ -297,6 +300,26 @@
     return NO_ERROR;
 }
 
+void AudioMixer::Track::unprepareForTee() {
+    ALOGV("AudioMixer::%s", __func__);
+    if (mTeeBufferProvider.get() != nullptr) {
+        mTeeBufferProvider.reset(nullptr);
+        reconfigureBufferProviders();
+    }
+}
+
+status_t AudioMixer::Track::prepareForTee() {
+    ALOGV("AudioMixer::%s(%p) teeBuffer=%p", __func__, this, teeBuffer);
+    unprepareForTee();
+    if (teeBuffer != nullptr) {
+        mTeeBufferProvider.reset(new TeeBufferProvider(
+                mInputFrameSize, mInputFrameSize, kCopyBufferFrameCount,
+                (uint8_t*)teeBuffer, mTeeBufferFrameCount));
+        reconfigureBufferProviders();
+    }
+    return NO_ERROR;
+}
+
 void AudioMixer::Track::clearContractedBuffer()
 {
     if (mAdjustChannelsBufferProvider.get() != nullptr) {
@@ -305,10 +328,20 @@
     }
 }
 
+void AudioMixer::Track::clearTeeFrameCopied() {
+    if (mTeeBufferProvider.get() != nullptr) {
+        static_cast<TeeBufferProvider*>(mTeeBufferProvider.get())->clearFramesCopied();
+    }
+}
+
 void AudioMixer::Track::reconfigureBufferProviders()
 {
     // configure from upstream to downstream buffer providers.
     bufferProvider = mInputBufferProvider;
+    if (mTeeBufferProvider != nullptr) {
+        mTeeBufferProvider->setBufferProvider(bufferProvider);
+        bufferProvider = mTeeBufferProvider.get();
+    }
     if (mAdjustChannelsBufferProvider.get() != nullptr) {
         mAdjustChannelsBufferProvider->setBufferProvider(bufferProvider);
         bufferProvider = mAdjustChannelsBufferProvider.get();
@@ -420,6 +453,20 @@
                 track->mHapticMaxAmplitude = hapticMaxAmplitude;
             }
             } break;
+        case TEE_BUFFER:
+            if (track->teeBuffer != valueBuf) {
+                track->teeBuffer = valueBuf;
+                ALOGV("setParameter(TRACK, TEE_BUFFER, %p)", valueBuf);
+                track->prepareForTee();
+            }
+            break;
+        case TEE_BUFFER_FRAME_COUNT:
+            if (track->mTeeBufferFrameCount != valueInt) {
+                track->mTeeBufferFrameCount = valueInt;
+                ALOGV("setParameter(TRACK, TEE_BUFFER_FRAME_COUNT, %i)", valueInt);
+                track->prepareForTee();
+            }
+            break;
         default:
             LOG_ALWAYS_FATAL("setParameter track: bad param %d", param);
         }
@@ -500,6 +547,8 @@
         track->mReformatBufferProvider->reset();
     } else if (track->mAdjustChannelsBufferProvider.get() != nullptr) {
         track->mAdjustChannelsBufferProvider->reset();
+    } else if (track->mTeeBufferProvider.get() != nullptr) {
+        track->mTeeBufferProvider->reset();
     }
 
     track->mInputBufferProvider = bufferProvider;
@@ -543,6 +592,8 @@
     t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
     t->mAdjustOutChannelCount = t->channelCount;
     t->mKeepContractedChannels = false;
+    t->mInputFrameSize = audio_bytes_per_frame(
+            t->channelCount + t->mHapticChannelCount, t->mFormat);
     // Check the downmixing (or upmixing) requirements.
     status_t status = t->prepareForDownmix();
     if (status != OK) {
@@ -565,6 +616,7 @@
         if (t->mKeepContractedChannels) {
             t->clearContractedBuffer();
         }
+        t->clearTeeFrameCopied();
     }
 }
 
@@ -593,6 +645,10 @@
                 }
                 break;
             }
+            if (t->teeBuffer != nullptr && t->volumeRL == 0) {
+                // Need to mute tee
+                memset(t->teeBuffer, 0, t->mTeeBufferFrameCount * t->mInputFrameSize);
+            }
         }
     }
 }
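A minimal sketch, not part of this change, of how a mixer client might attach a tee buffer to a track with the new parameters; mixer, trackName, teeBuf and teeFrames are hypothetical caller-side values.

mixer->setParameter(trackName, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, teeBuf);
mixer->setParameter(trackName, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
        (void *)(uintptr_t)teeFrames);
// From then on the track's pre-mix input is copied into teeBuf by TeeBufferProvider,
// and the copy is cleared when the track is muted (volumeRL == 0).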
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index f30eb54..427bd55 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -143,6 +143,7 @@
         // setParameter(name, TRACK, MAIN_BUFFER, mixBuffer) is required before enable(name)
         t->mainBuffer = NULL;
         t->auxBuffer = NULL;
+        t->teeBuffer = nullptr;
         t->mMixerFormat = AUDIO_FORMAT_PCM_16_BIT;
         t->mFormat = format;
         t->mMixerInFormat = kUseFloat && kUseNewMixer ?
@@ -150,6 +151,8 @@
         t->mMixerChannelMask = audio_channel_mask_from_representation_and_bits(
                 AUDIO_CHANNEL_REPRESENTATION_POSITION, AUDIO_CHANNEL_OUT_STEREO);
         t->mMixerChannelCount = audio_channel_count_from_out_mask(t->mMixerChannelMask);
+        t->mTeeBufferFrameCount = 0;
+        t->mInputFrameSize = audio_bytes_per_frame(t->channelCount, t->mFormat);
         status_t status = postCreateTrack(t.get());
         if (status != OK) return status;
         mTracks[name] = t;
@@ -176,6 +179,7 @@
     track->channelCount = trackChannelCount;
     track->mMixerChannelMask = mixerChannelMask;
     track->mMixerChannelCount = mixerChannelCount;
+    track->mInputFrameSize = audio_bytes_per_frame(track->channelCount, track->mFormat);
 
     // Resampler channels may have changed.
     track->recreateResampler(mSampleRate);
@@ -401,6 +405,20 @@
                 invalidate();
             }
             } break;
+        case TEE_BUFFER:
+            if (track->teeBuffer != valueBuf) {
+                track->teeBuffer = valueBuf;
+                ALOGV("setParameter(TRACK, TEE_BUFFER, %p)", valueBuf);
+                invalidate();
+            }
+            break;
+        case TEE_BUFFER_FRAME_COUNT:
+            if (track->mTeeBufferFrameCount != valueInt) {
+                track->mTeeBufferFrameCount = valueInt;
+                ALOGV("setParameter(TRACK, TEE_BUFFER_FRAME_COUNT, %i)", valueInt);
+                invalidate();
+            }
+            break;
         default:
             LOG_ALWAYS_FATAL("setParameter track: bad param %d", param);
         }
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index 4658db8..a9944fb 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -739,5 +739,21 @@
     mContractedWrittenFrames = 0;
     CopyBufferProvider::reset();
 }
+
+void TeeBufferProvider::copyFrames(void *dst, const void *src, size_t frames) {
+    memcpy(dst, src, frames * mInputFrameSize);
+    if (int teeBufferFrameLeft = mTeeBufferFrameCount - mFrameCopied; teeBufferFrameLeft < frames) {
+        ALOGW("Unable to copy all frames to tee buffer, %d frames dropped",
+              (int)frames - teeBufferFrameLeft);
+        frames = teeBufferFrameLeft;
+    }
+    memcpy(mTeeBuffer + mFrameCopied * mInputFrameSize, src, frames * mInputFrameSize);
+    mFrameCopied += frames;
+}
+
+void TeeBufferProvider::clearFramesCopied() {
+    mFrameCopied = 0;
+}
+
 // ----------------------------------------------------------------------------
 } // namespace android
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaudioprocessing/TEST_MAPPING
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 2993a60..b39fb92 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -96,7 +96,10 @@
         void        unprepareForReformat();
         status_t    prepareForAdjustChannels(size_t frames);
         void        unprepareForAdjustChannels();
+        void        unprepareForTee();
+        status_t    prepareForTee();
         void        clearContractedBuffer();
+        void        clearTeeFrameCopied();
         bool        setPlaybackRate(const AudioPlaybackRate &playbackRate);
         void        reconfigureBufferProviders();
 
@@ -108,20 +111,22 @@
          * all pre-mixer track buffer conversions outside the AudioMixer class.
          *
          * 1) mInputBufferProvider: The AudioTrack buffer provider.
-         * 2) mAdjustChannelsBufferProvider: Expands or contracts sample data from one interleaved
+         * 2) mTeeBufferProvider: If not NULL, copies the input data to the tee buffer.
+         * 3) mAdjustChannelsBufferProvider: Expands or contracts sample data from one interleaved
          *    channel format to another. Expanded channels are filled with zeros and put at the end
          *    of each audio frame. Contracted channels are copied to the end of the buffer.
-         * 3) mReformatBufferProvider: If not NULL, performs the audio reformat to
+         * 4) mReformatBufferProvider: If not NULL, performs the audio reformat to
          *    match either mMixerInFormat or mDownmixRequiresFormat, if the downmixer
          *    requires reformat. For example, it may convert floating point input to
          *    PCM_16_bit if that's required by the downmixer.
-         * 4) mDownmixerBufferProvider: If not NULL, performs the channel remixing to match
+         * 5) mDownmixerBufferProvider: If not NULL, performs the channel remixing to match
          *    the number of channels required by the mixer sink.
-         * 5) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from
+         * 6) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from
          *    the downmixer requirements to the mixer engine input requirements.
-         * 6) mTimestretchBufferProvider: Adds timestretching for playback rate
+         * 7) mTimestretchBufferProvider: Adds timestretching for playback rate
          */
         AudioBufferProvider* mInputBufferProvider;    // externally provided buffer provider.
+        std::unique_ptr<PassthruBufferProvider> mTeeBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mAdjustChannelsBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mReformatBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mDownmixerBufferProvider;
diff --git a/media/libaudioprocessing/include/media/AudioMixerBase.h b/media/libaudioprocessing/include/media/AudioMixerBase.h
index 3419816..4bd85d8 100644
--- a/media/libaudioprocessing/include/media/AudioMixerBase.h
+++ b/media/libaudioprocessing/include/media/AudioMixerBase.h
@@ -68,6 +68,10 @@
         // 0x4004 reserved
         MIXER_FORMAT    = 0x4005, // AUDIO_FORMAT_PCM_(FLOAT|16_BIT)
         MIXER_CHANNEL_MASK = 0x4006, // Channel mask for mixer output
+        // 0x4007, 0x4008, 0x4009 are reserved for the haptic parameters defined in AudioMixer.h
+        TEE_BUFFER = 0x400A,
+        TEE_BUFFER_FORMAT = 0x400B,
+        TEE_BUFFER_FRAME_COUNT = 0x400C,
         // for target RESAMPLE
         SAMPLE_RATE     = 0x4100, // Configure sample rate conversion on this track name;
                                   // parameter 'value' is the new sample rate in Hz.
@@ -271,6 +275,7 @@
         uint32_t    sampleRate;
         int32_t*    mainBuffer;
         int32_t*    auxBuffer;
+        int32_t*    teeBuffer;
 
         int32_t     sessionId;
 
@@ -290,6 +295,10 @@
         audio_channel_mask_t mMixerChannelMask;
         uint32_t             mMixerChannelCount;
 
+        int32_t        mTeeBufferFrameCount;
+
+        uint32_t       mInputFrameSize; // The track input frame size, used for tee buffer
+
       protected:
 
         // hooks
diff --git a/media/libaudioprocessing/include/media/BufferProviders.h b/media/libaudioprocessing/include/media/BufferProviders.h
index b3ab8a5..7a41002 100644
--- a/media/libaudioprocessing/include/media/BufferProviders.h
+++ b/media/libaudioprocessing/include/media/BufferProviders.h
@@ -279,6 +279,27 @@
     size_t               mContractedWrittenFrames;
     size_t               mContractedOutputFrameSize; // contracted output frame size
 };
+
+class TeeBufferProvider : public CopyBufferProvider {
+public:
+    TeeBufferProvider(
+            size_t inputFrameSize, size_t outputFrameSize,
+            size_t bufferFrameCount, uint8_t* teeBuffer, int teeBufferFrameCount)
+            : CopyBufferProvider(inputFrameSize, outputFrameSize, bufferFrameCount),
+              mTeeBuffer(teeBuffer), mTeeBufferFrameCount(teeBufferFrameCount),
+              mFrameCopied(0) {};
+
+    void copyFrames(void *dst, const void *src, size_t frames) override;
+
+    void clearFramesCopied();
+
+protected:
+    AudioBufferProvider *mTrackBufferProvider;
+    uint8_t* mTeeBuffer;
+    const int mTeeBufferFrameCount;
+    int mFrameCopied;
+};
+
 // ----------------------------------------------------------------------------
 } // namespace android
 
diff --git a/media/libaudiousecasevalidation/Android.bp b/media/libaudiousecasevalidation/Android.bp
new file mode 100644
index 0000000..3ee7e32
--- /dev/null
+++ b/media/libaudiousecasevalidation/Android.bp
@@ -0,0 +1,49 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libaudiousecasevalidation",
+    host_supported: true,
+    srcs: [
+        "UsecaseLookup.cpp",
+        "UsecaseValidator.cpp",
+    ],
+    header_libs: [
+        "liberror_headers",
+    ],
+    shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libaudioutils",
+        "libbase",
+        "liblog",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_test_host {
+    name: "libaudiousecasevalidation-test",
+    srcs: [
+        "tests/UsecaseValidator-test.cpp",
+    ],
+    header_libs: [
+        "liberror_headers",
+    ],
+    shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libaudiousecasevalidation",
+        "libutils",
+    ],
+}
diff --git a/media/libaudiousecasevalidation/UsecaseLookup.cpp b/media/libaudiousecasevalidation/UsecaseLookup.cpp
new file mode 100644
index 0000000..01e667f
--- /dev/null
+++ b/media/libaudiousecasevalidation/UsecaseLookup.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "UsecaseLookup"
+// #define LOG_NDEBUG 0
+
+#include "media/UsecaseLookup.h"
+
+#include <utils/Log.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Add streamId and outputFlags to stream list.
+ */
+void UsecaseLookup::addStream(STREAMID streamId, bool outputFlagGame) {
+    ALOGV("%s streamId: %d outputFlagGame: %d", __func__, streamId, outputFlagGame);
+
+    mutex_lock lock(m_mutex);
+    m_streams[streamId] = outputFlagGame;
+}
+
+/**
+ * Remove streamId from stream list.
+ */
+void UsecaseLookup::removeStream(STREAMID streamId) {
+    ALOGV("%s streamId: %d ", __func__, streamId);
+
+    mutex_lock lock(m_mutex);
+    m_streams.erase(streamId);
+
+    // Tracks should not outlive their stream, but clean up any stale entries just in case.
+    for (auto it = m_tracks.begin(); it != m_tracks.end();) {
+        if (it->second == streamId) {
+            it = m_tracks.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
+
+/**
+ * Add streamId and portId to track list.
+ */
+void UsecaseLookup::addTrack(STREAMID streamId, PORTID portId) {
+    ALOGV("%s streamId: %d portId: %d", __func__, streamId, portId);
+
+    mutex_lock lock(m_mutex);
+
+    if (m_tracks.find(portId) == m_tracks.end()) {
+        m_tracks[portId] = streamId;
+    }
+}
+
+/**
+ * Remove streamId and portId from track list.
+ */
+void UsecaseLookup::removeTrack(STREAMID streamId, PORTID portId) {
+    ALOGV("%s streamId: %d portId: %d", __func__, streamId, portId);
+
+    mutex_lock lock(m_mutex);
+    auto it = m_tracks.find(portId);
+
+    if (it != m_tracks.end() && it->second == streamId) {
+        m_tracks.erase(portId);
+    }
+}
+
+/**
+ * Check if stream list contains streamId with Game outputFlag.
+ */
+bool UsecaseLookup::isGameStream(STREAMID streamId) {
+    ALOGV("%s streamId: %d ", __func__, streamId);
+    mutex_lock lock(m_mutex);
+    auto it = m_streams.find(streamId);
+
+    return (it != m_streams.end()) ? it->second : false;
+}
+
+}  // namespace media
+}  // namespace android
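
The lookup above is just two maps guarded by a mutex: streams carry a "game output" flag, and tracks map a portId back to their stream. A short illustrative sketch of the intended call sequence (the ids are arbitrary example values, not taken from any caller in this change):

    #include "media/UsecaseLookup.h"

    using android::media::UsecaseLookup;

    void exampleLookupUsage() {
        UsecaseLookup lookup;
        lookup.addStream(/*streamId=*/10, /*outputFlagGame=*/true);  // e.g. a FAST/MMAP output
        lookup.addTrack(/*streamId=*/10, /*portId=*/1001);           // track attached to stream 10

        const bool game = lookup.isGameStream(10);  // true: stream 10 is flagged as a game output
        (void)game;

        lookup.removeTrack(10, 1001);
        lookup.removeStream(10);  // also erases any tracks still mapped to stream 10
    }
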
diff --git a/media/libaudiousecasevalidation/UsecaseValidator.cpp b/media/libaudiousecasevalidation/UsecaseValidator.cpp
new file mode 100644
index 0000000..0e5a824
--- /dev/null
+++ b/media/libaudiousecasevalidation/UsecaseValidator.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "UsecaseValidator"
+// #define LOG_NDEBUG 0
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "media/UsecaseValidator.h"
+#include "media/UsecaseLookup.h"
+
+namespace android {
+namespace media {
+namespace {
+
+class UsecaseValidatorImpl : public UsecaseValidator {
+ public:
+    UsecaseValidatorImpl() {}
+
+    /**
+     * Register a new mixer/stream.
+     * Called when the stream is opened at the HAL and communicates
+     * immutable stream attributes like flags, sampling rate, format.
+     */
+    status_t registerStream(audio_io_handle_t streamId,
+                            const audio_config_base_t& audioConfig __attribute__((unused)),
+                            const audio_output_flags_t outputFlags) override {
+        ALOGV("%s output: %d flags: %#x", __func__, streamId, outputFlags);
+
+        // Check if FAST or MMAP output flag has been set.
+        bool outputFlagGame = outputFlags & (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+        m_lookup.addStream(streamId, outputFlagGame);
+        return OK;
+    };
+
+    /**
+     * Unregister a stream/mixer.
+     * Called when the stream is closed.
+     */
+    status_t unregisterStream(audio_io_handle_t streamId) override {
+        ALOGV("%s output: %d", __func__, streamId);
+
+        m_lookup.removeStream(streamId);
+        return OK;
+    };
+
+    /**
+     * Indicates that some playback activity started on the stream.
+     * Called each time an audio track starts or resumes.
+     */
+    error::Result<audio_attributes_t> startClient(audio_io_handle_t streamId,
+            audio_port_handle_t portId, const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes,
+            const AttributesChangedCallback *callback __attribute__((unused))) override {
+        ALOGV("%s output: %d portId: %d usage: %d pid: %d package: %s",
+                __func__, streamId, portId, attributes.usage, attributionSource.pid,
+                attributionSource.packageName.value_or("").c_str());
+
+        m_lookup.addTrack(streamId, portId);
+
+        return verifyAudioAttributes(streamId, attributionSource, attributes);
+    };
+
+    /**
+     * Indicates that some playback activity stopped on the stream.
+     * Called each time an audio track stops or pauses.
+     */
+    status_t stopClient(audio_io_handle_t streamId, audio_port_handle_t portId) override {
+        ALOGV("%s output: %d portId: %d", __func__, streamId, portId);
+
+        m_lookup.removeTrack(streamId, portId);
+        return OK;
+    };
+
+    /**
+     * Called to verify and update audio attributes for a track that is connected
+     * to the specified stream.
+     */
+    error::Result<audio_attributes_t> verifyAudioAttributes(audio_io_handle_t streamId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes) override {
+        ALOGV("%s output: %d usage: %d pid: %d package: %s",
+                __func__, streamId, attributes.usage, attributionSource.pid,
+                attributionSource.packageName.value_or("").c_str());
+
+        audio_attributes_t attrRet = attributes;
+
+        // Check if attribute usage media or unknown has been set.
+        bool isUsageValid = this->isUsageValid(attributes);
+
+        if (isUsageValid && m_lookup.isGameStream(streamId)) {
+            ALOGI("%s update usage: %d to AUDIO_USAGE_GAME for output: %d pid: %d package: %s",
+                    __func__, attributes.usage, streamId, attributionSource.pid,
+                    attributionSource.packageName.value_or("").c_str());
+            // Set attribute usage Game.
+            attrRet.usage = AUDIO_USAGE_GAME;
+        }
+
+        return {attrRet};
+    };
+
+ protected:
+    /**
+     * Check if the attribute usage is valid.
+     */
+    bool isUsageValid(const audio_attributes_t& attr) {
+        ALOGV("isUsageValid attr.usage: %d", attr.usage);
+        switch (attr.usage) {
+            case AUDIO_USAGE_MEDIA:
+            case AUDIO_USAGE_UNKNOWN:
+                return true;
+            default:
+                break;
+        }
+        return false;
+    }
+
+ protected:
+    UsecaseLookup m_lookup;
+};
+
+}  // namespace
+
+std::unique_ptr<UsecaseValidator> createUsecaseValidator() {
+    return std::make_unique<UsecaseValidatorImpl>();
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libaudiousecasevalidation/include/media/UsecaseLookup.h b/media/libaudiousecasevalidation/include/media/UsecaseLookup.h
new file mode 100644
index 0000000..a35d88d
--- /dev/null
+++ b/media/libaudiousecasevalidation/include/media/UsecaseLookup.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+
+namespace android {
+namespace media {
+
+typedef int STREAMID;
+typedef int PORTID;
+
+// Map of streamId to game output flag state.
+typedef std::map<STREAMID, bool> STREAMLIST;
+// Map of portId to streamId.
+typedef std::map<PORTID, STREAMID> TRACKLIST;
+typedef std::lock_guard<std::mutex> mutex_lock;
+
+class UsecaseLookup {
+ public:
+    UsecaseLookup() { }
+    virtual ~UsecaseLookup() { }
+
+    // Required for testing.
+    void clear() {
+        m_streams.clear();
+        m_tracks.clear();
+    }
+
+    /**
+     * Add streamId and outputFlag to stream list.
+     */
+    void addStream(STREAMID streamId, bool outputFlagGame = false);
+
+    /**
+     * Remove streamId from stream list.
+     */
+    void removeStream(STREAMID streamId);
+
+    /**
+     * Add streamId and portId to track list.
+     */
+    void addTrack(STREAMID streamId, PORTID portId);
+
+    /**
+     * Remove streamId and portId from track list.
+     */
+    void removeTrack(STREAMID streamId, PORTID portId);
+
+    /**
+     * Check if stream list contains streamId with Game output flag.
+     */
+    bool isGameStream(STREAMID streamId);
+
+ protected:
+    STREAMLIST m_streams;
+    TRACKLIST m_tracks;
+    std::mutex m_mutex;
+};
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
diff --git a/media/libaudiousecasevalidation/include/media/UsecaseValidator.h b/media/libaudiousecasevalidation/include/media/UsecaseValidator.h
new file mode 100644
index 0000000..2e1d7f4
--- /dev/null
+++ b/media/libaudiousecasevalidation/include/media/UsecaseValidator.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
+
+#pragma once
+
+#include <error/Result.h>
+#include <system/audio.h>
+#include <android/content/AttributionSourceState.h>
+
+#include <limits>
+#include <memory>
+
+namespace android {
+namespace media {
+
+/**
+ * Main entry-point for this library.
+ */
+class UsecaseValidator {
+ public:
+    virtual ~UsecaseValidator() = default;
+
+    /**
+     * A callback called by the module when the audio attributes for
+     * an active portId change.
+     */
+    class AttributesChangedCallback {
+     public:
+        virtual ~AttributesChangedCallback() = default;
+        virtual void onAttributesChanged(audio_port_handle_t portId,
+                                         const audio_attributes_t& attributes) = 0;
+    };
+
+    /**
+     * Register a new mixer/stream.
+     * Called when the stream is opened at the HAL and communicates
+     * immutable stream attributes like flags, sampling rate, format.
+     */
+    virtual status_t registerStream(audio_io_handle_t streamId,
+                                    const audio_config_base_t& audioConfig,
+                                    const audio_output_flags_t outputFlags) = 0;
+
+    /**
+     * Unregister a stream/mixer.
+     * Called when the stream is closed.
+     */
+    virtual status_t unregisterStream(audio_io_handle_t streamId) = 0;
+
+    /**
+     * Indicates that some playback activity started on the stream.
+     * Called each time an audio track starts or resumes.
+     */
+    virtual error::Result<audio_attributes_t> startClient(audio_io_handle_t streamId,
+            audio_port_handle_t portId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes,
+            const AttributesChangedCallback *callback) = 0;
+
+    /**
+     * Indicates that some playback activity stopped on the stream.
+     * Called each time an audio track stops or pauses.
+     */
+    virtual status_t stopClient(audio_io_handle_t streamId, audio_port_handle_t portId) = 0;
+
+    /**
+     * Called to verify and update audio attributes for a track that is connected
+     * to the specified stream.
+     */
+    virtual error::Result<audio_attributes_t> verifyAudioAttributes(audio_io_handle_t streamId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes) = 0;
+};
+
+/**
+ * Creates an instance of the default implementation of the UsecaseValidator interface.
+ */
+std::unique_ptr<UsecaseValidator> createUsecaseValidator();
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
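
Taken together, the interface above is used as register stream, start client, optionally verify attributes, stop client, unregister stream. A hedged end-to-end sketch mirroring what the unit tests later in this change do (stream and port ids are arbitrary example values):

    #include "media/UsecaseValidator.h"

    using namespace android;
    using namespace android::media;

    void exampleValidatorUsage() {
        std::unique_ptr<UsecaseValidator> validator = createUsecaseValidator();

        // A FAST or MMAP output is treated as a potential game stream.
        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
        validator->registerStream(/*streamId=*/10, config, AUDIO_OUTPUT_FLAG_FAST);

        // A client starting with USAGE_MEDIA on that stream gets promoted to USAGE_GAME.
        content::AttributionSourceState attributionSource;
        audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
        attributes.usage = AUDIO_USAGE_MEDIA;
        auto result = validator->startClient(/*streamId=*/10, /*portId=*/1001,
                                             attributionSource, attributes, /*callback=*/nullptr);
        const audio_attributes_t updated = result.value();  // updated.usage == AUDIO_USAGE_GAME
        (void)updated;

        validator->stopClient(/*streamId=*/10, /*portId=*/1001);
        validator->unregisterStream(/*streamId=*/10);
    }
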
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
new file mode 100644
index 0000000..d92c8ba
--- /dev/null
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "tests/UsecaseValidator-test.h"
+
+#include <gtest/gtest.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Helper test functions.
+ */
+
+/**
+ * Register a mock stream.
+ */
+audio_io_handle_t UsecaseValidatorTest::testRegisterStream(bool outputFlagGame) {
+    static int streamId = 0;
+    status_t result;
+    static audio_config_base_t audioConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    audio_output_flags_t outputFlags = outputFlagGame ? GAME_OUTPUT_FLAGS : MEDIA_OUTPUT_FLAGS;
+
+    result = m_validator->registerStream(++streamId, audioConfig, outputFlags);
+
+    return result == OK ? streamId : 0;
+}
+
+/**
+ * Create a mock portId.
+ */
+audio_port_handle_t UsecaseValidatorTest::testCreatePortId(audio_io_handle_t streamId) {
+    static int portId = 0;
+
+    return (streamId << 8) | (++portId);
+}
+
+/**
+ * Add a mock portId to a stream and verify.
+ */
+error::Result<audio_attributes_t> UsecaseValidatorTest::testStartClient(audio_io_handle_t streamId,
+        audio_port_handle_t portId,
+        audio_usage_t usage) {
+    content::AttributionSourceState attributionSource;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = usage;
+
+    return m_validator->startClient(streamId, portId, attributionSource, attributes, NULL);
+}
+
+/**
+ * Verify a mock stream.
+ */
+error::Result<audio_attributes_t> UsecaseValidatorTest::testVerifyAudioAttributes(
+        audio_io_handle_t streamId,
+        audio_usage_t usage) {
+    content::AttributionSourceState attributionSource;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = usage;
+
+    return m_validator->verifyAudioAttributes(streamId, attributionSource, attributes);
+}
+
+/**
+ * Test functions.
+ */
+
+/**
+ * Test adding and removing streams.
+ */
+TEST_F(UsecaseLookupTest, testAddAndRemoveStream) {
+    addStream(1, false);
+    addStream(2, true);
+
+    EXPECT_NE(m_streams.find(1), m_streams.end());
+    EXPECT_NE(m_streams.find(2), m_streams.end());
+    EXPECT_EQ(m_streams.find(3), m_streams.end());
+
+    EXPECT_FALSE(isGameStream(1));
+    EXPECT_TRUE(isGameStream(2));
+    EXPECT_FALSE(isGameStream(3));
+
+    removeStream(2);
+
+    EXPECT_FALSE(isGameStream(2));
+}
+
+/**
+ * Verify attributes usage for stream.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsage) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(gameStreamId, 0);
+    EXPECT_NE(mediaStreamId, 0);
+    EXPECT_NE(gameStreamId, mediaStreamId);
+
+    // Verify attributes on game stream.
+    auto attr = testVerifyAudioAttributes(gameStreamId, AUDIO_USAGE_GAME);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    // Verify attributes on media stream.
+    attr = testVerifyAudioAttributes(mediaStreamId, AUDIO_USAGE_MEDIA);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Test hanging client.
+ */
+TEST_F(UsecaseValidatorTest, testHangingClient) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+    audio_port_handle_t gamePortId, mediaPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(mediaStreamId, 0);
+
+    // Assign portId.
+    gamePortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(gamePortId, 0);
+    mediaPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(mediaPortId, 0);
+
+    // Start client on game stream.
+    testStartClient(gameStreamId, gamePortId, AUDIO_USAGE_GAME);
+
+    // Start client on media stream.
+    testStartClient(mediaStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+
+    // Unregister both streams without calling stopClient to simulate hanging clients.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Verify attributes usage does not change.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnchanged) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+    audio_port_handle_t gamePortId, mediaPortId, unknownPortId, voiceCommPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(mediaStreamId, 0);
+
+    // Assign portId.
+    gamePortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(gamePortId, 0);
+    mediaPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(unknownPortId, 0);
+    voiceCommPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(voiceCommPortId, 0);
+
+    // Verify attributes on game stream.
+    auto attr = testStartClient(gameStreamId, gamePortId, AUDIO_USAGE_GAME);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    attr = testStartClient(gameStreamId, voiceCommPortId, AUDIO_USAGE_VOICE_COMMUNICATION);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_VOICE_COMMUNICATION);
+
+    // Verify attributes on media stream.
+    attr = testStartClient(mediaStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    attr = testStartClient(mediaStreamId, unknownPortId, AUDIO_USAGE_UNKNOWN);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_UNKNOWN);
+
+    // Stop client on game and media stream.
+    EXPECT_EQ(m_validator->stopClient(gameStreamId, gamePortId), 0);
+    EXPECT_EQ(m_validator->stopClient(mediaStreamId, mediaPortId), 0);
+
+    // Unregister game and media stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Verify attributes usage changes.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageChanged) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    // Verify attributes on game stream.
+    auto attr = testStartClient(gameStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    attr = testStartClient(gameStreamId, unknownPortId, AUDIO_USAGE_UNKNOWN);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
new file mode 100644
index 0000000..3159ab4
--- /dev/null
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
+
+#include <gtest/gtest.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+
+#include "media/UsecaseLookup.h"
+#include "media/UsecaseValidator.h"
+
+namespace android {
+namespace media {
+
+#define MEDIA_OUTPUT_FLAGS (audio_output_flags_t)(0xFFFFF &\
+                                ~(AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ))
+
+#define GAME_OUTPUT_FLAGS (audio_output_flags_t)\
+                                (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)
+
+class TestCallback : public UsecaseValidator::AttributesChangedCallback {
+ public:
+    TestCallback() {
+        m_iCallCnt = 0;
+    }
+    virtual ~TestCallback() { }
+    virtual void onAttributesChanged(audio_port_handle_t /*portId*/,
+                                     const audio_attributes_t& /*attributes*/) {
+        ++m_iCallCnt;
+    }
+
+ public:
+    int m_iCallCnt;
+};
+
+class UsecaseLookupTest : public UsecaseLookup, public ::testing::Test {
+ public:
+    UsecaseLookupTest() { }
+    virtual ~UsecaseLookupTest() = default;
+};
+
+class UsecaseValidatorTest : public ::testing::Test {
+ public:
+    UsecaseValidatorTest() {
+        m_validator = createUsecaseValidator();
+    }
+
+    virtual ~UsecaseValidatorTest() = default;
+
+ protected:
+    audio_io_handle_t testRegisterStream(bool outputFlagGame);
+    audio_port_handle_t testCreatePortId(audio_io_handle_t streamId);
+    error::Result<audio_attributes_t> testStartClient(audio_io_handle_t streamId,
+                                                      audio_port_handle_t portId,
+                                                      audio_usage_t usage);
+    error::Result<audio_attributes_t> testVerifyAudioAttributes(audio_io_handle_t streamId,
+                                                                audio_usage_t usage);
+
+    std::unique_ptr<UsecaseValidator> m_validator;
+};
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index 3137e13..d8cf20e 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -22,6 +22,7 @@
 
 #include <algorithm>
 #include <memory>
+#include <sstream>
 #include <string>
 #include <utility>
 
@@ -94,6 +95,33 @@
     return defaultValue;
 }
 
+std::string hapticParamToString(const struct HapticGeneratorParam& param) {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Parameters:\n";
+    ss << "\t\t- resonant frequency: " << param.resonantFrequency << '\n';
+    ss << "\t\t- bpf Q: " << param.bpfQ << '\n';
+    ss << "\t\t- slow env normalization power: " << param.slowEnvNormalizationPower << '\n';
+    ss << "\t\t- bsf zero Q: " << param.bsfZeroQ << '\n';
+    ss << "\t\t- bsf pole Q: " << param.bsfPoleQ << '\n';
+    ss << "\t\t- distortion corner frequency: " << param.distortionCornerFrequency << '\n';
+    ss << "\t\t- distortion input gain: " << param.distortionInputGain << '\n';
+    ss << "\t\t- distortion cube threshold: " << param.distortionCubeThreshold << '\n';
+    ss << "\t\t- distortion output gain: " << param.distortionOutputGain << '\n';
+    return ss.str();
+}
+
+std::string hapticSettingToString(const struct HapticGeneratorParam& param) {
+    std::stringstream ss;
+    ss << "\t\tHaptic setting:\n";
+    ss << "\t\t- tracks intensity map:\n";
+    for (const auto&[id, intensity] : param.id2Intensity) {
+        ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity << '\n';
+    }
+    ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+    ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
+    return ss.str();
+}
+
 int HapticGenerator_Init(struct HapticGeneratorContext *context) {
     context->itfe = &gHapticGeneratorInterface;
 
@@ -129,7 +157,7 @@
     context->param.distortionCubeThreshold = 0.1f;
     context->param.distortionOutputGain = getFloatProperty(
             "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
-    ALOGD("Using distortion output gain as %f", context->param.distortionOutputGain);
+    ALOGD("%s\n%s", __func__, hapticParamToString(context->param).c_str());
 
     context->state = HAPTICGENERATOR_STATE_INITIALIZED;
     return 0;
@@ -289,6 +317,7 @@
         }
         int id = *(int *) value;
         os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+        ALOGD("Setting haptic intensity as %d", hapticIntensity);
         if (hapticIntensity == os::HapticScale::MUTE) {
             context->param.id2Intensity.erase(id);
         } else {
@@ -313,6 +342,10 @@
         context->param.bsfZeroQ = isnan(qFactor) ? DEFAULT_BSF_POLE_Q : qFactor;
         context->param.bsfPoleQ = context->param.bsfZeroQ / 2.0f;
         context->param.maxHapticAmplitude = maxAmplitude;
+        ALOGD("Updating vibrator info, resonantFrequency=%f, bsfZeroQ=%f, bsfPoleQ=%f, "
+              "maxHapticAmplitude=%f",
+              context->param.resonantFrequency, context->param.bsfZeroQ, context->param.bsfPoleQ,
+              context->param.maxHapticAmplitude);
 
         if (context->processorsRecord.bpf != nullptr) {
             context->processorsRecord.bpf->setCoefficients(
@@ -358,6 +391,11 @@
     return in;
 }
 
+void HapticGenerator_Dump(int32_t fd, const struct HapticGeneratorParam& param) {
+    dprintf(fd, "%s", hapticParamToString(param).c_str());
+    dprintf(fd, "%s", hapticSettingToString(param).c_str());
+}
+
 } // namespace (anonymous)
 
 //-----------------------------------------------------------------------------
@@ -562,6 +600,10 @@
         case EFFECT_CMD_SET_AUDIO_MODE:
             break;
 
+        case EFFECT_CMD_DUMP:
+            HapticGenerator_Dump(*(reinterpret_cast<int32_t*>(cmdData)), context->param);
+            break;
+
         default:
             ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
             return -EINVAL;
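
For reference, EFFECT_CMD_DUMP carries the destination file descriptor in the command payload, which the new case above forwards to HapticGenerator_Dump(). A hedged caller-side sketch, assuming the standard effect control interface from hardware/audio_effect.h (handle acquisition and error handling are omitted, and the function name is illustrative):

    #include <hardware/audio_effect.h>

    // Hypothetical sketch: ask a haptic generator instance to dump its state into fd.
    void dumpHapticGeneratorEffect(effect_handle_t handle, int fd) {
        int32_t cmdData = fd;      // EFFECT_CMD_DUMP passes the fd as the command data
        uint32_t replySize = 0;    // no reply payload is expected for a dump
        (*handle)->command(handle, EFFECT_CMD_DUMP,
                           sizeof(cmdData), &cmdData,
                           &replySize, nullptr);
    }
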
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 1b8656d..7c78900 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -344,19 +344,22 @@
     mFrameDecoded = false;
     mFrameMemory.clear();
 
-    mRetriever = new MediaMetadataRetriever();
-    status_t err = mRetriever->setDataSource(mDataSource, "image/heif");
+    sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
+    status_t err = retriever->setDataSource(mDataSource, "image/heif");
     if (err != OK) {
         ALOGE("failed to set data source!");
-
         mRetriever.clear();
         mDataSource.clear();
         return false;
     }
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever = retriever;
+    }
     ALOGV("successfully set data source.");
 
-    const char* hasImage = mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
-    const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
+    const char* hasImage = retriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
+    const char* hasVideo = retriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
 
     mHasImage = hasImage && !strcasecmp(hasImage, "yes");
     mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
@@ -364,7 +367,7 @@
     HeifFrameInfo* defaultInfo = nullptr;
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        sp<IMemory> sharedMem = mRetriever->getImageAtIndex(
+        sp<IMemory> sharedMem = retriever->getImageAtIndex(
                 -1, mOutputColor, true /*metaOnly*/);
 
         if (sharedMem == nullptr || sharedMem->unsecurePointer() == nullptr) {
@@ -399,7 +402,7 @@
     }
 
     if (mHasVideo) {
-        sp<IMemory> sharedMem = mRetriever->getFrameAtTime(0,
+        sp<IMemory> sharedMem = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                 mOutputColor, true /*metaOnly*/);
 
@@ -425,7 +428,7 @@
 
         initFrameInfo(&mSequenceInfo, videoFrame);
 
-        const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+        const char* frameCount = retriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
         if (frameCount == nullptr) {
             android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
             ALOGD("No valid sequence information in metadata");
@@ -511,14 +514,26 @@
 }
 
 bool HeifDecoderImpl::decodeAsync() {
+    wp<MediaMetadataRetriever> weakRetriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        weakRetriever = mRetriever;
+    }
+
     for (size_t i = 1; i < mNumSlices; i++) {
+        sp<MediaMetadataRetriever> retriever = weakRetriever.promote();
+        if (retriever == nullptr) {
+            return false;
+        }
+
         ALOGV("decodeAsync(): decoding slice %zu", i);
         size_t top = i * mSliceHeight;
         size_t bottom = (i + 1) * mSliceHeight;
         if (bottom > mImageInfo.mHeight) {
             bottom = mImageInfo.mHeight;
         }
-        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+
+        sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                 -1, mOutputColor, 0, top, mImageInfo.mWidth, bottom);
         {
             Mutex::Autolock autolock(mLock);
@@ -534,10 +549,13 @@
             mScanlineReady.signal();
         }
     }
-    // Aggressive clear to avoid holding on to resources
-    mRetriever.clear();
-
     // Hold on to mDataSource in case the client wants to redecode.
+
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever.clear();
+    }
+
     return false;
 }
 
@@ -549,6 +567,17 @@
         return true;
     }
 
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        if (mRetriever == nullptr) {
+            ALOGE("Failed to get MediaMetadataRetriever!");
+            return false;
+        }
+
+        retriever = mRetriever;
+    }
+
     // See if we want to decode in slices to allow client to start
     // scanline processing in parallel with decode. If this fails
     // we fallback to decoding the full frame.
@@ -563,7 +592,7 @@
 
         if (mNumSlices > 1) {
             // get first slice and metadata
-            sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+            sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                     -1, mOutputColor, 0, 0, mImageInfo.mWidth, mSliceHeight);
 
             if (frameMemory == nullptr || frameMemory->unsecurePointer() == nullptr) {
@@ -598,9 +627,9 @@
 
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
+        mFrameMemory = retriever->getImageAtIndex(-1, mOutputColor);
     } else if (mHasVideo) {
-        mFrameMemory = mRetriever->getFrameAtTime(0,
+        mFrameMemory = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
     }
 
@@ -636,7 +665,10 @@
     mFrameDecoded = true;
 
     // Aggressively clear to avoid holding on to resources
-    mRetriever.clear();
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever.clear();
+    }
 
     // Hold on to mDataSource in case the client wants to redecode.
     return true;
@@ -658,7 +690,17 @@
     // set total scanline to sequence height now
     mTotalScanline = mSequenceInfo.mHeight;
 
-    mFrameMemory = mRetriever->getFrameAtIndex(frameIndex, mOutputColor);
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        retriever = mRetriever;
+        if (retriever == nullptr) {
+            ALOGE("failed to get MediaMetadataRetriever!");
+            return false;
+        }
+    }
+
+    mFrameMemory = retriever->getFrameAtIndex(frameIndex, mOutputColor);
     if (mFrameMemory == nullptr || mFrameMemory->unsecurePointer() == nullptr) {
         ALOGE("decode: videoFrame is a nullptr");
         return false;
@@ -735,9 +777,9 @@
     HeifFrameInfo* info = &mImageInfo;
     if (info != nullptr) {
         return mImageInfo.mBitDepth;
+    } else {
+        return 0;
     }
-
-    return 0;
 }
 
 } // namespace android
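
The HeifDecoderImpl changes above all apply the same pattern: hold mRetrieverLock only long enough to copy or clear the strong pointer, and run the potentially slow retriever calls on a local sp<> outside the lock. Condensed, the pattern looks roughly like this (member names as in this file; surrounding function context omitted):

    sp<MediaMetadataRetriever> retriever;
    {
        Mutex::Autolock _l(mRetrieverLock);  // protect only the pointer copy
        retriever = mRetriever;
    }
    if (retriever == nullptr) {
        return false;                        // the retriever was already cleared
    }
    // Long-running calls proceed without holding mRetrieverLock.
    sp<IMemory> mem = retriever->getImageAtIndex(-1, mOutputColor);
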
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 86a8628..c1504cd 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -72,6 +72,8 @@
     bool mHasVideo;
     size_t mSequenceLength;
 
+    Mutex mRetrieverLock;
+
     // Slice decoding only
     Mutex mLock;
     Condition mScanlineReady;
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
index dc12486..56f4765 100644
--- a/media/libheif/include/HeifDecoderAPI.h
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -47,7 +47,7 @@
     int32_t  mRotationAngle;           // Rotation angle, clockwise, should be multiple of 90
     uint32_t mBytesPerPixel;           // Number of bytes for one pixel
     int64_t  mDurationUs;              // Duration of the frame in us
-    uint32_t mBitDepth;                // Number of bits for each of the R/G/B channels
+    uint32_t mBitDepth;                // Number of bits per R/G/B channel
     std::vector<uint8_t> mIccData;     // ICC data array
 };
 
@@ -164,7 +164,7 @@
     virtual size_t skipScanlines(size_t count) = 0;
 
     /*
-     * Returns color depth in bits for each of the R/G/B channels.
+     * Returns the color depth per R/G/B channel.
      */
     virtual uint32_t getColorDepth() = 0;
 
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index de4c7db..b45dae5 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -195,7 +195,8 @@
     INVOKE_ID_SELECT_TRACK = 4,
     INVOKE_ID_UNSELECT_TRACK = 5,
     INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
-    INVOKE_ID_GET_SELECTED_TRACK = 7
+    INVOKE_ID_GET_SELECTED_TRACK = 7,
+    INVOKE_ID_SET_PLAYER_IID = 8,
 };
 
 // ----------------------------------------------------------------------------
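
The new INVOKE_ID_SET_PLAYER_IID travels over the generic invoke() Parcel protocol (its handling is added in NuPlayerDriver further below). A hedged sketch of a native caller, assuming it goes through MediaPlayer::invoke(), which expects the IMediaPlayer interface token at the start of the request; names such as mediaPlayer and playerIId are illustrative:

    #include <binder/Parcel.h>
    #include <media/IMediaPlayer.h>
    #include <media/mediaplayer.h>

    // Hypothetical sketch: forward the audio-service player interface id to the player.
    status_t setPlayerIIdOnPlayer(const sp<MediaPlayer>& mediaPlayer, int32_t playerIId) {
        Parcel request, reply;
        request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
        request.writeInt32(INVOKE_ID_SET_PLAYER_IID);
        request.writeInt32(playerIId);
        return mediaPlayer->invoke(request, &reply);
    }
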
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
index 2ed3126..d4494f6 100644
--- a/media/libmedia/tests/codeclist/Android.bp
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -25,7 +25,7 @@
 
 cc_test {
     name: "CodecListTest",
-    test_suites: ["device-tests", "mts"],
+    test_suites: ["device-tests", "mts-media"],
     gtest: true,
 
     // Support multilib variants (using different suffix per sub-architecture), which is needed on
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 27f987d..ba3df59 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -72,6 +72,7 @@
 
 // Keys are strings used for MediaMetrics Item Keys
 #define AMEDIAMETRICS_KEY_AUDIO_FLINGER       AMEDIAMETRICS_KEY_PREFIX_AUDIO "flinger"
+#define AMEDIAMETRICS_KEY_AUDIO_MIDI          AMEDIAMETRICS_KEY_PREFIX_AUDIO "midi"
 #define AMEDIAMETRICS_KEY_AUDIO_POLICY        AMEDIAMETRICS_KEY_PREFIX_AUDIO "policy"
 
 // Error keys
@@ -124,9 +125,13 @@
 #define AMEDIAMETRICS_PROP_CHANNELMASK    "channelMask"    // int32
 #define AMEDIAMETRICS_PROP_CHANNELMASKS   "channelMasks"   // string with channelMask values
                                                            // separated by |.
+#define AMEDIAMETRICS_PROP_CLOSEDCOUNT   "closedCount"    // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_CONTENTTYPE    "contentType"    // string attributes (AudioTrack)
 #define AMEDIAMETRICS_PROP_CUMULATIVETIMENS "cumulativeTimeNs" // int64_t playback/record time
                                                            // since start
+#define AMEDIAMETRICS_PROP_DEVICEDISCONNECTED "deviceDisconnected" // string true/false (MIDI)
+#define AMEDIAMETRICS_PROP_DEVICEID       "deviceId"       // int32 device id (MIDI)
+
 // DEVICE values are averaged since starting on device
 #define AMEDIAMETRICS_PROP_DEVICELATENCYMS "deviceLatencyMs" // double - avg latency time
 #define AMEDIAMETRICS_PROP_DEVICESTARTUPMS "deviceStartupMs" // double - avg startup time
@@ -150,12 +155,15 @@
 #define AMEDIAMETRICS_PROP_FLAGS          "flags"
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
+#define AMEDIAMETRICS_PROP_HARDWARETYPE   "hardwareType"   // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_HASHEADTRACKER  "hasHeadTracker" // string true/false
 #define AMEDIAMETRICS_PROP_HEADTRACKERENABLED "headTrackerEnabled" // string true/false
 #define AMEDIAMETRICS_PROP_HEADTRACKINGMODES "headTrackingModes" // string |, like modes.
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
+#define AMEDIAMETRICS_PROP_INPUTPORTCOUNT  "inputPortCount" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
+#define AMEDIAMETRICS_PROP_ISSHARED      "isShared"       // string true/false (MIDI)
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
 #define AMEDIAMETRICS_PROP_LEVELS         "levels"          // string | with levels
 #define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
@@ -165,7 +173,9 @@
 #define AMEDIAMETRICS_PROP_MODES          "modes"          // string | with modes
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
+#define AMEDIAMETRICS_PROP_OPENEDCOUNT   "openedCount"    // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
+#define AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT "outputPortCount" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_PERFORMANCEMODE "performanceMode"    // string value, "none", lowLatency"
 #define AMEDIAMETRICS_PROP_PLAYBACK_PITCH "playback.pitch" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
@@ -194,6 +204,13 @@
                                                            // Treated as "debug" information.
 
 #define AMEDIAMETRICS_PROP_STREAMTYPE     "streamType"     // string (AudioTrack)
+#define AMEDIAMETRICS_PROP_SUPPORTSMIDIUMP "supportsMidiUmp" // string true/false (MIDI).
+                                                             // Universal MIDI Packets (UMP) is a
+                                                             // newer packet-based MIDI transport.
+                                                             // Raw byte streams are used if this
+                                                             // is false.
+#define AMEDIAMETRICS_PROP_TOTALINPUTBYTES "totalInputBytes" // int32 (MIDI)
+#define AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES "totalOutputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
 #define AMEDIAMETRICS_PROP_THROTTLEMS     "throttleMs"     // double
 #define AMEDIAMETRICS_PROP_TRACKID        "trackId"        // int32 port id of track/record
@@ -202,6 +219,7 @@
 #define AMEDIAMETRICS_PROP_UNDERRUN       "underrun"       // int32
 #define AMEDIAMETRICS_PROP_UNDERRUNFRAMES "underrunFrames" // int64_t from Thread
 #define AMEDIAMETRICS_PROP_USAGE          "usage"          // string attributes (ATrack)
+#define AMEDIAMETRICS_PROP_USINGALSA     "usingAlsa"      // string true/false (MIDI)
 #define AMEDIAMETRICS_PROP_VOICEVOLUME    "voiceVolume"    // double (audio.flinger)
 #define AMEDIAMETRICS_PROP_VOLUME_LEFT    "volume.left"    // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_VOLUME_RIGHT   "volume.right"   // double (AudioTrack)
@@ -226,6 +244,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE     "create"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATEAUDIOPATCH "createAudioPatch"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR       "ctor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED "deviceClosed" // MIDI
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT "disconnect"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR       "dtor"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM "endAAudioStream" // AAudioStream
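
The new MIDI keys are meant to be submitted like other audio metrics items. A heavily hedged sketch of a reporting call, assuming the existing mediametrics::LogItem helper from MediaMetricsItem.h is used; the function name, parameters, and include paths are illustrative, and the real reporting site (in the MIDI service) may differ:

    #include <MediaMetricsConstants.h>  // include paths per the local build setup
    #include <MediaMetricsItem.h>

    // Hypothetical sketch -- not the actual MIDI service reporting code.
    void reportMidiDeviceClosed(int32_t deviceId, int32_t inputPortCount, int32_t outputPortCount,
                                int32_t totalInputBytes, int32_t totalOutputBytes, bool usingAlsa) {
        android::mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_MIDI)
                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED)
                .set(AMEDIAMETRICS_PROP_DEVICEID, deviceId)
                .set(AMEDIAMETRICS_PROP_INPUTPORTCOUNT, inputPortCount)
                .set(AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, outputPortCount)
                .set(AMEDIAMETRICS_PROP_TOTALINPUTBYTES, totalInputBytes)
                .set(AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES, totalOutputBytes)
                .set(AMEDIAMETRICS_PROP_USINGALSA, usingAlsa ? "true" : "false")
                .record();
    }
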
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index fdcf246..bdf1cbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1805,7 +1805,8 @@
 MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId,
         const AttributionSourceState& attributionSource, const audio_attributes_t* attr,
         const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
-    : mCallback(NULL),
+    : mCachedPlayerIId(PLAYER_PIID_INVALID),
+      mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
       mStreamType(AUDIO_STREAM_MUSIC),
@@ -2314,6 +2315,10 @@
         return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
     });
 
+    if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+        t->setPlayerIId(mCachedPlayerIId);
+    }
+
     mSampleRateHz = sampleRate;
     mFlags = flags;
     mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
@@ -2366,6 +2371,17 @@
     return NO_INIT;
 }
 
+void MediaPlayerService::AudioOutput::setPlayerIId(int32_t playerIId)
+{
+    ALOGV("setPlayerIId(%d)", playerIId);
+    Mutex::Autolock lock(mLock);
+    mCachedPlayerIId = playerIId;
+
+    if (mTrack != nullptr) {
+        mTrack->setPlayerIId(mCachedPlayerIId);
+    }
+}
+
 void MediaPlayerService::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) {
     Mutex::Autolock lock(mLock);
     mNextOutput = nextOutput;
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 86be3fe..52c2f79 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -113,6 +113,8 @@
                 bool doNotReconnect = false,
                 uint32_t suggestedFrameCount = 0);
 
+        virtual void            setPlayerIId(int32_t playerIId);
+
         virtual status_t        start();
         virtual ssize_t         write(const void* buffer, size_t size, bool blocking = true);
         virtual void            stop();
@@ -160,6 +162,7 @@
         sp<AudioTrack>          mTrack;
         sp<AudioTrack>          mRecycledTrack;
         sp<AudioOutput>         mNextOutput;
+        int                     mCachedPlayerIId;
         AudioCallback           mCallback;
         void *                  mCallbackCookie;
         sp<CallbackData>        mCallbackData;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index b3f7f25..db979d7 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -211,6 +211,7 @@
         mime = MEDIA_MIMETYPE_VIDEO_AV1;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+        isHeif = true;
     }
 
     sp<AMessage> format = new AMessage;
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index a36f1d6..5abac81 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -60,15 +60,51 @@
     static_libs: [
         "libstagefright_rtsp",
         "libbase",
+        "libstagefright_nuplayer",
+        "libplayerservice_datasource",
+        "libstagefright_timedtext",
+        "libaudioprocessing_base",
     ],
     shared_libs: [
+        "android.hardware.media.omx@1.0",
         "av-types-aidl-cpp",
         "media_permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
         "libandroid_net",
+        "libaudioclient",
         "libcamera_client",
+        "libcodec2_client",
+        "libcrypto",
+        "libdatasource",
+        "libdrmframework",
         "libgui",
+        "libhidlbase",
+        "liblog",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmediadrm",
         "libmediametrics",
+        "libmediautils",
+        "libmemunreachable",
+        "libnetd_client",
+        "libpowermanager",
+        "libstagefright_httplive",
+        "packagemanager_aidl-cpp",
+        "libfakeservicemanager",
+        "libvibrator",
+        "libnbaio",
+        "libnblog",
+        "libpowermanager",
+        "libaudioprocessing",
+        "libaudioflinger",
+        "libresourcemanagerservice",
+        "libmediametricsservice",
+        "mediametricsservice-aidl-cpp",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
     ],
 }
 
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index 7799f44..a189d04 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -26,6 +26,8 @@
 #include <media/IMediaRecorder.h>
 #include <media/IRemoteDisplay.h>
 #include <media/IRemoteDisplayClient.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/RemoteDataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <thread>
@@ -102,6 +104,42 @@
     IBinder *onAsBinder() { return nullptr; };
 };
 
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    TestMediaHTTPConnection() {}
+    virtual ~TestMediaHTTPConnection() {}
+
+    virtual bool connect(const char* /*uri*/, const KeyedVector<String8, String8>* /*headers*/) {
+        return true;
+    }
+
+    virtual void disconnect() { return; }
+
+    virtual ssize_t readAt(off64_t /*offset*/, void* /*data*/, size_t size) { return size; }
+
+    virtual off64_t getSize() { return 0; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) { return NO_ERROR; }
+    virtual status_t getUri(String8* /*uri*/) { return NO_ERROR; }
+
+  private:
+    DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public BnInterface<IMediaHTTPService> {
+  public:
+    TestMediaHTTPService() {}
+    ~TestMediaHTTPService(){};
+
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mMediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+        return mMediaHTTPConnection;
+    }
+
+  private:
+    sp<TestMediaHTTPConnection> mMediaHTTPConnection = nullptr;
+    DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
 class BinderDeathNotifier : public IBinder::DeathRecipient {
    public:
     void binderDied(const wp<IBinder> &) { abort(); }
@@ -140,7 +178,9 @@
             AString out;
             encodeBase64(uriSuffix.data(), uriSuffix.size(), &out);
             uri += out.c_str();
-            status = mMediaPlayer->setDataSource(nullptr /*httpService*/, uri.c_str(), &headers);
+            sp<TestMediaHTTPService> testService = sp<TestMediaHTTPService>::make();
+            status =
+                    mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
             break;
         }
         case fd: {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index b0040fe..4f2da67 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -18,6 +18,10 @@
 #include <media/stagefright/foundation/AString.h>
 #include "fuzzer/FuzzedDataProvider.h"
 
+#include <AudioFlinger.h>
+#include <MediaPlayerService.h>
+#include <ResourceManagerService.h>
+#include <ServiceManager.h>
 #include <StagefrightRecorder.h>
 #include <camera/Camera.h>
 #include <camera/android/hardware/ICamera.h>
@@ -25,6 +29,7 @@
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <mediametricsservice/MediaMetricsService.h>
 #include <thread>
 
 using namespace std;
@@ -305,6 +310,21 @@
     mStfRecorder->reset();
 }
 
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    /**
+     * Initialize a fake service manager and register instances
+     * of all the required services.
+     */
+    sp<IServiceManager> fakeServiceManager = new ServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    MediaPlayerService::instantiate();
+    AudioFlinger::instantiate();
+    ResourceManagerService::instantiate();
+    fakeServiceManager->addService(String16(MediaMetricsService::kServiceName),
+                                    new MediaMetricsService());
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     MediaRecorderClientFuzzer mrcFuzzer(data, size);
     mrcFuzzer.process();
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 1cbd8a0..fb20aab 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -123,6 +123,8 @@
                 bool doNotReconnect = false,
                 uint32_t suggestedFrameCount = 0) = 0;
 
+        virtual void        setPlayerIId(int32_t playerIId) = 0;
+
         virtual status_t    start() = 0;
 
         /* Input parameter |size| is in byte units stored in |buffer|.
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 36e4d4a..1358faa 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -992,6 +992,11 @@
         format->setInt32("auto", !!isAutoselect);
         format->setInt32("default", !!isDefault);
         format->setInt32("forced", !!isForced);
+    } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+        int32_t hapticChannelCount;
+        if (meta->findInt32(kKeyHapticChannelCount, &hapticChannelCount)) {
+            format->setInt32("haptic-channel-count", hapticChannelCount);
+        }
     }
 
     return format;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 4851684..727d68d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -555,6 +555,13 @@
         reply->writeInt32(isAuto);
         reply->writeInt32(isDefault);
         reply->writeInt32(isForced);
+    } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+        int32_t hapticChannelCount;
+        bool hasHapticChannels = format->findInt32("haptic-channel-count", &hapticChannelCount);
+        reply->writeInt32(hasHapticChannels);
+        if (hasHapticChannels) {
+            reply->writeInt32(hapticChannelCount);
+        }
     }
 }
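
On the client side, an audio track entry in the reply now ends with a flag followed by an optional count. After the per-track fields that precede it have been consumed, the added fields would be read roughly like this (sketch, mirroring the writes above):

    // Sketch: reading the audio-track fields appended above from the reply Parcel.
    int32_t hasHapticChannels = reply.readInt32();
    int32_t hapticChannelCount = 0;
    if (hasHapticChannels) {
        hapticChannelCount = reply.readInt32();
    }
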
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 2a50fc2..ceea2f4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,6 +26,7 @@
 #include "NuPlayer.h"
 #include "NuPlayerSource.h"
 
+#include <audiomanager/AudioManager.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AUtils.h>
@@ -85,6 +86,7 @@
       mMediaClock(new MediaClock),
       mPlayer(new NuPlayer(pid, mMediaClock)),
       mPlayerFlags(0),
+      mCachedPlayerIId(PLAYER_PIID_INVALID),
       mMetricsItem(NULL),
       mClientUid(-1),
       mAtEOS(false),
@@ -804,6 +806,16 @@
             return mPlayer->getSelectedTrack(type, reply);
         }
 
+        case INVOKE_ID_SET_PLAYER_IID:
+        {
+            Mutex::Autolock autoLock(mAudioSinkLock);
+            mCachedPlayerIId = request.readInt32();
+            if (mAudioSink != nullptr) {
+                mAudioSink->setPlayerIId(mCachedPlayerIId);
+            }
+            return OK;
+        }
+
         default:
         {
             return INVALID_OPERATION;
@@ -812,8 +824,12 @@
 }
 
 void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
+    Mutex::Autolock autoLock(mAudioSinkLock);
     mPlayer->setAudioSink(audioSink);
     mAudioSink = audioSink;
+    if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+        mAudioSink->setPlayerIId(mCachedPlayerIId);
+    }
 }
 
 status_t NuPlayerDriver::setParameter(
@@ -1027,6 +1043,7 @@
             if (mState != STATE_RESET_IN_PROGRESS) {
                 if (mAutoLoop) {
                     audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+                    Mutex::Autolock autoLock(mAudioSinkLock);
                     if (mAudioSink != NULL) {
                         streamType = mAudioSink->getAudioStreamType();
                     }
@@ -1037,6 +1054,7 @@
                 }
                 if (mLooping || mAutoLoop) {
                     mPlayer->seekToAsync(0);
+                    Mutex::Autolock autoLock(mAudioSinkLock);
                     if (mAudioSink != NULL) {
                         // The renderer has stopped the sink at the end in order to play out
                         // the last little bit of audio. If we're looping, we need to restart it.
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
index 55a0fad..138cd6f 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
@@ -140,9 +140,12 @@
     sp<ALooper> mLooper;
     const sp<MediaClock> mMediaClock;
     const sp<NuPlayer> mPlayer;
-    sp<AudioSink> mAudioSink;
     uint32_t mPlayerFlags;
 
+    mutable Mutex mAudioSinkLock;
+    sp<AudioSink> mAudioSink GUARDED_BY(mAudioSinkLock);
+    int32_t mCachedPlayerIId GUARDED_BY(mAudioSinkLock);
+
     mediametrics::Item *mMetricsItem;
     mutable Mutex mMetricsLock;
     uid_t mClientUid;
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index 2beb47f..30f6a91 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -25,8 +25,8 @@
 #include <aidl/android/media/BnResourceManagerService.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
 #include <mediadrm/DrmSessionManager.h>
+#include <mediautils/ProcessInfoInterface.h>
 
 #include <algorithm>
 #include <iostream>
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 67d028d..d6232d4 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -208,11 +208,14 @@
     }
     while (back + Entry::kPreviousLengthOffset >= front) {
         const uint8_t *prev = back - back[Entry::kPreviousLengthOffset] - Entry::kOverhead;
-        const Event type = (const Event)prev[offsetof(entry, type)];
         if (prev < front
-                || prev + prev[offsetof(entry, length)] + Entry::kOverhead != back
-                || type <= EVENT_RESERVED || type >= EVENT_UPPER_BOUND) {
-            // prev points to an out of limits or inconsistent entry
+                || prev + prev[offsetof(entry, length)] + Entry::kOverhead != back) {
+            // prev points to an out of limits entry
+            return nullptr;
+        }
+        const Event type = (const Event)prev[offsetof(entry, type)];
+        if (type <= EVENT_RESERVED || type >= EVENT_UPPER_BOUND) {
+            // prev points to an inconsistent entry
             return nullptr;
         }
         // if invalidTypes does not contain the type, then the type is valid.
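
The Reader.cpp reordering above ensures prev's type byte is only read after the bounds and consistency checks have established that prev lies inside the valid region. A standalone sketch of the same check-before-dereference ordering, with a simplified entry layout (names are hypothetical):

    #include <cstddef>
    #include <cstdint>

    // Returns the type byte of the previous entry, or -1 if prev would fall
    // before front; the pointer is validated before it is dereferenced.
    static int previousEntryType(const uint8_t* front, const uint8_t* back,
                                 size_t prevLength, size_t overhead) {
        const uint8_t* prev = back - prevLength - overhead;
        if (prev < front) {        // bounds check first ...
            return -1;
        }
        return prev[0];            // ... dereference only afterwards
    }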
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d6028d9..4a5524d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -6315,6 +6315,11 @@
                     flags |= OMX_BUFFERFLAG_EOS;
                 }
 
+                int32_t isDecodeOnly = 0;
+                if (buffer->meta()->findInt32("decode-only", &isDecodeOnly) && isDecodeOnly != 0) {
+                    flags |= OMX_BUFFERFLAG_DECODEONLY;
+                    mCodec->mDecodeOnlyTimesUs.emplace(timeUs);
+                }
                 size_t size = buffer->size();
                 size_t offset = buffer->offset();
                 if (buffer->base() != info->mCodecData->base()) {
@@ -6344,6 +6349,10 @@
                     ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                          mCodec->mComponentName.c_str(), bufferID);
                 } else {
+                    if (flags & OMX_BUFFERFLAG_DECODEONLY) {
+                        ALOGV("[%s] calling emptyBuffer %u w/ decode only flag",
+                            mCodec->mComponentName.c_str(), bufferID);
+                    }
 #if TRACK_BUFFER_TIMING
                     ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                          mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
@@ -6634,6 +6643,39 @@
 
             info->mData.clear();
 
+            // Workaround: if OMX_BUFFERFLAG_DECODEONLY is not implemented in the
+            // HAL, the flag is dropped from the corresponding output buffer.
+
+            // For all buffers that were marked as DECODE_ONLY, remove their timestamps
+            // from the set if they are smaller than the timestamp of the buffer that
+            // was just received.
+            while (!mCodec->mDecodeOnlyTimesUs.empty() &&
+                   *mCodec->mDecodeOnlyTimesUs.begin() < timeUs) {
+                mCodec->mDecodeOnlyTimesUs.erase(mCodec->mDecodeOnlyTimesUs.begin());
+            }
+            // If OMX_BUFFERFLAG_DECODEONLY is not implemented in the HAL, we need to restore
+            // the OMX_BUFFERFLAG_DECODEONLY flag to the frames we had saved in the set; the set
+            // contains the timestamps of buffers that were marked as DECODE_ONLY by the app.
+            if (!mCodec->mDecodeOnlyTimesUs.empty() &&
+                *mCodec->mDecodeOnlyTimesUs.begin() == timeUs) {
+                mCodec->mDecodeOnlyTimesUs.erase(timeUs);
+                // If the app queued the last valid buffer as DECODE_ONLY and queued an additional
+                // empty buffer as EOS, it's possible that the HAL sets the last valid frame as
+                // EOS instead and drops the empty buffer. In such a case, we should not add back
+                // the OMX_BUFFERFLAG_DECODEONLY flag to it, as doing so would prevent the app
+                // from ever receiving the EOS buffer, which breaks the contract of EOS buffers.
+                if (flags & OMX_BUFFERFLAG_EOS) {
+                    // Set buffer size to 0, as described by
+                    // https://developer.android.com/reference/android/media/MediaCodec.BufferInfo?hl=en#size
+                    // a buffer of size 0 should only be used to carry the EOS flag and should
+                    // be discarded by the app as it has no data
+                    buffer->setRange(0, 0);
+                } else {
+                    // re-add the OMX_BUFFERFLAG_DECODEONLY flag to the buffer in case it is
+                    // not the end of stream buffer
+                    flags |= OMX_BUFFERFLAG_DECODEONLY;
+                }
+            }
             mCodec->mBufferChannel->drainThisBuffer(info->mBufferID, flags);
 
             info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
@@ -6854,6 +6896,7 @@
     mCodec->mConverter[0].clear();
     mCodec->mConverter[1].clear();
     mCodec->mComponentName.clear();
+    mCodec->mDecodeOnlyTimesUs.clear();
 }
 
 bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
@@ -8839,6 +8882,7 @@
     ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
 
     mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
+    mCodec->mDecodeOnlyTimesUs.clear();
 
     // If we haven't transitioned after 3 seconds, we're probably stuck.
     sp<AMessage> msg = new AMessage(ACodec::kWhatCheckIfStuck, mCodec);
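
A standalone model of the decode-only bookkeeping added to ACodec above: timestamps of input buffers flagged decode-only go into an ordered set, stale entries are pruned as output advances, and the flag is restored on the matching output unless that buffer also carries EOS. This is a sketch only; std::set mirrors the mDecodeOnlyTimesUs member, and the flag constants are hypothetical:

    #include <cstdint>
    #include <set>

    enum : uint32_t { kFlagEos = 1, kFlagDecodeOnly = 2 };

    class DecodeOnlyTracker {
    public:
        void onInput(int64_t timeUs, bool decodeOnly) {
            if (decodeOnly) mTimesUs.insert(timeUs);
        }

        // Returns the flags to report for an output buffer with timestamp timeUs.
        uint32_t onOutput(int64_t timeUs, uint32_t flags) {
            // Drop entries older than the output that was just received.
            while (!mTimesUs.empty() && *mTimesUs.begin() < timeUs) {
                mTimesUs.erase(mTimesUs.begin());
            }
            if (!mTimesUs.empty() && *mTimesUs.begin() == timeUs) {
                mTimesUs.erase(timeUs);
                if (!(flags & kFlagEos)) {
                    flags |= kFlagDecodeOnly;   // restore the flag the HAL did not propagate
                }
                // If EOS is set, leave the flag off so the client still sees the EOS buffer.
            }
            return flags;
        }

        void clear() { mTimesUs.clear(); }      // on flush / shutdown

    private:
        std::set<int64_t> mTimesUs;             // timestamps queued as decode-only
    };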
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 88b15ae..c5a59ff 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -114,6 +114,10 @@
         if (it->mClientBuffer->meta()->findInt32("csd", &csd)) {
             it->mCodecBuffer->meta()->setInt32("csd", csd);
         }
+        int32_t decodeOnly;
+        if (it->mClientBuffer->meta()->findInt32("decode-only", &decodeOnly)) {
+            it->mCodecBuffer->meta()->setInt32("decode-only", decodeOnly);
+        }
     }
     ALOGV("queueInputBuffer #%d", it->mBufferId);
     sp<AMessage> msg = mInputBufferFilled->dup();
@@ -263,6 +267,10 @@
     if (it->mClientBuffer->meta()->findInt32("csd", &csd)) {
         it->mCodecBuffer->meta()->setInt32("csd", csd);
     }
+    int32_t decodeOnly;
+    if (it->mClientBuffer->meta()->findInt32("decode-only", &decodeOnly)) {
+        it->mCodecBuffer->meta()->setInt32("decode-only", decodeOnly);
+    }
 
     ALOGV("queueSecureInputBuffer #%d", it->mBufferId);
     sp<AMessage> msg = mInputBufferFilled->dup();
@@ -634,6 +642,9 @@
     if (omxFlags & OMX_BUFFERFLAG_EOS) {
         flags |= MediaCodec::BUFFER_FLAG_EOS;
     }
+    if (omxFlags & OMX_BUFFERFLAG_DECODEONLY) {
+        flags |= MediaCodec::BUFFER_FLAG_DECODE_ONLY;
+    }
     it->mClientBuffer->meta()->setInt32("flags", flags);
 
     mCallback->onOutputBufferAvailable(
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 42815b3..2370a7b 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -920,7 +920,7 @@
         return ERROR_MALFORMED;
     }
 
-    int32_t width, height, stride, srcFormat;
+    int32_t width, height, stride;
     if (outputFormat->findInt32("width", &width) == false) {
         ALOGE("MediaImageDecoder::onOutputReceived:width is missing in outputFormat");
         return ERROR_MALFORMED;
@@ -933,10 +933,9 @@
         ALOGE("MediaImageDecoder::onOutputReceived:stride is missing in outputFormat");
         return ERROR_MALFORMED;
     }
-    if (outputFormat->findInt32("color-format", &srcFormat) == false) {
-        ALOGE("MediaImageDecoder::onOutputReceived: color format is missing in outputFormat");
-        return ERROR_MALFORMED;
-    }
+
+    int32_t srcFormat;
+    CHECK(outputFormat->findInt32("color-format", &srcFormat));
 
     uint32_t bitDepth = 8;
     if (COLOR_FormatYUVP010 == srcFormat) {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 386b790..c93d033 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -155,7 +155,10 @@
     void bufferChunk(int64_t timestampUs);
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
+    bool isAv1() const { return mIsAv1; }
     bool isHeic() const { return mIsHeic; }
+    bool isAvif() const { return mIsAvif; }
+    bool isHeif() const { return mIsHeif; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
     bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
@@ -319,10 +322,13 @@
     volatile bool mStarted;
     bool mIsAvc;
     bool mIsHevc;
+    bool mIsAv1;
     bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
     bool mIsHeic;
+    bool mIsAvif;
+    bool mIsHeif;
     bool mIsMPEG4;
     bool mGotStartKeyFrame;
     bool mIsMalformed;
@@ -467,6 +473,7 @@
     void writePaspBox();
     void writeAvccBox();
     void writeHvccBox();
+    void writeAv1cBox();
     void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
@@ -547,6 +554,7 @@
     mStreamableFile = false;
     mTimeScale = -1;
     mHasFileLevelMeta = false;
+    mIsAvif = false;
     mFileLevelMetaDataSize = 0;
     mPrimaryItemId = 0;
     mAssociationEntryCount = 0;
@@ -660,11 +668,15 @@
             return "avc1";
         } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
             return "hvc1";
+        } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
+            return "av01";
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
     } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         return "heic";
+    } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
+        return "avif";
     } else {
         ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
     }
@@ -709,8 +721,9 @@
     Track *track = new Track(this, source, 1 + mTracks.size());
     mTracks.push_back(track);
 
-    mHasMoovBox |= !track->isHeic();
-    mHasFileLevelMeta |= track->isHeic();
+    mHasMoovBox |= !track->isHeif();
+    mHasFileLevelMeta |= track->isHeif();
+    mIsAvif |= track->isAvif();
 
     return OK;
 }
@@ -792,7 +805,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if ((*it)->isHeic()) {
+        if ((*it)->isHeif()) {
             metaSize += (*it)->getMetaSizeIncrease(rotation, mTracks.size());
         }
     }
@@ -994,8 +1007,8 @@
         return err;
     }
 
-    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
-            mHasMoovBox, mHasFileLevelMeta);
+    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d, mIsAvif %d",
+            mHasMoovBox, mHasFileLevelMeta, mIsAvif);
 
     err = startWriterThread();
     if (err != OK) {
@@ -1311,7 +1324,7 @@
         }
 
         // skip image tracks
-        if ((*it)->isHeic()) continue;
+        if ((*it)->isHeif()) continue;
         nonImageTrackCount++;
 
         int64_t durationUs = (*it)->getDurationUs();
@@ -1489,7 +1502,7 @@
     int64_t minCttsOffsetTimeUs = kMaxCttsOffsetTimeUs;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic()) {
+        if (!(*it)->isHeif()) {
             minCttsOffsetTimeUs =
                 std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
         }
@@ -1505,7 +1518,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic()) {
+        if (!(*it)->isHeif()) {
             (*it)->writeTrackHeader();
         }
     }
@@ -1525,22 +1538,41 @@
         writeFourcc("isom");
         writeFourcc("3gp4");
     } else {
-        // Only write "heic" as major brand if the client specified HEIF
-        // AND we indeed receive some image heic tracks.
+        // Only write "heic"/"avif" as major brand if the client specified HEIF/AVIF
+        // AND we indeed receive some image heic/avif tracks.
         if (fileType == OUTPUT_FORMAT_HEIF && mHasFileLevelMeta) {
-            writeFourcc("heic");
+            if (mIsAvif) {
+                writeFourcc("avif");
+            } else {
+                writeFourcc("heic");
+            }
         } else {
             writeFourcc("mp42");
         }
         writeInt32(0);
         if (mHasFileLevelMeta) {
-            writeFourcc("mif1");
-            writeFourcc("heic");
+            if (mIsAvif) {
+                writeFourcc("mif1");
+                writeFourcc("miaf");
+                writeFourcc("avif");
+            } else {
+                writeFourcc("mif1");
+                writeFourcc("heic");
+            }
         }
         if (mHasMoovBox) {
             writeFourcc("isom");
             writeFourcc("mp42");
         }
+        // If an AV1 video track is present, write "av01" as one of the
+        // compatible brands.
+        for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end();
+             ++it) {
+            if ((*it)->isAv1()) {
+                writeFourcc("av01");
+                break;
+            }
+        }
     }
 
     endBox();
@@ -2103,7 +2135,8 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic() && (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
+        if (!(*it)->isHeif() &&
+                (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
             return true;
         }
     }
@@ -2205,10 +2238,13 @@
     mMeta->findCString(kKeyMIMEType, &mime);
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
     mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
     mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF);
+    mIsHeif = mIsHeic || mIsAvif;
     mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
 
@@ -2220,7 +2256,7 @@
         }
     }
 
-    if (!mIsHeic) {
+    if (!mIsHeif) {
         setTimeScale();
     } else {
         CHECK(mMeta->findInt32(kKeyWidth, &mWidth) && (mWidth > 0));
@@ -2301,7 +2337,7 @@
 
 void MPEG4Writer::Track::updateTrackSizeEstimate() {
     mEstimatedTrackSizeBytes = mMdatSizeBytes;  // media data size
-    if (!isHeic() && !mOwner->isFileStreamable()) {
+    if (!isHeif() && !mOwner->isFileStreamable()) {
         mEstimatedTrackSizeBytes += trackMetaDataSize();
     }
 }
@@ -2384,7 +2420,7 @@
 
 bool MPEG4Writer::Track::isExifData(
         MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
-    if (!mIsHeic) {
+    if (!mIsHeif) {
         return false;
     }
 
@@ -2413,12 +2449,12 @@
 }
 
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
-    CHECK(!mIsHeic);
+    CHECK(!mIsHeif);
     mCo64TableEntries->add(hton64(offset));
 }
 
 void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     if (offset > UINT32_MAX || size > UINT32_MAX) {
         ALOGE("offset or size is out of range: %lld, %lld",
@@ -2464,8 +2500,10 @@
 
     if (mProperties.empty()) {
         mProperties.push_back(mOwner->addProperty_l({
-            .type = FOURCC('h', 'v', 'c', 'C'),
-            .hvcc = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
+            .type = static_cast<uint32_t>(mIsAvif ?
+                  FOURCC('a', 'v', '1', 'C') :
+                  FOURCC('h', 'v', 'c', 'C')),
+            .data = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
         }));
 
         mProperties.push_back(mOwner->addProperty_l({
@@ -2485,7 +2523,7 @@
     mTileIndex++;
     if (hasGrid) {
         mDimgRefs.value.push_back(mOwner->addItem_l({
-            .itemType = "hvc1",
+            .itemType = mIsAvif ? "av01" : "hvc1",
             .itemId = mItemIdBase++,
             .isPrimary = false,
             .isHidden = true,
@@ -2521,7 +2559,7 @@
         }
     } else {
         mImageItemId = mOwner->addItem_l({
-            .itemType = "hvc1",
+            .itemType = mIsAvif ? "av01" : "hvc1",
             .itemId = mItemIdBase++,
             .isPrimary = (mIsPrimary != 0),
             .isHidden = false,
@@ -2538,7 +2576,7 @@
 // it affects the 'dimg' refs for tiled image, as we only have the refs after the
 // last tile sample is written.
 void MPEG4Writer::Track::flushItemRefs() {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     if (mImageItemId > 0) {
         mOwner->addRefs_l(mImageItemId, mDimgRefs);
@@ -2639,6 +2677,9 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+        mMeta->findData(kKeyAV1C, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         getDolbyVisionProfile();
         if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -2749,7 +2790,7 @@
         size_t bytesWritten;
         off64_t offset = addSample_l(*it, usePrefix, tiffHdrOffset, &bytesWritten);
 
-        if (chunk->mTrack->isHeic()) {
+        if (chunk->mTrack->isHeif()) {
             chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten, isExif);
         } else if (isFirstSample) {
             chunk->mTrack->addChunkOffset(offset);
@@ -2901,11 +2942,11 @@
     mStartTimeRealUs = startTimeUs;
 
     int32_t rotationDegrees;
-    if ((mIsVideo || mIsHeic) && params &&
+    if ((mIsVideo || mIsHeif) && params &&
             params->findInt32(kKeyRotation, &rotationDegrees)) {
         mRotation = rotationDegrees;
     }
-    if (mIsHeic) {
+    if (mIsHeif) {
         // Reserve the item ids, so that the item ids are ordered in the same
         // order that the image tracks are added.
         // If we leave the item ids to be assigned when the sample is written out,
@@ -3581,7 +3622,7 @@
         }
 
         // Per-frame metadata sample's size must be smaller than max allowed.
-        if (!mIsVideo && !mIsAudio && !mIsHeic &&
+        if (!mIsVideo && !mIsAudio && !mIsHeif &&
                 buffer->range_length() >= kMaxMetadataSize) {
             ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
                     buffer->range_length(), (long long)kMaxMetadataSize, trackName);
@@ -3705,7 +3746,7 @@
             mGotStartKeyFrame = true;
         }
 ////////////////////////////////////////////////////////////////////////////////
-        if (!mIsHeic) {
+        if (!mIsHeif) {
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
                 if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
@@ -3925,7 +3966,7 @@
             off64_t offset = mOwner->addSample_l(
                     copy, usePrefix, tiffHdrOffset, &bytesWritten);
 
-            if (mIsHeic) {
+            if (mIsHeif) {
                 addItemOffsetAndSize(offset, bytesWritten, isExif);
             } else {
                 if (mCo64TableEntries->count() == 0) {
@@ -3938,7 +3979,7 @@
         }
 
         mChunkSamples.push_back(copy);
-        if (mIsHeic) {
+        if (mIsHeif) {
             bufferChunk(0 /*timestampUs*/);
             ++nChunks;
         } else if (interleaveDurationUs == 0) {
@@ -3976,7 +4017,7 @@
 
     // Add final entries only for non-empty tracks.
     if (mStszTableEntries->count() > 0) {
-        if (mIsHeic) {
+        if (mIsHeif) {
             if (!mChunkSamples.empty()) {
                 bufferChunk(0);
                 ++nChunks;
@@ -4049,7 +4090,7 @@
         mOwner->mStartMeta->findInt32(kKeyEmptyTrackMalFormed, &emptyTrackMalformed) &&
         emptyTrackMalformed) {
         // MediaRecorder(sets kKeyEmptyTrackMalFormed by default) report empty tracks as malformed.
-        if (!mIsHeic && mStszTableEntries->count() == 0) {  // no samples written
+        if (!mIsHeif && mStszTableEntries->count() == 0) {  // no samples written
             ALOGE("The number of recorded samples is 0");
             mIsMalformed = true;
             return true;
@@ -4212,7 +4253,7 @@
 
 int32_t MPEG4Writer::Track::getMetaSizeIncrease(
         int32_t angle, int32_t trackCount) const {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     int32_t grid = (mTileWidth > 0);
     int32_t rotate = (angle > 0);
@@ -4262,8 +4303,10 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
-        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
             ALOGE("Missing codec specific data");
@@ -4282,7 +4325,7 @@
 const char *MPEG4Writer::Track::getTrackType() const {
     return mIsAudio ? "Audio" :
            mIsVideo ? "Video" :
-           mIsHeic  ? "Image" :
+           mIsHeif  ? "Image" :
                       "Metadata";
 }
 
@@ -4433,6 +4476,8 @@
         writeAvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
         writeHvccBox();
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
+        writeAv1cBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
         if (mDoviProfile <= DolbyVisionProfileDvheSt) {
             writeHvccBox();
@@ -5000,6 +5045,15 @@
     mOwner->endBox();  // hvcC
 }
 
+void MPEG4Writer::Track::writeAv1cBox() {
+    CHECK(mCodecSpecificData);
+    CHECK_GE(mCodecSpecificDataSize, 4u);
+
+    mOwner->beginBox("av1C");
+    mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+    mOwner->endBox();  // av1C
+}
+
 void MPEG4Writer::Track::writeDoviConfigBox() {
     CHECK_NE(mDoviProfile, 0u);
 
@@ -5384,7 +5438,7 @@
             case FOURCC('h', 'v', 'c', 'C'):
             {
                 beginBox("hvcC");
-                sp<ABuffer> hvcc = mProperties[propIndex].hvcc;
+                sp<ABuffer> hvcc = mProperties[propIndex].data;
                 // Patch avcc's lengthSize field to match the number
                 // of bytes we use to indicate the size of a nal unit.
                 uint8_t *ptr = (uint8_t *)hvcc->data();
@@ -5393,6 +5447,14 @@
                 endBox();
                 break;
             }
+            case FOURCC('a', 'v', '1', 'C'):
+            {
+                beginBox("av1C");
+                sp<ABuffer> av1c = mProperties[propIndex].data;
+                write(av1c->data(), av1c->size());
+                endBox();
+                break;
+            }
             case FOURCC('i', 's', 'p', 'e'):
             {
                 beginBox("ispe");
@@ -5496,7 +5558,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if ((*it)->isHeic()) {
+        if ((*it)->isHeif()) {
             (*it)->flushItemRefs();
         }
     }
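
The ftyp changes above choose the major brand from whether the file carries HEIC or AVIF image items and then append the matching compatible brands, plus "av01" when an AV1 video track is present. A standalone sketch of that brand selection (hedged; the boolean inputs summarize the writer state rather than the exact MPEG4Writer fields):

    #include <string>
    #include <vector>

    // Collects the brands the ftyp box would carry for a HEIF-style output.
    static std::vector<std::string> heifBrands(bool hasImageItems, bool isAvif,
                                               bool hasMoovBox, bool hasAv1VideoTrack) {
        std::vector<std::string> brands;
        brands.push_back(hasImageItems ? (isAvif ? "avif" : "heic") : "mp42");  // major brand
        if (hasImageItems) {
            brands.push_back("mif1");
            if (isAvif) brands.push_back("miaf");
            brands.push_back(isAvif ? "avif" : "heic");
        }
        if (hasMoovBox) {
            brands.push_back("isom");
            brands.push_back("mp42");
        }
        if (hasAv1VideoTrack) brands.push_back("av01");   // AV1 video track present
        return brands;
    }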
diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp
index 24608a7..ed0819d 100644
--- a/media/libstagefright/MediaClock.cpp
+++ b/media/libstagefright/MediaClock.cpp
@@ -110,8 +110,12 @@
     if (mAnchorTimeRealUs != -1) {
         int64_t oldNowMediaUs =
             mAnchorTimeMediaUs + (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
-        if (nowMediaUs < oldNowMediaUs + kAnchorFluctuationAllowedUs
-                && nowMediaUs > oldNowMediaUs - kAnchorFluctuationAllowedUs) {
+        // Earlier, we ensured that the anchor times are non-negative and that the
+        // math used to calculate the now/oldNow times stays non-negative.
+        // Casting to uint64_t gives enough headroom to avoid overflow at the upper end
+        // when adding the fluctuation allowance.
+        if ((uint64_t)nowMediaUs < (uint64_t)oldNowMediaUs + kAnchorFluctuationAllowedUs
+                && (uint64_t)nowMediaUs + kAnchorFluctuationAllowedUs > (uint64_t)oldNowMediaUs) {
             return;
         }
     }
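
The MediaClock comparison above widens both sides to uint64_t before adding the allowance so the test cannot overflow a signed value; the surrounding code already guarantees the media times are non-negative. A tiny standalone illustration (the allowance constant here is hypothetical):

    #include <cstdint>

    constexpr uint64_t kAllowedFluctuationUs = 10000;   // hypothetical allowance

    // Both arguments are assumed non-negative, as the caller guarantees.
    static bool withinFluctuation(int64_t nowMediaUs, int64_t oldNowMediaUs) {
        return (uint64_t)nowMediaUs < (uint64_t)oldNowMediaUs + kAllowedFluctuationUs
            && (uint64_t)nowMediaUs + kAllowedFluctuationUs > (uint64_t)oldNowMediaUs;
    }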
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f67f717..a1ada4f 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1164,10 +1164,10 @@
 
     // update does its own mutex locking
     updateMediametrics();
+    mHdrInfoFlags = 0;
 
     // ensure mutex while we do our own work
     Mutex::Autolock _lock(mMetricsLock);
-    mHdrInfoFlags = 0;
     if (mMetricsHandle != 0) {
         if (mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
@@ -1512,6 +1512,21 @@
     }
 }
 
+bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
+    Mutex::Autolock al(mBufferLock);
+    BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
+    sp<MediaCodecBuffer> buffer = info->mData;
+    int32_t flags;
+    CHECK(buffer->meta()->findInt32("flags", &flags));
+    if (flags & BUFFER_FLAG_DECODE_ONLY) {
+        info->mOwnedByClient = false;
+        info->mData.clear();
+        mBufferChannel->discardBuffer(buffer);
+        return true;
+    }
+    return false;
+}
+
 // static
 status_t MediaCodec::PostAndAwaitResponse(
         const sp<AMessage> &msg, sp<AMessage> *response) {
@@ -3201,7 +3216,8 @@
     return true;
 }
 
-bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
+MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
+        const sp<AReplyToken> &replyID, bool newRequest) {
     if (!isExecuting() || (mFlags & kFlagIsAsync)
             || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
         PostReplyWithError(replyID, INVALID_OPERATION);
@@ -3214,7 +3230,7 @@
         sp<AMessage> response = new AMessage;
         BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
         if (!info) {
-            return false;
+            return DequeueOutputResult::kNoBuffer;
         }
 
         // In synchronous mode, output format change should be handled
@@ -3225,10 +3241,13 @@
         if (mFlags & kFlagOutputFormatChanged) {
             PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
             mFlags &= ~kFlagOutputFormatChanged;
-            return true;
+            return DequeueOutputResult::kRepliedWithError;
         }
 
         ssize_t index = dequeuePortBuffer(kPortIndexOutput);
+        if (discardDecodeOnlyOutputBuffer(index)) {
+            return DequeueOutputResult::kDiscardedBuffer;
+        }
 
         response->setSize("index", index);
         response->setSize("offset", buffer->offset());
@@ -3247,9 +3266,10 @@
         statsBufferReceived(timeUs, buffer);
 
         response->postReply(replyID);
+        return DequeueOutputResult::kSuccess;
     }
 
-    return true;
+    return DequeueOutputResult::kRepliedWithError;
 }
 
 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
@@ -3844,11 +3864,26 @@
                         handleOutputFormatChangeIfNeeded(buffer);
                         onOutputBufferAvailable();
                     } else if (mFlags & kFlagDequeueOutputPending) {
-                        CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
-
-                        ++mDequeueOutputTimeoutGeneration;
-                        mFlags &= ~kFlagDequeueOutputPending;
-                        mDequeueOutputReplyID = 0;
+                        DequeueOutputResult dequeueResult =
+                            handleDequeueOutputBuffer(mDequeueOutputReplyID);
+                        switch (dequeueResult) {
+                            case DequeueOutputResult::kNoBuffer:
+                                TRESPASS();
+                                break;
+                            case DequeueOutputResult::kDiscardedBuffer:
+                                break;
+                            case DequeueOutputResult::kRepliedWithError:
+                                [[fallthrough]];
+                            case DequeueOutputResult::kSuccess:
+                            {
+                                ++mDequeueOutputTimeoutGeneration;
+                                mFlags &= ~kFlagDequeueOutputPending;
+                                mDequeueOutputReplyID = 0;
+                                break;
+                            }
+                            default:
+                                TRESPASS();
+                        }
                     } else {
                         postActivityNotificationIfPossible();
                     }
@@ -4547,27 +4582,39 @@
                 break;
             }
 
-            if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
-                break;
-            }
+            DequeueOutputResult dequeueResult =
+                handleDequeueOutputBuffer(replyID, true /* new request */);
+            switch (dequeueResult) {
+                case DequeueOutputResult::kNoBuffer:
+                    [[fallthrough]];
+                case DequeueOutputResult::kDiscardedBuffer:
+                {
+                    int64_t timeoutUs;
+                    CHECK(msg->findInt64("timeoutUs", &timeoutUs));
 
-            int64_t timeoutUs;
-            CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+                    if (timeoutUs == 0LL) {
+                        PostReplyWithError(replyID, -EAGAIN);
+                        break;
+                    }
 
-            if (timeoutUs == 0LL) {
-                PostReplyWithError(replyID, -EAGAIN);
-                break;
-            }
+                    mFlags |= kFlagDequeueOutputPending;
+                    mDequeueOutputReplyID = replyID;
 
-            mFlags |= kFlagDequeueOutputPending;
-            mDequeueOutputReplyID = replyID;
-
-            if (timeoutUs > 0LL) {
-                sp<AMessage> timeoutMsg =
-                    new AMessage(kWhatDequeueOutputTimedOut, this);
-                timeoutMsg->setInt32(
-                        "generation", ++mDequeueOutputTimeoutGeneration);
-                timeoutMsg->post(timeoutUs);
+                    if (timeoutUs > 0LL) {
+                        sp<AMessage> timeoutMsg =
+                            new AMessage(kWhatDequeueOutputTimedOut, this);
+                        timeoutMsg->setInt32(
+                                "generation", ++mDequeueOutputTimeoutGeneration);
+                        timeoutMsg->post(timeoutUs);
+                    }
+                    break;
+                }
+                case DequeueOutputResult::kRepliedWithError:
+                    [[fallthrough]];
+                case DequeueOutputResult::kSuccess:
+                    break;
+                default:
+                    TRESPASS();
             }
             break;
         }
@@ -5229,6 +5276,7 @@
 
     buffer->setRange(offset, size);
     buffer->meta()->setInt64("timeUs", timeUs);
+
     if (flags & BUFFER_FLAG_EOS) {
         buffer->meta()->setInt32("eos", true);
     }
@@ -5237,7 +5285,12 @@
         buffer->meta()->setInt32("csd", true);
     }
 
-    if (mTunneled) {
+    bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
+    if (isBufferDecodeOnly) {
+        buffer->meta()->setInt32("decode-only", true);
+    }
+
+    if (mTunneled && !isBufferDecodeOnly) {
         TunnelPeekState previousState = mTunnelPeekState;
         switch(mTunnelPeekState){
             case TunnelPeekState::kEnabledNoBuffer:
@@ -5550,6 +5603,9 @@
 void MediaCodec::onOutputBufferAvailable() {
     int32_t index;
     while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
+        if (discardDecodeOnlyOutputBuffer(index)) {
+            continue;
+        }
         const sp<MediaCodecBuffer> &buffer =
             mPortBuffers[kPortIndexOutput][index].mData;
         sp<AMessage> msg = mCallback->dup();
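
In both the synchronous path (DequeueOutputResult::kDiscardedBuffer) and the async path above, outputs flagged BUFFER_FLAG_DECODE_ONLY are released back to the codec instead of being surfaced to the client. A standalone sketch of that filtering step (buffer and callback types are hypothetical):

    #include <cstdint>
    #include <functional>
    #include <vector>

    struct OutBuffer { int64_t timeUs; uint32_t flags; };
    constexpr uint32_t kBufferFlagDecodeOnly = 32;

    // Deliver every dequeued output buffer except those marked decode-only,
    // which are discarded back to the codec and never reach the client.
    static void drainOutputs(std::vector<OutBuffer>& queue,
                             const std::function<void(const OutBuffer&)>& notifyClient,
                             const std::function<void(const OutBuffer&)>& releaseToCodec) {
        for (const OutBuffer& b : queue) {
            if (b.flags & kBufferFlagDecodeOnly) {
                releaseToCodec(b);
                continue;
            }
            notifyClient(b);
        }
        queue.clear();
    }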
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index a3040f4..78b7288 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -354,8 +354,19 @@
 
 //static
 void MediaCodecList::findMatchingCodecs(
-        const char *mime, bool encoder, uint32_t flags, sp<AMessage> format,
+        const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
         Vector<AString> *matches) {
+    findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ true);
+    if (matches->empty()) {
+        ALOGV("no matching codec found, retrying without profile check");
+        findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ false);
+    }
+}
+
+//static
+void MediaCodecList::findMatchingCodecs(
+        const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
+        Vector<AString> *matches, bool checkProfile) {
     matches->clear();
 
     const sp<IMediaCodecList> list = getInstance();
@@ -379,7 +390,7 @@
 
         AString componentName = info->getCodecName();
 
-        if (!codecHandlesFormat(mime, info, format)) {
+        if (!codecHandlesFormat(mime, info, format, checkProfile)) {
             ALOGV("skipping codec '%s' which doesn't satisfy format %s",
                   componentName.c_str(), format->debugString(2).c_str());
             continue;
@@ -400,9 +411,10 @@
     }
 }
 
-/*static*/
-bool MediaCodecList::codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info,
-                                        sp<AMessage> format) {
+// static
+bool MediaCodecList::codecHandlesFormat(
+        const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format,
+        bool checkProfile) {
 
     if (format == nullptr) {
         ALOGD("codecHandlesFormat: no format, so no extra checks");
@@ -510,9 +522,7 @@
         }
 
         int32_t profile = -1;
-        if (format->findInt32("profile", &profile)) {
-            int32_t level = -1;
-            format->findInt32("level", &level);
+        if (checkProfile && format->findInt32("profile", &profile)) {
             Vector<MediaCodecInfo::ProfileLevel> profileLevels;
             capabilities->getSupportedProfileLevels(&profileLevels);
             auto it = profileLevels.begin();
@@ -520,14 +530,11 @@
                 if (profile != it->mProfile) {
                     continue;
                 }
-                if (level > -1 && level > it->mLevel) {
-                    continue;
-                }
                 break;
             }
 
             if (it == profileLevels.end()) {
-                ALOGV("Codec does not support profile %d with level %d", profile, level);
+                ALOGV("Codec does not support profile %d", profile);
                 return false;
             }
         }
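
findMatchingCodecs above now runs a strict pass that honours the requested profile and, only if that yields nothing, retries without the profile constraint. A standalone sketch of the fallback pattern (the CodecInfo fields are hypothetical placeholders for the real capability query):

    #include <string>
    #include <vector>

    struct CodecInfo { std::string name; bool supportsRequestedProfile; };

    static void findCodecs(const std::vector<CodecInfo>& all, bool checkProfile,
                           std::vector<std::string>* matches) {
        matches->clear();
        for (const CodecInfo& info : all) {
            if (checkProfile && !info.supportsRequestedProfile) continue;  // strict pass only
            matches->push_back(info.name);
        }
    }

    static void findCodecsWithFallback(const std::vector<CodecInfo>& all,
                                       std::vector<std::string>* matches) {
        findCodecs(all, /* checkProfile= */ true, matches);
        if (matches->empty()) {
            // Nothing advertises the profile: retry without the profile constraint.
            findCodecs(all, /* checkProfile= */ false, matches);
        }
    }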
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 9f590e5..9768f97 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -162,7 +162,7 @@
         return INVALID_OPERATION;
     }
     if (!isMp4Format(mFormat)) {
-        ALOGE("setLocation() is only supported for .mp4, .3gp or .heic output.");
+        ALOGE("setLocation() is only supported for .mp4, .3gp, .heic or .avif output.");
         return INVALID_OPERATION;
     }
 
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 3509ef8..03d8b78 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -60,7 +60,7 @@
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8">
             <Alias name="OMX.google.vp8.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-1000000" />
@@ -70,7 +70,7 @@
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9">
             <Alias name="OMX.google.vp9.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-500000" />
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index 829f403..2c258e4 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -118,5 +118,13 @@
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1.encoder" type="video/av01">
+            <Limit name="size" min="2x2" max="2048x2048" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-3600" />
+            <Limit name="bitrate" range="1-40000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+        </MediaCodec>
     </Encoders>
 </Included>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index b29c3b6..05f2760 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -165,7 +165,7 @@
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="block-count" range="1-16384" />
@@ -182,7 +182,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp9.decoder" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="size" min="2x2" max="2048x2048" />
@@ -351,5 +351,14 @@
             <Feature name="bitrate-modes" value="VBR,CBR" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1.encoder" type="video/av01" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="2048x2048" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-8200" />
+            <Limit name="bitrate" range="1-40000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
     </Encoders>
 </MediaCodecs>
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 38a4c1e..08c7917 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -17,6 +17,7 @@
 #ifndef A_CODEC_H_
 #define A_CODEC_H_
 
+#include <set>
 #include <stdint.h>
 #include <list>
 #include <vector>
@@ -270,6 +271,7 @@
     std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
+    std::set<int64_t> mDecodeOnlyTimesUs;
 
     std::list<sp<AMessage>> mDeferredQueue;
 
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 7c3eca6..cf76606 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -196,7 +196,9 @@
     typedef key_value_pair_t< const char *, Vector<uint16_t> > ItemRefs;
     typedef struct _ItemInfo {
         bool isGrid() const { return !strcmp("grid", itemType); }
-        bool isImage() const { return !strcmp("hvc1", itemType) || isGrid(); }
+        bool isImage() const {
+            return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
+        }
         const char *itemType;
         uint16_t itemId;
         bool isPrimary;
@@ -224,10 +226,11 @@
         int32_t width;
         int32_t height;
         int32_t rotation;
-        sp<ABuffer> hvcc;
+        sp<ABuffer> data;
     } ItemProperty;
 
     bool mHasFileLevelMeta;
+    bool mIsAvif; // used to differentiate HEIC and AVIF under the same OUTPUT_FORMAT_HEIF
     uint64_t mFileLevelMetaDataSize;
     bool mHasMoovBox;
     uint32_t mPrimaryItemId;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index edb3786..dbc97db 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -90,6 +90,7 @@
         BUFFER_FLAG_EOS           = 4,
         BUFFER_FLAG_PARTIAL_FRAME = 8,
         BUFFER_FLAG_MUXER_DATA    = 16,
+        BUFFER_FLAG_DECODE_ONLY   = 32,
     };
 
     enum CVODegree {
@@ -409,6 +410,13 @@
         kBufferRendered,
     };
 
+    enum class DequeueOutputResult {
+        kNoBuffer,
+        kDiscardedBuffer,
+        kRepliedWithError,
+        kSuccess,
+    };
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -555,7 +563,9 @@
             sp<MediaCodecBuffer> *buffer, sp<AMessage> *format);
 
     bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
-    bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
+    DequeueOutputResult handleDequeueOutputBuffer(
+            const sp<AReplyToken> &replyID,
+            bool newRequest = false);
     void cancelPendingDequeueOperations();
 
     void extractCSD(const sp<AMessage> &format);
@@ -639,6 +649,7 @@
 
     void statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
     void statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
+    bool discardDecodeOnlyOutputBuffer(size_t index);
 
     enum {
         // the default shape of our latency histogram buckets
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 78792c5..4e9623b 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -869,6 +869,7 @@
 constexpr int32_t BUFFER_FLAG_END_OF_STREAM = 4;
 constexpr int32_t BUFFER_FLAG_KEY_FRAME = 1;
 constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
+constexpr int32_t BUFFER_FLAG_DECODE_ONLY = 32;
 constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
 constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
 constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 3cf455c..56c6a45 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -80,11 +80,9 @@
             const char *mime,
             bool createEncoder,
             uint32_t flags,
-            sp<AMessage> format,
+            const sp<AMessage> &format,
             Vector<AString> *matchingCodecs);
 
-    static bool codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info, sp<AMessage> format);
-
     static bool isSoftwareCodec(const AString &componentName);
 
 private:
@@ -115,6 +113,20 @@
 
     MediaCodecList(const MediaCodecList&) = delete;
     MediaCodecList& operator=(const MediaCodecList&) = delete;
+
+    static void findMatchingCodecs(
+            const char *mime,
+            bool createEncoder,
+            uint32_t flags,
+            const sp<AMessage> &format,
+            Vector<AString> *matchingCodecs,
+            bool checkProfile);
+
+    static bool codecHandlesFormat(
+            const char *mime,
+            const sp<MediaCodecInfo> &info,
+            const sp<AMessage> &format,
+            bool checkProfile);
 };
 
 }  // namespace android
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bebd516..c82a303 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -316,7 +316,7 @@
     // that a new message is available on the queue. Otherwise, the message stays on the queue, but
     // the listener is not notified of it. It will process this message when a subsequent message
     // is posted with |realTime| set to true.
-    void post(const omx_message &msg, bool realTime = true);
+    void post(const omx_message &msg, bool realTime);
 
     bool loop();
 
@@ -325,18 +325,15 @@
 
 private:
     enum {
-        // This is used for frame_rendered message batching, which will eventually end up in a
-        // single AMessage in MediaCodec when it is signaled to the app. AMessage can contain
-        // up-to 64 key-value pairs, and each frame_rendered message uses 2 keys, so the max
-        // value for this would be 32. Nonetheless, limit this to 12 to which gives at least 10
-        // mseconds of batching at 120Hz.
-        kMaxQueueSize = 12,
+        // Don't delay non-realtime messages longer than 200ms
+        kMaxBatchedDelayNs = 200 * 1000 * 1000,
     };
 
     Mutex mLock;
 
     sp<OMXNodeInstance> const mOwner;
     bool mDone;
+    bool mHasBatchedMessages;
     Condition mQueueChanged;
     std::list<omx_message> mQueue;
 
@@ -350,7 +347,8 @@
 
 OMXNodeInstance::CallbackDispatcher::CallbackDispatcher(const sp<OMXNodeInstance> &owner)
     : mOwner(owner),
-      mDone(false) {
+      mDone(false),
+      mHasBatchedMessages(false) {
     mThread = new CallbackDispatcherThread(this);
     mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
 }
@@ -358,7 +356,6 @@
 OMXNodeInstance::CallbackDispatcher::~CallbackDispatcher() {
     {
         Mutex::Autolock autoLock(mLock);
-
         mDone = true;
         mQueueChanged.signal();
     }
@@ -377,8 +374,11 @@
     Mutex::Autolock autoLock(mLock);
 
     mQueue.push_back(msg);
-    if (realTime || mQueue.size() >= kMaxQueueSize) {
+    if (realTime) {
         mQueueChanged.signal();
+    } else if (!mHasBatchedMessages) {
+        mHasBatchedMessages = true;
+        mQueueChanged.signal(); // The first non-realtime message is not batched.
     }
 }
 
@@ -393,11 +393,16 @@
 bool OMXNodeInstance::CallbackDispatcher::loop() {
     for (;;) {
         std::list<omx_message> messages;
+        std::list<long long> messageTimestamps;
 
         {
             Mutex::Autolock autoLock(mLock);
             while (!mDone && mQueue.empty()) {
-                mQueueChanged.wait(mLock);
+                if (mHasBatchedMessages) {
+                    mQueueChanged.waitRelative(mLock, kMaxBatchedDelayNs);
+                } else {
+                    mQueueChanged.wait(mLock);
+                }
             }
 
             if (mDone) {
@@ -2447,7 +2452,7 @@
     msg.type = omx_message::EMPTY_BUFFER_DONE;
     msg.fenceFd = fenceFd;
     msg.u.buffer_data.buffer = instance->findBufferID(pBuffer);
-    instance->mDispatcher->post(msg);
+    instance->mDispatcher->post(msg, true /* realTime */);
 
     return OMX_ErrorNone;
 }
@@ -2475,7 +2480,7 @@
     msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
     msg.u.extended_buffer_data.flags = pBuffer->nFlags;
     msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
-    instance->mDispatcher->post(msg);
+    instance->mDispatcher->post(msg, true /* realTime */);
 
     return OMX_ErrorNone;
 }
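
The dispatcher above replaces the fixed queue-depth flush with a time-bounded batch: the first non-realtime message signals the loop immediately, later ones do not, and once batching has started the loop never sleeps longer than kMaxBatchedDelayNs, so unsignalled messages are still drained within the cap. A standalone sketch of that timed wait, with std primitives in place of Mutex/Condition and a hypothetical 200 ms cap (shutdown handling elided):

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <mutex>

    class BatchingQueue {
    public:
        void post(int msg, bool realTime) {
            std::lock_guard<std::mutex> lock(mLock);
            mQueue.push_back(msg);
            if (realTime) {
                mChanged.notify_one();
            } else if (!mHasBatched) {
                mHasBatched = true;          // the first batched message still wakes the loop
                mChanged.notify_one();
            }
        }

        std::deque<int> drain() {
            std::unique_lock<std::mutex> lock(mLock);
            while (mQueue.empty()) {
                if (mHasBatched) {
                    // Later batched posts do not signal; cap the sleep so they are
                    // still picked up within ~200 ms.
                    mChanged.wait_for(lock, std::chrono::milliseconds(200));
                } else {
                    mChanged.wait(lock);
                }
            }
            std::deque<int> out;
            out.swap(mQueue);
            return out;
        }

    private:
        std::mutex mLock;
        std::condition_variable mChanged;
        std::deque<int> mQueue;
        bool mHasBatched = false;
    };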
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
index 4827d9e..0906433 100644
--- a/media/libstagefright/omx/OMXStore.cpp
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -140,7 +140,8 @@
 
         Vector<String8> roles;
         OMX_ERRORTYPE err = plugin->getRolesOfComponent(name, &roles);
-        if (err == OMX_ErrorNone) {
+        static_assert(std::string_view("OMX.google.").size() == 11);
+        if (err == OMX_ErrorNone && strncmp(name, "OMX.google.", 11) == 0) {
             bool skip = false;
             for (String8 role : roles) {
                 if (role.find("video_decoder") != -1 || role.find("video_encoder") != -1) {
diff --git a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
index 2697ff4..3a67cc2 100644
--- a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
+++ b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
@@ -60,7 +60,7 @@
     }
 
     static sp<GraphicBuffer> allocateDefaultBuffer() {
-        return new GraphicBuffer(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
+        return sp<GraphicBuffer>::make(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
                                  HAL_PIXEL_FORMAT_RGBA_8888, 1,
                                  GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                                          GRALLOC_USAGE_HW_RENDER,
@@ -69,7 +69,7 @@
 
     // Allocates a 1x1 buffer to fill with a solid color
     static sp<GraphicBuffer> allocateSourceBuffer(uint32_t width, uint32_t height) {
-        return new GraphicBuffer(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
+        return sp<GraphicBuffer>::make(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
                                  GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                                          GRALLOC_USAGE_HW_TEXTURE,
                                  "input");
@@ -275,7 +275,7 @@
         renderengine::DisplaySettings settings;
         std::vector<const renderengine::LayerSettings*> layers;
         // Meaningless buffer since we don't do any drawing
-        sp<GraphicBuffer> buffer = new GraphicBuffer();
+        sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
         invokeDraw(settings, layers, buffer);
     }
 
diff --git a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
index 97c7442..b82586b 100644
--- a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
+++ b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
@@ -70,7 +70,7 @@
 }
 
 TEST_F(RenderEngineThreadedTest, bindExternalBuffer_withBuffer) {
-    sp<GraphicBuffer> buf = new GraphicBuffer();
+    sp<GraphicBuffer> buf = sp<GraphicBuffer>::make();
     EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, buf, Eq(nullptr)))
             .WillOnce(Return(NO_ERROR));
     status_t result = mThreadedRE->bindExternalTextureBuffer(0, buf, nullptr);
@@ -83,7 +83,7 @@
 }
 
 TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_withBuffer) {
-    sp<GraphicBuffer> buf = new GraphicBuffer();
+    sp<GraphicBuffer> buf = sp<GraphicBuffer>::make();
     EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(buf));
     mThreadedRE->cacheExternalTextureBuffer(buf);
 }
@@ -198,7 +198,7 @@
 TEST_F(RenderEngineThreadedTest, drawLayers) {
     renderengine::DisplaySettings settings;
     std::vector<const renderengine::LayerSettings*> layers;
-    sp<GraphicBuffer> buffer = new GraphicBuffer();
+    sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
     base::unique_fd bufferFence;
     base::unique_fd drawFence;
 
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index df658ee..c4dadd0 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -11,6 +11,7 @@
     double_loadable: true,
 
     srcs: [
+        ":libgui_frame_event_aidl",
         "FrameDropper.cpp",
         "GraphicBufferSource.cpp",
     ],
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index 5637b37..f3a1723 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -61,6 +61,7 @@
         "libcodec2_soft_vp9dec",
         // "libcodec2_soft_av1dec_aom",  // replaced by the gav1 implementation
         "libcodec2_soft_av1dec_gav1",
+        "libcodec2_soft_av1enc",
         "libcodec2_soft_vp8enc",
         "libcodec2_soft_vp9enc",
         "libcodec2_soft_rawdec",
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index ca17117..dc8384d 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -110,6 +110,11 @@
                 "-DNO_IMEMORY",
             ],
         },
+        host: {
+            sanitize: {
+                cfi: false,
+            },
+        },
         apex: {
             exclude_shared_libs: [
                 "libbinder",
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index c46a692..c2093ac 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -24,6 +24,7 @@
 
 #include <android_media_Utils.h>
 #include <private/android/AHardwareBufferHelpers.h>
+#include <ui/PublicFormat.h>
 #include <utils/Log.h>
 
 using namespace android;
@@ -34,6 +35,8 @@
         int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
         mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
         mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+    PublicFormat publicFormat = static_cast<PublicFormat>(format);
+    mHalDataSpace = mapPublicFormatToHalDataspace(publicFormat);
     LOG_FATAL_IF(reader == nullptr, "AImageReader shouldn't be null while creating AImage");
 }
 
@@ -156,6 +159,20 @@
     return AMEDIA_OK;
 }
 
+media_status_t
+AImage::getDataSpace(android_dataspace* dataSpace) const {
+    if (dataSpace == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    *dataSpace = static_cast<android_dataspace>(-1);
+    if (isClosed()) {
+        ALOGE("%s: image %p has been closed!", __FUNCTION__, this);
+        return AMEDIA_ERROR_INVALID_OBJECT;
+    }
+    *dataSpace = mHalDataSpace;
+    return AMEDIA_OK;
+}
+
 media_status_t AImage::lockImage() {
     if (mBuffer == nullptr || mBuffer->mGraphicBuffer == nullptr) {
         LOG_ALWAYS_FATAL("%s: AImage %p has no buffer.", __FUNCTION__, this);
@@ -817,3 +834,15 @@
     }
     return image->getHardwareBuffer(buffer);
 }
+
+EXPORT
+media_status_t AImage_getDataSpace(
+    const AImage* image, /*out*/int32_t* dataSpace) {
+    ALOGV("%s", __FUNCTION__);
+
+    if (image == nullptr || dataSpace == nullptr) {
+        ALOGE("%s: bad argument. image %p dataSpace %p", __FUNCTION__, image, dataSpace);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    return image->getDataSpace((android_dataspace*)(dataSpace));
+}
\ No newline at end of file
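Illustrative note (not part of the patch): a minimal sketch of how a client might consume the new accessor once an image has been acquired. Only AImage_getDataSpace comes from this change; the reader setup, helper name, and logging are assumptions for the example.

```cpp
// Hypothetical caller: query the dataspace of an acquired AImage.
// Assumes an already configured AImageReader* with images pending.
#include <android/log.h>
#include <media/NdkImage.h>
#include <media/NdkImageReader.h>

static void logAcquiredImageDataSpace(AImageReader* reader) {
    AImage* image = nullptr;
    if (AImageReader_acquireNextImage(reader, &image) != AMEDIA_OK) {
        return;  // nothing available yet
    }
    int32_t dataSpace = -1;
    if (AImage_getDataSpace(image, &dataSpace) == AMEDIA_OK) {
        // dataSpace holds the HAL dataspace mapped from the image format.
        __android_log_print(ANDROID_LOG_VERBOSE, "AImageDemo", "dataspace: %d", dataSpace);
    }
    AImage_delete(image);
}
```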
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 05115b9..dc10a6a 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -82,6 +82,7 @@
     media_status_t getPlaneRowStride(int planeIdx, /*out*/int32_t* rowStride) const;
     media_status_t getPlaneData(int planeIdx,/*out*/uint8_t** data, /*out*/int* dataLength) const;
     media_status_t getHardwareBuffer(/*out*/AHardwareBuffer** buffer) const;
+    media_status_t getDataSpace(/*out*/android_dataspace* dataSpace) const;
 
   private:
     // AImage should be deleted through free() API.
@@ -101,6 +102,7 @@
     const int32_t              mWidth;
     const int32_t              mHeight;
     const int32_t              mNumPlanes;
+    android_dataspace          mHalDataSpace = HAL_DATASPACE_UNKNOWN;
     bool                       mIsClosed = false;
     mutable Mutex              mLock;
 };
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 9270499..067c8f4 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -46,6 +46,9 @@
 
 static constexpr int kWindowHalTokenSizeMax = 256;
 
+static media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
+                                         uint64_t usage, int32_t maxImages,
+                                         /*out*/ AImageReader**& reader);
 static native_handle_t *convertHalTokenToNativeHandle(const HalToken &halToken);
 
 bool
@@ -265,13 +268,17 @@
                            int32_t height,
                            int32_t format,
                            uint64_t usage,
-                           int32_t maxImages)
+                           int32_t maxImages,
+                           uint32_t hardwareBufferFormat,
+                           android_dataspace dataSpace)
     : mWidth(width),
       mHeight(height),
       mFormat(format),
       mUsage(usage),
       mMaxImages(maxImages),
       mNumPlanes(getNumPlanesForFormat(format)),
+      mHalFormat(hardwareBufferFormat),
+      mHalDataSpace(dataSpace),
       mFrameListener(new FrameListener(this)),
       mBufferRemovedListener(new BufferRemovedListener(this)) {}
 
@@ -282,9 +289,6 @@
 
 media_status_t
 AImageReader::init() {
-    PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
-    mHalFormat = mapPublicFormatToHalFormat(publicFormat);
-    mHalDataSpace = mapPublicFormatToHalDataspace(publicFormat);
     mHalUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
 
     sp<IGraphicBufferProducer> gbProducer;
@@ -648,6 +652,41 @@
     }
 }
 
+static
+media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
+                                  uint64_t usage, int32_t maxImages,
+                                  /*out*/ AImageReader**& reader) {
+    if (reader == nullptr) {
+        ALOGE("%s: reader argument is null", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (width < 1 || height < 1) {
+        ALOGE("%s: image dimension must be positive: w:%d h:%d",
+                __FUNCTION__, width, height);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (maxImages < 1) {
+        ALOGE("%s: max outstanding image count must be at least 1 (%d)",
+                __FUNCTION__, maxImages);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (maxImages > BufferQueueDefs::NUM_BUFFER_SLOTS) {
+        ALOGE("%s: max outstanding image count (%d) cannot be larget than %d.",
+              __FUNCTION__, maxImages, BufferQueueDefs::NUM_BUFFER_SLOTS);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
+        ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
+                __FUNCTION__, format, usage);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    return AMEDIA_OK;
+}
+
 static native_handle_t *convertHalTokenToNativeHandle(
         const HalToken &halToken) {
     // We attempt to store halToken in the ints of the native_handle_t after its
@@ -698,42 +737,32 @@
 } //extern "C"
 
 EXPORT
+media_status_t AImageReader_newWithDataSpace(
+        int32_t width, int32_t height, uint64_t usage, int32_t maxImages,
+        uint32_t hardwareBufferFormat, int32_t dataSpace,
+        /*out*/ AImageReader** reader) {
+    ALOGV("%s", __FUNCTION__);
+
+    android_dataspace halDataSpace = static_cast<android_dataspace>(dataSpace);
+    int32_t format = static_cast<int32_t>(
+          mapHalFormatDataspaceToPublicFormat(hardwareBufferFormat, halDataSpace));
+    return AImageReader_newWithUsage(width, height, format, usage, maxImages, reader);
+}
+
+EXPORT
 media_status_t AImageReader_newWithUsage(
         int32_t width, int32_t height, int32_t format, uint64_t usage,
         int32_t maxImages, /*out*/ AImageReader** reader) {
     ALOGV("%s", __FUNCTION__);
 
-    if (width < 1 || height < 1) {
-        ALOGE("%s: image dimension must be positive: w:%d h:%d",
-                __FUNCTION__, width, height);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
+    media_status_t ret = validateParameters(width, height, format, usage, maxImages, reader);
+    if (ret != AMEDIA_OK) {
+        return ret;
+    }
 
-    if (maxImages < 1) {
-        ALOGE("%s: max outstanding image count must be at least 1 (%d)",
-                __FUNCTION__, maxImages);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (maxImages > BufferQueueDefs::NUM_BUFFER_SLOTS) {
-        ALOGE("%s: max outstanding image count (%d) cannot be larget than %d.",
-              __FUNCTION__, maxImages, BufferQueueDefs::NUM_BUFFER_SLOTS);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
-        ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
-                __FUNCTION__, format, usage);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (reader == nullptr) {
-        ALOGE("%s: reader argument is null", __FUNCTION__);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
+    PublicFormat publicFormat = static_cast<PublicFormat>(format);
+    uint32_t halFormat = mapPublicFormatToHalFormat(publicFormat);
+    android_dataspace halDataSpace = mapPublicFormatToHalDataspace(publicFormat);
 
     AImageReader* tmpReader = new AImageReader(
-        width, height, format, usage, maxImages);
+        width, height, format, usage, maxImages, halFormat, halDataSpace);
     if (tmpReader == nullptr) {
         ALOGE("%s: AImageReader allocation failed", __FUNCTION__);
         return AMEDIA_ERROR_UNKNOWN;
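Illustrative note (not part of the patch): a hedged usage sketch for the new entry point, which creates a reader from a hardware-buffer format plus an explicit dataspace instead of a PublicFormat. The specific usage, format, and dataspace constants below are assumptions; whether a given combination is accepted still depends on the reader's format/usage validation.

```cpp
#include <android/data_space.h>
#include <android/hardware_buffer.h>
#include <media/NdkImageReader.h>

// Hypothetical helper: create a Display-P3 RGBA reader via the new API.
static AImageReader* createDisplayP3Reader(int32_t width, int32_t height) {
    AImageReader* reader = nullptr;
    media_status_t status = AImageReader_newWithDataSpace(
            width, height,
            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
            /*maxImages=*/4,
            AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM,
            ADATASPACE_DISPLAY_P3,
            &reader);
    return status == AMEDIA_OK ? reader : nullptr;
}
```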
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 37c606e..0199616 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -56,10 +56,12 @@
                  int32_t height,
                  int32_t format,
                  uint64_t usage,
-                 int32_t maxImages);
+                 int32_t maxImages,
+                 uint32_t hardwareBufferFormat,
+                 android_dataspace dataSpace);
     ~AImageReader();
 
-    // Inintialize AImageReader, uninitialized or failed to initialize AImageReader
+    // Initialize AImageReader, uninitialized or failed to initialize AImageReader
     // should never be passed to application
     media_status_t init();
 
@@ -79,7 +81,6 @@
     void           close();
 
   private:
-
     friend struct AImage; // for grabing reader lock
 
     BufferItem* getBufferItemLocked();
@@ -118,13 +119,16 @@
 
     const int32_t mWidth;
     const int32_t mHeight;
-    const int32_t mFormat;
+    int32_t mFormat;
     const uint64_t mUsage;  // AHARDWAREBUFFER_USAGE_* flags.
     const int32_t mMaxImages;
 
     // TODO(jwcai) Seems completely unused in AImageReader class.
     const int32_t mNumPlanes;
 
+    uint32_t mHalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+    android_dataspace mHalDataSpace = HAL_DATASPACE_UNKNOWN;
+
     struct FrameListener : public ConsumerBase::FrameAvailableListener {
       public:
         explicit FrameListener(AImageReader* parent) : mReader(parent) {}
@@ -155,8 +159,6 @@
     };
     sp<BufferRemovedListener> mBufferRemovedListener;
 
-    int mHalFormat;
-    android_dataspace mHalDataSpace;
     uint64_t mHalUsage;
 
     sp<IGraphicBufferProducer> mProducer;
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index ed31c02..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -64,6 +64,10 @@
 
         if (untranslated.find(err) == untranslated.end()) {
             ALOGE("untranslated sf error code: %d", err);
+            char err_as_string[32];
+            snprintf(err_as_string, sizeof(err_as_string), "%d", err);
+            android_errorWriteWithInfoLog(0x534e4554, "224869524", -1,
+                                          err_as_string, strlen(err_as_string));
             untranslated.insert(err);
         }
     }
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 6d3c348..386e42c 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -125,6 +125,7 @@
 AMediaFormat* AMediaExtractor_getFileFormat(AMediaExtractor *mData) {
     sp<AMessage> format;
     mData->mImpl->getFileFormat(&format);
+    // Ignore any error; we still want to return the (possibly empty) format.
     return AMediaFormat_fromMsg(&format);
 }
 
@@ -247,7 +248,10 @@
     }
 
     sp<AMessage> format;
-    ex->mImpl->getFileFormat(&format);
+    if (ex->mImpl->getFileFormat(&format) != OK) {
+        android_errorWriteWithInfoLog(0x534e4554, "243222985", -1, nullptr, 0);
+        return NULL;
+    }
     sp<ABuffer> buffer;
     if(!format->findBuffer("pssh", &buffer)) {
         return NULL;
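Illustrative note (not part of the patch): with this change a failed getFileFormat() surfaces to callers as a NULL PsshInfo, the same way a file without PSSH data already did, so defensive callers keep working unchanged. A minimal sketch; the helper name is hypothetical.

```cpp
#include <media/NdkMediaExtractor.h>

static size_t countPsshEntries(AMediaExtractor* extractor) {
    PsshInfo* info = AMediaExtractor_getPsshInfo(extractor);
    if (info == nullptr) {
        return 0;  // no PSSH boxes, or the container format could not be read
    }
    return info->numentries;
}
```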
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index a95e874..c0de4e4 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -332,6 +332,9 @@
     if (name == nullptr) {
         return;
     }
+    if (value == nullptr) {
+        return;
+    }
     // AMessage::setString() makes a copy of the string
     format->mFormat->setString(name, value, strlen(value));
 }
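Illustrative note (not part of the patch): the added guard turns a null value into a no-op instead of a crash in strlen(). A small caller sketch, assuming the string may legitimately be absent; the helper name is hypothetical.

```cpp
#include <media/NdkMediaFormat.h>

static void setOptionalTitle(AMediaFormat* format, const char* title /* may be null */) {
    // Safe even when title is nullptr after this change; the call simply returns.
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_TITLE, title);
}
```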
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
new file mode 100644
index 0000000..a3d6a96
--- /dev/null
+++ b/media/ndk/fuzzer/Android.bp
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_ndk_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_ndk_license"],
+}
+
+cc_defaults {
+     name: "libmediandk_fuzzer_defaults",
+     shared_libs: [
+        "libandroid_runtime_lazy",
+        "libbase",
+        "libdatasource",
+        "libmedia",
+        "libmediadrm",
+        "libmedia_omx",
+        "libmedia_jni_utils",
+        "libstagefright",
+        "libstagefright_foundation",
+        "liblog",
+        "libutils",
+        "libcutils",
+        "libnativewindow",
+        "libhidlbase",
+        "libgui",
+        "libui",
+        "libmediandk",
+     ],
+     static_libs: [
+        "libmediandk_utils",
+        "libnativehelper_lazy",
+     ],
+     header_libs: [
+         "media_ndk_headers",
+     ],
+     fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "ndk_crypto_fuzzer",
+    srcs: ["ndk_crypto_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+}
+
+cc_fuzz {
+     name: "ndk_image_reader_fuzzer",
+     srcs: [
+        "ndk_image_reader_fuzzer.cpp",
+     ],
+     shared_libs: [
+        "android.hidl.token@1.0-utils",
+        "android.hardware.graphics.bufferqueue@1.0",
+     ],
+     cflags: [
+        "-D__ANDROID_VNDK__",
+     ],
+     defaults: ["libmediandk_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "ndk_extractor_fuzzer",
+    srcs: ["ndk_extractor_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+    shared_libs: ["libbinder_ndk",],
+    corpus: ["corpus/*"],
+}
+
+cc_fuzz {
+    name: "ndk_mediaformat_fuzzer",
+    srcs: ["ndk_mediaformat_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
+
+cc_fuzz {
+    name: "ndk_drm_fuzzer",
+    srcs: ["ndk_drm_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
+
+cc_fuzz {
+    name: "ndk_mediamuxer_fuzzer",
+    srcs: ["ndk_mediamuxer_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+    shared_libs: ["libbinder_ndk",],
+}
+
+cc_fuzz {
+    name: "ndk_sync_codec_fuzzer",
+    srcs: [
+            "ndk_sync_codec_fuzzer.cpp",
+             "NdkMediaCodecFuzzerBase.cpp",
+          ],
+    header_libs: ["libnativewindow_headers",],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
new file mode 100644
index 0000000..fa81cd8
--- /dev/null
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
@@ -0,0 +1,414 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+
+static const std::string kMimeTypes[] = {
+        MIMETYPE_AUDIO_AMR_NB, MIMETYPE_AUDIO_AMR_WB,    MIMETYPE_AUDIO_MPEG,
+        MIMETYPE_AUDIO_AAC,    MIMETYPE_AUDIO_FLAC,      MIMETYPE_AUDIO_VORBIS,
+        MIMETYPE_AUDIO_OPUS,   MIMETYPE_AUDIO_RAW,       MIMETYPE_AUDIO_MSGSM,
+        MIMETYPE_AUDIO_EAC3,   MIMETYPE_AUDIO_SCRAMBLED, MIMETYPE_VIDEO_VP8,
+        MIMETYPE_VIDEO_VP9,    MIMETYPE_VIDEO_AV1,       MIMETYPE_VIDEO_AVC,
+        MIMETYPE_VIDEO_HEVC,   MIMETYPE_VIDEO_MPEG4,     MIMETYPE_VIDEO_H263,
+        MIMETYPE_VIDEO_MPEG2,  MIMETYPE_VIDEO_RAW,       MIMETYPE_VIDEO_SCRAMBLED};
+
+static const std::string kEncoderNames[] = {
+        "c2.android.avc.encoder",    "c2.android.vp8.encoder",   "c2.android.vp9.encoder",
+        "c2.android.hevc.encoder",   "c2.android.mpeg2.encoder", "c2.android.mpeg4.encoder",
+        "c2.android.opus.encoder",   "c2.android.amrnb.encoder", "c2.android.flac.encoder",
+        "c2.android.av1-aom.encoder"};
+
+static const std::string kDecoderNames[] = {"c2.android.avc.decoder",
+                                            "c2.android.vp8.decoder",
+                                            "c2.android.vp9.decoder",
+                                            "c2.android.hevc.decoder",
+                                            "c2.android.mpeg2.decoder",
+                                            "c2.android.mpeg4.decoder",
+                                            "c2.android.opus.decoder",
+                                            "c2.android.amrnb.decoder",
+                                            "c2.android.flac.decoder",
+                                            "c2.android.av1-aom.decoder"};
+
+static const std::string kFormatIntKeys[] = {AMEDIAFORMAT_KEY_BIT_RATE,
+                                             AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                             AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+                                             AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_WIDTH,
+                                             AMEDIAFORMAT_KEY_HEIGHT,
+                                             AMEDIAFORMAT_KEY_FRAME_RATE,
+                                             AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                                             AMEDIAFORMAT_VIDEO_QP_P_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_P_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_I_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_I_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_B_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_B_MAX,
+                                             AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE,
+                                             AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
+                                             AMEDIAFORMAT_KEY_VALID_SAMPLES,
+                                             AMEDIAFORMAT_KEY_TRACK_INDEX,
+                                             AMEDIAFORMAT_KEY_TRACK_ID,
+                                             AMEDIAFORMAT_KEY_TILE_WIDTH,
+                                             AMEDIAFORMAT_KEY_TILE_HEIGHT,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT,
+                                             AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID,
+                                             AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT,
+                                             AMEDIAFORMAT_KEY_STRIDE,
+                                             AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+                                             AMEDIAFORMAT_KEY_SAR_WIDTH,
+                                             AMEDIAFORMAT_KEY_SAR_HEIGHT,
+                                             AMEDIAFORMAT_KEY_ROTATION,
+                                             AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN,
+                                             AMEDIAFORMAT_KEY_PROFILE,
+                                             AMEDIAFORMAT_KEY_PRIORITY,
+                                             AMEDIAFORMAT_KEY_PICTURE_TYPE,
+                                             AMEDIAFORMAT_KEY_PCM_ENCODING,
+                                             AMEDIAFORMAT_KEY_OPERATING_RATE,
+                                             AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                                             AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                                             AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER,
+                                             AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+                                             AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER,
+                                             AMEDIAFORMAT_KEY_LOW_LATENCY,
+                                             AMEDIAFORMAT_KEY_LOOP,
+                                             AMEDIAFORMAT_KEY_LEVEL,
+                                             AMEDIAFORMAT_KEY_LATENCY,
+                                             AMEDIAFORMAT_KEY_IS_SYNC_FRAME,
+                                             AMEDIAFORMAT_KEY_IS_DEFAULT,
+                                             AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+                                             AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_GRID_ROWS,
+                                             AMEDIAFORMAT_KEY_GRID_COLUMNS,
+                                             AMEDIAFORMAT_KEY_FRAME_COUNT,
+                                             AMEDIAFORMAT_KEY_ENCODER_PADDING,
+                                             AMEDIAFORMAT_KEY_ENCODER_DELAY,
+                                             AMEDIAFORMAT_KEY_DISPLAY_WIDTH,
+                                             AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                                             AMEDIAFORMAT_KEY_DISPLAY_CROP,
+                                             AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK,
+                                             AMEDIAFORMAT_KEY_CRYPTO_MODE,
+                                             AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK,
+                                             AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE,
+                                             AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+                                             AMEDIAFORMAT_KEY_COLOR_STANDARD,
+                                             AMEDIAFORMAT_KEY_COLOR_RANGE,
+                                             AMEDIAFORMAT_KEY_CHANNEL_MASK,
+                                             AMEDIAFORMAT_KEY_BITS_PER_SAMPLE,
+                                             AMEDIAFORMAT_KEY_BITRATE_MODE,
+                                             AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+                                             AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID,
+                                             AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID,
+                                             AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+                                             AMEDIAFORMAT_KEY_AAC_PROFILE,
+                                             AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+                                             AMEDIAFORMAT_KEY_XMP_SIZE,
+                                             AMEDIAFORMAT_KEY_XMP_OFFSET,
+                                             AMEDIAFORMAT_KEY_TIME_US,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_TIME,
+                                             AMEDIAFORMAT_KEY_TARGET_TIME,
+                                             AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND,
+                                             AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET,
+                                             AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK,
+                                             AMEDIAFORMAT_KEY_EXIF_SIZE,
+                                             AMEDIAFORMAT_KEY_EXIF_OFFSET,
+                                             AMEDIAFORMAT_KEY_DURATION};
+
+static const std::string kFormatBufferKeys[] = {
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C,
+        AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA,
+        AMEDIAFORMAT_KEY_SEI,
+        AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+        AMEDIAFORMAT_KEY_PSSH,
+        AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+        AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER,
+        AMEDIAFORMAT_KEY_MPEG_USER_DATA,
+        AMEDIAFORMAT_KEY_ICC_PROFILE,
+        AMEDIAFORMAT_KEY_HDR10_PLUS_INFO,
+        AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+        AMEDIAFORMAT_KEY_ESDS,
+        AMEDIAFORMAT_KEY_D263,
+        AMEDIAFORMAT_KEY_CSD_HEVC,
+        AMEDIAFORMAT_KEY_CSD_AVC,
+        AMEDIAFORMAT_KEY_CSD_2,
+        AMEDIAFORMAT_KEY_CSD_1,
+        AMEDIAFORMAT_KEY_CSD_0,
+        AMEDIAFORMAT_KEY_CSD,
+        AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_KEY,
+        AMEDIAFORMAT_KEY_CRYPTO_IV,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
+        AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+        AMEDIAFORMAT_KEY_ALBUMART,
+};
+
+static const std::string kFormatFloatKeys[] = {AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                                               AMEDIAFORMAT_KEY_CAPTURE_RATE};
+
+static const std::string kFormatStringKeys[] = {AMEDIAFORMAT_KEY_YEAR,
+                                                AMEDIAFORMAT_KEY_TITLE,
+                                                AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+                                                AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS,
+                                                AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+                                                AMEDIAFORMAT_KEY_MANUFACTURER,
+                                                AMEDIAFORMAT_KEY_LYRICIST,
+                                                AMEDIAFORMAT_KEY_LOCATION,
+                                                AMEDIAFORMAT_KEY_LANGUAGE,
+                                                AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+                                                AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+                                                AMEDIAFORMAT_KEY_IS_ADTS,
+                                                AMEDIAFORMAT_KEY_GENRE,
+                                                AMEDIAFORMAT_KEY_DISCNUMBER,
+                                                AMEDIAFORMAT_KEY_DATE,
+                                                AMEDIAFORMAT_KEY_COMPOSER,
+                                                AMEDIAFORMAT_KEY_COMPILATION,
+                                                AMEDIAFORMAT_KEY_COMPLEXITY,
+                                                AMEDIAFORMAT_KEY_CDTRACKNUMBER,
+                                                AMEDIAFORMAT_KEY_AUTHOR,
+                                                AMEDIAFORMAT_KEY_ARTIST,
+                                                AMEDIAFORMAT_KEY_ALBUMARTIST,
+                                                AMEDIAFORMAT_KEY_ALBUM};
+
+void formatSetString(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        std::string keyValue = fdp->ConsumeRandomLengthString(kMaxBytes);
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY, keyValue.c_str());
+    }
+}
+
+void formatSetInt(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        int32_t keyValue = fdp->ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY, keyValue);
+    }
+}
+
+void formatSetFloat(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        float keyValue =
+                fdp->ConsumeFloatingPointInRange<float>(kMinFloatKeyValue, kMaxFloatKeyValue);
+        AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY, keyValue);
+    }
+}
+
+void formatSetBuffer(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        std::vector<uint8_t> buffer = fdp->ConsumeBytes<uint8_t>(
+                fdp->ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+        AMediaFormat_setBuffer(format, AMEDIAFORMAT_KEY, buffer.data(), buffer.size());
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createAMediaCodecByname(bool isEncoder,
+                                                              bool isCodecForClient) {
+    std::string name;
+    if (isEncoder) {
+        name = mFdp->ConsumeBool() ? mFdp->PickValueInArray(kEncoderNames)
+                                   : mFdp->ConsumeRandomLengthString(kMaxBytes);
+    } else {
+        name = mFdp->ConsumeBool() ? mFdp->PickValueInArray(kDecoderNames)
+                                   : mFdp->ConsumeRandomLengthString(kMaxBytes);
+    }
+
+    if (isCodecForClient) {
+        pid_t pid = mFdp->ConsumeIntegral<pid_t>();
+        uid_t uid = mFdp->ConsumeIntegral<uid_t>();
+        return AMediaCodec_createCodecByNameForClient(name.c_str(), pid, uid);
+
+    } else {
+        return AMediaCodec_createCodecByName(name.c_str());
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createAMediaCodecByType(bool isEncoder,
+                                                              bool isCodecForClient) {
+    std::string mimeType;
+    const char* mime = nullptr;
+
+    if (mFdp->ConsumeBool()) {
+        mimeType = mFdp->ConsumeRandomLengthString(kMaxBytes);
+        mime = mimeType.c_str();
+    } else {
+        AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+    }
+
+    if (isCodecForClient) {
+        pid_t pid = mFdp->ConsumeIntegral<pid_t>();
+        uid_t uid = mFdp->ConsumeIntegral<uid_t>();
+        return isEncoder ? AMediaCodec_createEncoderByTypeForClient(mime, pid, uid)
+                         : AMediaCodec_createDecoderByTypeForClient(mime, pid, uid);
+    } else {
+        return isEncoder ? AMediaCodec_createEncoderByType(mime)
+                         : AMediaCodec_createDecoderByType(mime);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::setCodecFormat() {
+    std::string value;
+    int32_t count = 0;
+    int32_t maxFormatKeys = 0;
+    AMediaFormat_clear(mFormat);
+
+    /*set mimeType*/
+    if (mFdp->ConsumeBool()) {
+        value = mFdp->ConsumeRandomLengthString(kMaxBytes);
+    } else {
+        value = mFdp->PickValueInArray(kMimeTypes);
+    }
+    if (mFdp->ConsumeBool()) {
+        AMediaFormat_setString(mFormat, AMEDIAFORMAT_KEY_MIME, value.c_str());
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatStringKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatStringKeys);
+        formatSetString(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatIntKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatIntKeys);
+        formatSetInt(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatFloatKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatFloatKeys);
+        formatSetFloat(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatBufferKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatBufferKeys);
+        formatSetBuffer(mFormat, formatKey.c_str(), mFdp);
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createCodec(bool isEncoder, bool isCodecForClient) {
+    setCodecFormat();
+    return (mFdp->ConsumeBool() ? createAMediaCodecByname(isEncoder, isCodecForClient)
+                                : createAMediaCodecByType(isEncoder, isCodecForClient));
+}
+
+void NdkMediaCodecFuzzerBase::invokeCodecFormatAPI(AMediaCodec* codec) {
+    AMediaFormat* codecFormat = nullptr;
+    size_t codecFormatAPI = mFdp->ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCodecFormatAPIs);
+    switch (codecFormatAPI) {
+        case 0: {
+            codecFormat = AMediaCodec_getInputFormat(codec);
+            break;
+        }
+        case 1: {
+            codecFormat = AMediaCodec_getOutputFormat(codec);
+            break;
+        }
+        case 2:
+        default: {
+            AMediaCodecBufferInfo info;
+            int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+            ssize_t bufferIndex = 0;
+            if (mFdp->ConsumeBool()) {
+                bufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &info, timeOutUs);
+            } else {
+                bufferIndex =
+                        mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+            }
+            codecFormat = AMediaCodec_getBufferFormat(codec, bufferIndex);
+            break;
+        }
+    }
+    if (codecFormat) {
+        AMediaFormat_delete(codecFormat);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::invokeInputBufferOperationAPI(AMediaCodec* codec) {
+    size_t bufferSize = 0;
+    ssize_t bufferIndex = 0;
+    int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+    if (mFdp->ConsumeBool()) {
+        bufferIndex = AMediaCodec_dequeueInputBuffer(codec, timeOutUs);
+    } else {
+        bufferIndex = mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+    }
+
+    uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+    if (buffer) {
+        std::vector<uint8_t> bytesRead = mFdp->ConsumeBytes<uint8_t>(
+                std::min(mFdp->ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+        memcpy(buffer, bytesRead.data(), bytesRead.size());
+        bufferSize = bytesRead.size();
+    }
+
+    int32_t flag = mFdp->ConsumeIntegralInRange<size_t>(AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG,
+                                                        AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME);
+    if (mFdp->ConsumeBool()) {
+        AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+                                     flag);
+    } else {
+        AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+        AMediaCodec_queueSecureInputBuffer(codec, bufferIndex, 0 /* offset */, cryptoInfo,
+                                           0 /* time */, flag);
+        AMediaCodecCryptoInfo_delete(cryptoInfo);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::invokeOutputBufferOperationAPI(AMediaCodec* codec) {
+    ssize_t bufferIndex = 0;
+    int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+    if (mFdp->ConsumeBool()) {
+        AMediaCodecBufferInfo info;
+        bufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &info, timeOutUs);
+    } else {
+        bufferIndex = mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+    }
+
+    if (mFdp->ConsumeBool()) {
+        size_t bufferSize = 0;
+        (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+    }
+
+    if (mFdp->ConsumeBool()) {
+        AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp->ConsumeBool());
+    } else {
+        AMediaCodec_releaseOutputBufferAtTime(codec, bufferIndex, timeOutUs);
+    }
+}
+
+AMediaCodecCryptoInfo* NdkMediaCodecFuzzerBase::getAMediaCodecCryptoInfo() {
+    uint8_t key[kMaxCryptoKey];
+    uint8_t iv[kMaxCryptoKey];
+    size_t clearBytes[kMaxCryptoKey];
+    size_t encryptedBytes[kMaxCryptoKey];
+
+    for (int32_t i = 0; i < kMaxCryptoKey; ++i) {
+        key[i] = mFdp->ConsumeIntegral<uint8_t>();
+        iv[i] = mFdp->ConsumeIntegral<uint8_t>();
+        clearBytes[i] = mFdp->ConsumeIntegral<size_t>();
+        encryptedBytes[i] = mFdp->ConsumeIntegral<size_t>();
+    }
+
+    return AMediaCodecCryptoInfo_new(kMaxCryptoKey, key, iv, AMEDIACODECRYPTOINFO_MODE_CLEAR,
+                                     clearBytes, encryptedBytes);
+}
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
new file mode 100644
index 0000000..2875f9f
--- /dev/null
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include <android/native_window.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCodecPlatform.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+constexpr int32_t kMinBytes = 1;
+constexpr int32_t kMaxBytes = 256;
+constexpr int32_t kMinIntKeyValue = 0;
+constexpr int32_t kMaxIntKeyValue = 6000000;
+constexpr float kMinFloatKeyValue = 1.0f;
+constexpr float kMaxFloatKeyValue = 500.0f;
+constexpr int32_t kMinTimeOutUs = 0;
+constexpr int32_t kMaxTimeOutUs = 5000;
+constexpr int32_t kMinAPICase = 0;
+constexpr int32_t kMaxCodecFormatAPIs = 2;
+constexpr int32_t kMaxCryptoKey = 16;
+constexpr int32_t kMinIterations = 10;
+constexpr int32_t kMaxIterations = 100;
+constexpr size_t kMinBufferIndex = 1;
+constexpr size_t kMaxBufferIndex = 128;
+
+class NdkMediaCodecFuzzerBase {
+  public:
+    NdkMediaCodecFuzzerBase() { mFormat = AMediaFormat_new(); }
+    void invokeCodecFormatAPI(AMediaCodec* codec);
+    void invokeInputBufferOperationAPI(AMediaCodec* codec);
+    void invokeOutputBufferOperationAPI(AMediaCodec* codec);
+    AMediaCodecCryptoInfo* getAMediaCodecCryptoInfo();
+    AMediaCodec* createCodec(bool isEncoder, bool isCodecForClient);
+    AMediaFormat* getCodecFormat() { return mFormat; };
+    void setFdp(FuzzedDataProvider* fdp) { mFdp = fdp; }
+    ~NdkMediaCodecFuzzerBase() {
+        if (mFormat) {
+            AMediaFormat_delete(mFormat);
+        }
+    }
+
+  private:
+    AMediaCodec* createAMediaCodecByname(bool isEncoder, bool isCodecForClient);
+    AMediaCodec* createAMediaCodecByType(bool isEncoder, bool isCodecForClient);
+    AMediaFormat* getSampleAudioFormat();
+    AMediaFormat* getSampleVideoFormat();
+    void setCodecFormat();
+    AMediaFormat* mFormat = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
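Illustrative note (not part of the patch): a minimal sketch of a fuzzer entry point built on this base class, loosely mirroring what ndk_sync_codec_fuzzer is expected to do. The exact call sequence and flags are assumptions for the example, not the committed fuzzer.

```cpp
#include <fuzzer/FuzzedDataProvider.h>
#include <media/NdkMediaCodec.h>
#include <NdkMediaCodecFuzzerBase.h>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    NdkMediaCodecFuzzerBase fuzzerBase;
    fuzzerBase.setFdp(&fdp);

    bool isEncoder = fdp.ConsumeBool();
    AMediaCodec* codec = fuzzerBase.createCodec(isEncoder, fdp.ConsumeBool());
    if (codec == nullptr) {
        return 0;
    }
    // Configure with the fuzzed format assembled by setCodecFormat() inside createCodec().
    AMediaCodec_configure(codec, fuzzerBase.getCodecFormat(), nullptr /* surface */,
                          nullptr /* crypto */,
                          isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0);
    AMediaCodec_start(codec);
    while (fdp.remaining_bytes()) {
        fuzzerBase.invokeInputBufferOperationAPI(codec);
        fuzzerBase.invokeOutputBufferOperationAPI(codec);
        fuzzerBase.invokeCodecFormatAPI(codec);
    }
    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
    return 0;
}
```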
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
new file mode 100644
index 0000000..0fd08b0
--- /dev/null
+++ b/media/ndk/fuzzer/README.md
@@ -0,0 +1,158 @@
+# Fuzzers for libmediandk
+
+## Table of contents
++ [ndk_crypto_fuzzer](#NdkCrypto)
++ [ndk_image_reader_fuzzer](#NdkImageReader)
++ [ndk_extractor_fuzzer](#NdkExtractor)
++ [ndk_mediaformat_fuzzer](#NdkMediaFormat)
++ [ndk_drm_fuzzer](#NdkDrm)
++ [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
++ [ndk_sync_codec_fuzzer](#NdkSyncCodec)
+
+# <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
+
+NdkCrypto supports the following parameters:
+1. UniversalIdentifier (parameter name: "uuid")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `uuid`| `Array`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_crypto_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_crypto_fuzzer/ndk_crypto_fuzzer
+```
+
+# <a name="NdkImageReader"></a> Fuzzer for NdkImageReader
+
+NdkImageReader supports the following parameters:
+1. Width (parameter name: "imageWidth")
+2. Height (parameter name: "imageHeight")
+3. Format (parameter name: "imageFormat")
+4. Usage (parameter name: "imageUsage")
+5. Max images (parameter name: "imageMaxCount")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `width`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `height`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `format`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `usage`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `maxImages`| `1 to android::BufferQueue::MAX_MAX_ACQUIRED_BUFFERS`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_image_reader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_image_reader_fuzzer/ndk_image_reader_fuzzer
+```
+
+# <a name="NdkExtractor"></a>Fuzzer for NdkExtractor
+
+NdkExtractor supports the following parameters:
+1. SeekMode (parameter name: "mode")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`mode`|0.`AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC`,<br/>1.`AMEDIAEXTRACTOR_SEEK_NEXT_SYNC`,<br/>2.`AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_extractor_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_extractor_fuzzer/ndk_extractor_fuzzer /data/fuzz/${TARGET_ARCH}/ndk_extractor_fuzzer/corpus
+```
+
+
+# <a name="NdkMediaFormat"></a>Fuzzer for NdkMediaFormat
+
+NdkMediaFormat supports the following parameters:
+1. Name (parameter name: "name")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`name`|1.`AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR`, 2.`AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR`, 3.`AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION`, 4.`AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL`, 5.`AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL`, 6.`AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT`, 7.`AMEDIAFORMAT_KEY_AAC_PROFILE`, 8.`AMEDIAFORMAT_KEY_AAC_SBR_MODE`, 9.`AMEDIAFORMAT_KEY_ALBUM`, 10.`AMEDIAFORMAT_KEY_ALBUMART`, 11.`AMEDIAFORMAT_KEY_ALBUMARTIST`, 12.`AMEDIAFORMAT_KEY_ARTIST`, 13.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO`, 14.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID`, 15.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID`, 16.`AMEDIAFORMAT_KEY_AUDIO_SESSION_ID`, 17.`AMEDIAFORMAT_KEY_AUTHOR`, 18.`AMEDIAFORMAT_KEY_BITRATE_MODE`, 19.`AMEDIAFORMAT_KEY_BIT_RATE`, 20.`AMEDIAFORMAT_KEY_BITS_PER_SAMPLE`, 21.`AMEDIAFORMAT_KEY_CAPTURE_RATE`, 22.`AMEDIAFORMAT_KEY_CDTRACKNUMBER`, 23.`AMEDIAFORMAT_KEY_CHANNEL_COUNT`, 24.`AMEDIAFORMAT_KEY_CHANNEL_MASK`, 25.`AMEDIAFORMAT_KEY_COLOR_FORMAT`, 26.`AMEDIAFORMAT_KEY_COLOR_RANGE`, 27.`AMEDIAFORMAT_KEY_COLOR_STANDARD`, 28.`AMEDIAFORMAT_KEY_COLOR_TRANSFER`, 29.`AMEDIAFORMAT_KEY_COMPILATION`, 30.`AMEDIAFORMAT_KEY_COMPLEXITY`, 31.`AMEDIAFORMAT_KEY_COMPOSER`, 32.`AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED`, 33.`AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE`, 34.`AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK`, 35.`AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES`, 36.`AMEDIAFORMAT_KEY_CRYPTO_IV`, 37.`AMEDIAFORMAT_KEY_CRYPTO_KEY`, 38.`AMEDIAFORMAT_KEY_CRYPTO_MODE`, 39.`AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES`, 40.`AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK`, 41.`AMEDIAFORMAT_KEY_CSD`, 42.`AMEDIAFORMAT_KEY_CSD_0`, 43.`AMEDIAFORMAT_KEY_CSD_1`, 44.`AMEDIAFORMAT_KEY_CSD_2`, 45.`AMEDIAFORMAT_KEY_CSD_AVC`, 46.`AMEDIAFORMAT_KEY_CSD_HEVC`, 47.`AMEDIAFORMAT_KEY_D263`, 48.`AMEDIAFORMAT_KEY_DATE`, 49.`AMEDIAFORMAT_KEY_DISCNUMBER`, 50.`AMEDIAFORMAT_KEY_DISPLAY_CROP`, 51.`AMEDIAFORMAT_KEY_DISPLAY_HEIGHT`, 52.`AMEDIAFORMAT_KEY_DISPLAY_WIDTH`, 53.`AMEDIAFORMAT_KEY_DURATION`, 54.`AMEDIAFORMAT_KEY_ENCODER_DELAY`, 55.`AMEDIAFORMAT_KEY_ENCODER_PADDING`, 56.`AMEDIAFORMAT_KEY_ESDS`, 57.`AMEDIAFORMAT_KEY_EXIF_OFFSET`, 58.`AMEDIAFORMAT_KEY_EXIF_SIZE`, 59.`AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL`, 60.`AMEDIAFORMAT_KEY_FRAME_COUNT`, 61.`AMEDIAFORMAT_KEY_FRAME_RATE`, 62.`AMEDIAFORMAT_KEY_GENRE`, 63.`AMEDIAFORMAT_KEY_GRID_COLUMNS`, 64.`AMEDIAFORMAT_KEY_GRID_ROWS`, 65.`AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT`, 66.`AMEDIAFORMAT_KEY_HDR_STATIC_INFO`, 67.`AMEDIAFORMAT_KEY_HDR10_PLUS_INFO`, 68.`AMEDIAFORMAT_KEY_HEIGHT`, 69.`AMEDIAFORMAT_KEY_ICC_PROFILE`, 70.`AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD`, 71.`AMEDIAFORMAT_KEY_IS_ADTS`, 72.`AMEDIAFORMAT_KEY_IS_AUTOSELECT`, 73.`AMEDIAFORMAT_KEY_IS_DEFAULT`, 74.`AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE`, 75.`AMEDIAFORMAT_KEY_IS_SYNC_FRAME`, 76.`AMEDIAFORMAT_KEY_I_FRAME_INTERVAL`, 77.`AMEDIAFORMAT_KEY_LANGUAGE`, 78.`AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK`, 79.`AMEDIAFORMAT_KEY_LATENCY`, 80.`AMEDIAFORMAT_KEY_LEVEL`, 81.`AMEDIAFORMAT_KEY_LOCATION`, 82.`AMEDIAFORMAT_KEY_LOOP`, 83.`AMEDIAFORMAT_KEY_LOW_LATENCY`, 84.`AMEDIAFORMAT_KEY_LYRICIST`, 85.`AMEDIAFORMAT_KEY_MANUFACTURER`, 86.`AMEDIAFORMAT_KEY_MAX_BIT_RATE`, 87.`AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER`, 88.`AMEDIAFORMAT_KEY_MAX_HEIGHT`, 89.`AMEDIAFORMAT_KEY_MAX_INPUT_SIZE`, 90.`AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER`, 91.`AMEDIAFORMAT_KEY_MAX_WIDTH`, 92.`AMEDIAFORMAT_KEY_MIME`, 93.`AMEDIAFORMAT_KEY_MPEG_USER_DATA`, 94.`AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER`, 95.`AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS`, 96.`AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION`, 97.`AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT`, 98.`AMEDIAFORMAT_KEY_OPERATING_RATE`, 99.`AMEDIAFORMAT_KEY_PCM_ENCODING`, 100.`AMEDIAFORMAT_KEY_PICTURE_TYPE`, 101.`AMEDIAFORMAT_KEY_PRIORITY`, 102.`AMEDIAFORMAT_KEY_PROFILE`, 103.`AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN`, 104.`AMEDIAFORMAT_KEY_PSSH`, 105.`AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP`, 106.`AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER`, 107.`AMEDIAFORMAT_KEY_ROTATION`, 108.`AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET`, 109.`AMEDIAFORMAT_KEY_SAMPLE_RATE`, 110.`AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND`, 111.`AMEDIAFORMAT_KEY_SAR_HEIGHT`, 112.`AMEDIAFORMAT_KEY_SAR_WIDTH`, 113.`AMEDIAFORMAT_KEY_SEI`, 114.`AMEDIAFORMAT_KEY_SLICE_HEIGHT`, 115.`AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS`, 116.`AMEDIAFORMAT_KEY_STRIDE`, 117.`AMEDIAFORMAT_KEY_TARGET_TIME`, 118.`AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT`, 119.`AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID`, 120.`AMEDIAFORMAT_KEY_TEMPORAL_LAYERING`, 121.`AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA`, 122.`AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C`, 123.`AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC`, 124.`AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT`, 125.`AMEDIAFORMAT_KEY_THUMBNAIL_TIME`, 126.`AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH`, 127.`AMEDIAFORMAT_KEY_TILE_HEIGHT`, 128.`AMEDIAFORMAT_KEY_TILE_WIDTH`, 129.`AMEDIAFORMAT_KEY_TIME_US`, 130.`AMEDIAFORMAT_KEY_TITLE`, 131.`AMEDIAFORMAT_KEY_TRACK_ID`, 132.`AMEDIAFORMAT_KEY_TRACK_INDEX`, 133.`AMEDIAFORMAT_KEY_VALID_SAMPLES`, 134.`AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL`, 135.`AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE`, 136.`AMEDIAFORMAT_VIDEO_QP_B_MAX`, 137.`AMEDIAFORMAT_VIDEO_QP_B_MIN`, 138.`AMEDIAFORMAT_VIDEO_QP_I_MAX`, 139.`AMEDIAFORMAT_VIDEO_QP_I_MIN`, 140.`AMEDIAFORMAT_VIDEO_QP_MAX`, 141.`AMEDIAFORMAT_VIDEO_QP_MIN`, 142.`AMEDIAFORMAT_VIDEO_QP_P_MAX`, 143.`AMEDIAFORMAT_VIDEO_QP_P_MIN`, 144.`AMEDIAFORMAT_KEY_WIDTH`, 145.`AMEDIAFORMAT_KEY_XMP_OFFSET`, 146.`AMEDIAFORMAT_KEY_XMP_SIZE`, 147.`AMEDIAFORMAT_KEY_YEAR`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_mediaformat_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/ndk_mediaformat_fuzzer/ndk_mediaformat_fuzzer /data/fuzz/${TARGET_ARCH}/ndk_mediaformat_fuzzer/corpus
+```
+
+# <a name="NdkDrm"></a> Fuzzer for NdkDrm
+
+NdkDrm supports the following parameters:
+1. ValidUUID(parameter name: "kCommonPsshBoxUUID" and "kClearKeyUUID")
+2. MimeType(parameter name: "kMimeType")
+3. MediaUUID(parameter name: "MediaUUID")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`ValidUUID`| 0.`kCommonPsshBoxUUID`,<br/> 1.`kClearKeyUUID`,<br/> 2.`kInvalidUUID`|Value obtained from FuzzedDataProvider|
+|`kMimeType`| 0.`video/mp4`,<br/> 1.`audio/mp4`|Value obtained from FuzzedDataProvider|
+|`MediaUUID`| 0.`INVALID_UUID`,<br/> 1.`PSSH_BOX_UUID`,<br/> 2.`CLEARKEY_UUID`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_drm_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_drm_fuzzer/ndk_drm_fuzzer
+```
+
+# <a name="NdkMediaMuxer"></a>Fuzzer for NdkMediaMuxer
+
+NdkMediaMuxer supports the following parameters:
+1. OutputFormat (parameter name: "outputFormat")
+2. AppendMode (parameter name: "appendMode")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`outputFormat`|0.`AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4`,<br/>1.`AMEDIAMUXER_OUTPUT_FORMAT_WEBM`,<br/>2.`AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP`| Value obtained from FuzzedDataProvider|
+|`appendMode`|0.`AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP`,<br/>1.`AMEDIAMUXER_APPEND_TO_EXISTING_DATA`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_mediamuxer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_mediamuxer_fuzzer/ndk_mediamuxer_fuzzer
+```
+
+# <a name="NdkSyncCodec"></a>Fuzzer for NdkSyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_sync_codec_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738 b/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738
new file mode 100755
index 0000000..47d79c8
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6 b/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6
new file mode 100755
index 0000000..17c934c
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce b/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce
new file mode 100755
index 0000000..00a32e2
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950 b/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950
new file mode 100755
index 0000000..86d4001
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d b/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d
new file mode 100755
index 0000000..496c7f3
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a b/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a
new file mode 100755
index 0000000..55437ac
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a
Binary files differ
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
new file mode 100644
index 0000000..2b22f0f
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaCrypto.h>
+
+constexpr size_t kMaxString = 256;
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    AMediaUUID uuid = {};
+    int32_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, (size_t)sizeof(AMediaUUID));
+    for (size_t idx = 0; idx < maxLen; ++idx) {
+        uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
+    }
+    std::vector<uint8_t> initData =
+            fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+    AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
+    while (fdp.remaining_bytes()) {
+        auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    AMediaCrypto_requiresSecureDecoderComponent(
+                            fdp.ConsumeRandomLengthString(kMaxString).c_str());
+                },
+                [&]() { AMediaCrypto_isCryptoSchemeSupported(uuid); },
+        });
+        invokeNdkCryptoFuzzer();
+    }
+    AMediaCrypto_delete(crypto);
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_drm_fuzzer.cpp b/media/ndk/fuzzer/ndk_drm_fuzzer.cpp
new file mode 100644
index 0000000..8c11c9d
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_drm_fuzzer.cpp
@@ -0,0 +1,355 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <media/NdkMediaCrypto.h>
+#include <media/NdkMediaDrm.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr int32_t kMinBytes = 1;
+constexpr int32_t kMaxBytes = 256;
+constexpr int32_t kMinParamVal = 0;
+constexpr int32_t kMaxParamVal = 3;
+constexpr int32_t kMediaUUIdSize = sizeof(AMediaUUID);
+constexpr int32_t kMinProvisionResponseSize = 0;
+constexpr int32_t kMaxProvisionResponseSize = 16;
+constexpr int32_t kMessageSize = 16;
+constexpr int32_t kMinAPIcase = 0;
+constexpr int32_t kMaxdecryptEncryptAPIs = 10;
+constexpr int32_t kMaxpropertyAPIs = 3;
+constexpr int32_t kMaxsetListenerAPIs = 2;
+constexpr int32_t kMaxndkDrmAPIs = 3;
+uint8_t signature[kMessageSize];
+
+enum MediaUUID { INVALID_UUID = 0, PSSH_BOX_UUID, CLEARKEY_UUID, kMaxValue = CLEARKEY_UUID };
+
+constexpr uint8_t kCommonPsshBoxUUID[] = {0x10, 0x77, 0xEF, 0xEC, 0xC0, 0xB2, 0x4D, 0x02,
+                                          0xAC, 0xE3, 0x3C, 0x1E, 0x52, 0xE2, 0xFB, 0x4B};
+
+constexpr uint8_t kClearKeyUUID[] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85, 0xB3, 0xC9,
+                                     0x78, 0x1A, 0xB0, 0x30, 0xAF, 0x78, 0xD3, 0x0E};
+
+constexpr uint8_t kInvalidUUID[] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80,
+                                    0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80};
+
+uint8_t kClearkeyPssh[] = {
+        // BMFF box header (4 bytes size + 'pssh')
+        0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+        // full box header (version = 1 flags = 0)
+        0x01, 0x00, 0x00, 0x00,
+        // system id
+        0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02,
+        0xac, 0xe3, 0x3c, 0x1e, 0x52, 0xe2, 0xfb, 0x4b,
+        // number of key ids
+        0x00, 0x00, 0x00, 0x01,
+        // key id
+        0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+        0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+        // size of data, must be zero
+        0x00, 0x00, 0x00, 0x00};
+
+std::string kPropertyName = "clientId";
+std::string kMimeType[] = {"video/mp4", "audio/mp4"};
+std::string kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
+std::string kMacAlgorithm[] = {"HmacSHA256", ""};
+
+class NdkMediaDrmFuzzer {
+  public:
+    NdkMediaDrmFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void invokeNdkDrm();
+    static void KeysChangeListener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                                   const AMediaDrmKeyStatus* keysStatus, size_t numKeys,
+                                   bool hasNewUsableKey) {
+        (void)drm;
+        (void)sessionId;
+        (void)keysStatus;
+        (void)numKeys;
+        (void)hasNewUsableKey;
+    };
+
+    static void ExpirationUpdateListener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                                         int64_t expiryTimeInMS) {
+        (void)drm;
+        (void)sessionId;
+        (void)expiryTimeInMS;
+    };
+
+    static void listener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                         AMediaDrmEventType eventType, int extra, const uint8_t* data,
+                         size_t dataSize) {
+        (void)drm;
+        (void)sessionId;
+        (void)eventType;
+        (void)extra;
+        (void)data;
+        (void)dataSize;
+    }
+
+  private:
+    FuzzedDataProvider mFdp;
+    void invokeDrmCreatePlugin();
+    void invokeDrmSetListener();
+    void invokeDrmPropertyAPI();
+    void invokeDrmDecryptEncryptAPI();
+    void invokeDrmSecureStopAPI();
+    AMediaDrmSessionId mSessionId = {};
+    AMediaDrm* mDrm = nullptr;
+};
+
+void NdkMediaDrmFuzzer::invokeDrmCreatePlugin() {
+    const uint8_t* mediaUUID = nullptr;
+    uint32_t uuidEnum = mFdp.ConsumeEnum<MediaUUID>();
+    switch (uuidEnum) {
+        case INVALID_UUID: {
+            mediaUUID = kInvalidUUID;
+            break;
+        }
+        case PSSH_BOX_UUID: {
+            mediaUUID = kCommonPsshBoxUUID;
+            break;
+        }
+        case CLEARKEY_UUID:
+        default: {
+            mediaUUID = kClearKeyUUID;
+            break;
+        }
+    }
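+    // Create the DRM object for the selected UUID; creation returns nullptr for unsupported schemes.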
+    mDrm = AMediaDrm_createByUUID(mediaUUID);
+}
+
+void NdkMediaDrmFuzzer::invokeDrmSecureStopAPI() {
+    // get maximum number of secure stops
+    AMediaDrmSecureStop secureStops;
+    size_t numSecureStops = kMaxParamVal;
+    // The API behavior could change based on the DRM object (clearkey or
+    // pssh box). These calls retrieve pending secure stop messages and release them.
+    AMediaDrm_getSecureStops(mDrm, &secureStops, &numSecureStops);
+    AMediaDrm_releaseSecureStops(mDrm, &secureStops);
+}
+
+void NdkMediaDrmFuzzer::invokeDrmSetListener() {
+    int32_t setListenerAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxsetListenerAPIs);
+    switch (setListenerAPI) {
+        case 0: {  // set on key change listener
+            AMediaDrm_setOnKeysChangeListener(mDrm, KeysChangeListener);
+            break;
+        }
+        case 1: {  // set on expiration update listener
+            AMediaDrm_setOnExpirationUpdateListener(mDrm, ExpirationUpdateListener);
+            break;
+        }
+        case 2:
+        default: {  // set on event listener
+            AMediaDrm_setOnEventListener(mDrm, listener);
+            break;
+        }
+    }
+}
+
+void NdkMediaDrmFuzzer::invokeDrmPropertyAPI() {
+    int32_t propertyAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxpropertyAPIs);
+    switch (propertyAPI) {
+        case 0: {  // set property byte array
+            uint8_t value[kMediaUUIdSize];
+            std::string name =
+                    mFdp.ConsumeBool() ? kPropertyName : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* propertyName = name.c_str();
+            AMediaDrm_setPropertyByteArray(mDrm, propertyName, value, sizeof(value));
+            break;
+        }
+        case 1: {  // get property in byte array
+            AMediaDrmByteArray array;
+            std::string name =
+                    mFdp.ConsumeBool() ? kPropertyName : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* propertyName = name.c_str();
+            AMediaDrm_getPropertyByteArray(mDrm, propertyName, &array);
+            break;
+        }
+        case 2: {  // set string type property
+            std::string propertyName = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            std::string value = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_setPropertyString(mDrm, propertyName.c_str(), value.c_str());
+            break;
+        }
+        case 3:
+        default: {  //  get property in string
+            const char* stringValue = nullptr;
+            std::string propertyName = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_getPropertyString(mDrm, propertyName.c_str(), &stringValue);
+            break;
+        }
+    }
+}
+
+void NdkMediaDrmFuzzer::invokeDrmDecryptEncryptAPI() {
+    int32_t decryptEncryptAPI =
+            mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxdecryptEncryptAPIs);
+    switch (decryptEncryptAPI) {
+        case 0: {  // Check if crypto scheme is supported
+            std::string mimeType = mFdp.ConsumeBool() ? mFdp.PickValueInArray(kMimeType)
+                                                      : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_isCryptoSchemeSupported(kClearKeyUUID, mimeType.c_str());
+            break;
+        }
+        case 1: {  // get a provision request byte array
+            const uint8_t* legacyRequest;
+            size_t legacyRequestSize = 1;
+            const char* legacyDefaultUrl;
+            AMediaDrm_getProvisionRequest(mDrm, &legacyRequest, &legacyRequestSize,
+                                          &legacyDefaultUrl);
+            break;
+        }
+        case 2: {  // provide a response to the DRM engine plugin
+            const int32_t provisionresponseSize = mFdp.ConsumeIntegralInRange<size_t>(
+                    kMinProvisionResponseSize, kMaxProvisionResponseSize);
+            uint8_t provisionResponse[provisionresponseSize];
+            AMediaDrm_provideProvisionResponse(mDrm, provisionResponse, sizeof(provisionResponse));
+            break;
+        }
+        case 3: {  // get key request
+            const uint8_t* keyRequest = nullptr;
+            size_t keyRequestSize = 0;
+            std::string mimeType = mFdp.ConsumeBool() ? mFdp.PickValueInArray(kMimeType)
+                                                      : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            size_t numOptionalParameters =
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinParamVal, kMaxParamVal);
+            AMediaDrmKeyValue optionalParameters[numOptionalParameters];
+            std::string keys[numOptionalParameters];
+            std::string values[numOptionalParameters];
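+            // Keep the strings alive in arrays so the const char* pointers in optionalParameters stay valid.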
+            for (int i = 0; i < numOptionalParameters; ++i) {
+                keys[i] = mFdp.ConsumeRandomLengthString(kMaxBytes);
+                values[i] = mFdp.ConsumeRandomLengthString(kMaxBytes);
+                optionalParameters[i].mKey = keys[i].c_str();
+                optionalParameters[i].mValue = values[i].c_str();
+            }
+            AMediaDrmKeyType keyType = (AMediaDrmKeyType)mFdp.ConsumeIntegralInRange<int>(
+                    KEY_TYPE_STREAMING, KEY_TYPE_RELEASE);
+            AMediaDrm_getKeyRequest(mDrm, &mSessionId, kClearkeyPssh, sizeof(kClearkeyPssh),
+                                    mimeType.c_str(), keyType, optionalParameters,
+                                    numOptionalParameters, &keyRequest, &keyRequestSize);
+            break;
+        }
+        case 4: {  // query key status
+            size_t numPairs = mFdp.ConsumeIntegralInRange<size_t>(kMinParamVal, kMaxParamVal);
+            AMediaDrmKeyValue keyStatus[numPairs];
+            AMediaDrm_queryKeyStatus(mDrm, &mSessionId, keyStatus, &numPairs);
+            break;
+        }
+        case 5: {  // provide key response
+            std::string key = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* keyResponse = key.c_str();
+            AMediaDrmKeySetId keySetId;
+            AMediaDrm_provideKeyResponse(mDrm, &mSessionId,
+                                         reinterpret_cast<const uint8_t*>(keyResponse),
+                                         key.size(), &keySetId);
+            break;
+        }
+        case 6: {  // restore key
+            AMediaDrmKeySetId keySetId;
+            AMediaDrm_restoreKeys(mDrm, &mSessionId, &keySetId);
+            break;
+        }
+
+        case 7: {  // Verify a signature using the specified algorithm
+            std::string algorithm = kMacAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> message = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            AMediaDrm_verify(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), message.data(),
+                             message.size(), signature, sizeof(signature));
+            break;
+        }
+        case 8: {  // Generate a signature using the specified algorithm
+            std::string algorithm = kMacAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> message = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            size_t signatureSize = sizeof(signature);
+            AMediaDrm_sign(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), message.data(),
+                           message.size(), signature, &signatureSize);
+            break;
+        }
+        case 9: {  // Decrypt the data using the specified algorithm
+            std::string algorithm = kCipherAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> iv = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> input = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            uint8_t output[kMessageSize];
+            AMediaDrm_decrypt(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), iv.data(),
+                              input.data(), output, input.size());
+            break;
+        }
+        case 10:
+        default: {  // Encrypt the data using the specified algorithm
+            std::string algorithm = kCipherAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> iv = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> input = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            uint8_t output[kMessageSize];
+            AMediaDrm_encrypt(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), iv.data(),
+                              input.data(), output, input.size());
+            break;
+        }
+    }
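+    // Remove the current keys from the session after exercising the selected API.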
+    AMediaDrm_removeKeys(mDrm, &mSessionId);
+}
+
+void NdkMediaDrmFuzzer::invokeNdkDrm() {
+    while (mFdp.remaining_bytes() > 0) {
+        // Called first because it creates the AMediaDrm object;
+        // the resulting mDrm object is used by the APIs below.
+        invokeDrmCreatePlugin();
+        if (mDrm) {
+            // Opens a session and fills in "mSessionId", which is
+            // required by the APIs below.
+            AMediaDrm_openSession(mDrm, &mSessionId);
+            int32_t ndkDrmAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxndkDrmAPIs);
+            switch (ndkDrmAPI) {
+                case 0: {
+                    invokeDrmDecryptEncryptAPI();
+                    break;
+                }
+                case 1: {
+                    invokeDrmPropertyAPI();
+                    break;
+                }
+                case 2: {
+                    invokeDrmSetListener();
+                    break;
+                }
+                case 3:
+                default: {
+                    invokeDrmSecureStopAPI();
+                    break;
+                }
+            }
+            AMediaDrm_closeSession(mDrm, &mSessionId);
+            AMediaDrm_release(mDrm);
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkMediaDrmFuzzer ndkMediaDrmFuzzer(data, size);
+    ndkMediaDrmFuzzer.invokeNdkDrm();
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp b/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp
new file mode 100644
index 0000000..9bbb79c
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_process.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaExtractor.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+constexpr int32_t kCaseStart = 0;
+constexpr int32_t kCaseEnd = 8;
+constexpr float kMinDataSizeFactor = 0.5;
+constexpr int32_t kMaxIterations = 1000;
+const std::string kPathPrefix = "file://";
+
+constexpr SeekMode kSeekMode[] = {AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC,
+                                  AMEDIAEXTRACTOR_SEEK_NEXT_SYNC,
+                                  AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC};
+
+class NdkExtractorFuzzer {
+  public:
+    NdkExtractorFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
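+        // Write a fuzzer-chosen prefix of the input into a temp file that backs the data source.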
+        mDataSourceFd = mkstemp(mTestPath);
+        std::vector<char> dataBuffer = mFdp.ConsumeBytes<char>(
+                mFdp.ConsumeIntegralInRange<int32_t>(kMinDataSizeFactor * size, size));
+        mDataSize = dataBuffer.size();
+        write(mDataSourceFd, dataBuffer.data(), dataBuffer.size());
+    };
+
+    ~NdkExtractorFuzzer() {
+        close(mDataSourceFd);
+        remove(mTestPath);
+    };
+
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    int32_t mDataSourceFd = 0;
+    int32_t mDataSize = 0;
+
+    // Mutable TestSource file path template for mkstemp().
+    char mTestPath[64] = "/data/local/tmp/TestSource_XXXXXX";
+};
+
+void NdkExtractorFuzzer::process() {
+    AMediaExtractor* mMediaExtractor = AMediaExtractor_new();
+    AMediaDataSource* mDataSource = nullptr;
+
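+    // Exercise both data-source paths: a plain file descriptor or a uri-backed AMediaDataSource.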
+    if (mFdp.ConsumeBool()) {
+        AMediaExtractor_setDataSourceFd(mMediaExtractor, mDataSourceFd, 0, mDataSize);
+    } else {
+        mDataSource = AMediaDataSource_newUri((kPathPrefix + mTestPath).c_str(), 0 /* numkeys */,
+                                              nullptr /* keyvalues */);
+        AMediaExtractor_setDataSourceCustom(mMediaExtractor, mDataSource);
+    }
+
+    /**
+     * Limit the number of iterations of the while loop
+     * to prevent a possible timeout.
+     */
+    int32_t count = 0;
+    while (mFdp.remaining_bytes() && count++ < kMaxIterations) {
+        switch (mFdp.ConsumeIntegralInRange<int32_t>(kCaseStart, kCaseEnd)) {
+            case 0:{
+                AMediaExtractor_selectTrack(mMediaExtractor,
+                                            mFdp.ConsumeIntegral<size_t>() /* idx */);
+                break;
+            }
+            case 1:{
+                AMediaExtractor_unselectTrack(mMediaExtractor,
+                                              mFdp.ConsumeIntegral<size_t>() /* idx */);
+                break;
+            }
+            case 2:{
+                int32_t sampleSize = AMediaExtractor_getSampleSize(mMediaExtractor);
+                if (sampleSize > 0) {
+                    std::vector<uint8_t> buffer(sampleSize);
+                    AMediaExtractor_readSampleData(
+                            mMediaExtractor, buffer.data(),
+                            mFdp.ConsumeIntegralInRange<size_t>(0, sampleSize) /* capacity */);
+                }
+                break;
+            }
+            case 3:{
+                AMediaExtractor_getSampleFlags(mMediaExtractor);
+                break;
+            }
+            case 4:{
+                AMediaExtractor_getSampleCryptoInfo(mMediaExtractor);
+                break;
+            }
+            case 5:{
+                AMediaExtractor_getPsshInfo(mMediaExtractor);
+                break;
+            }
+            case 6:{
+                AMediaExtractor_advance(mMediaExtractor);
+                break;
+            }
+            case 7:{
+                AMediaFormat* mediaFormat = mFdp.ConsumeBool() ? AMediaFormat_new() : nullptr;
+                AMediaExtractor_getSampleFormat(mMediaExtractor, mediaFormat);
+                AMediaFormat_delete(mediaFormat);
+                break;
+            }
+            case 8:{
+                AMediaExtractor_seekTo(mMediaExtractor,
+                                       mFdp.ConsumeIntegral<int64_t>() /* seekPosUs */,
+                                       mFdp.PickValueInArray(kSeekMode) /* mode */);
+                break;
+            }
+        };
+    }
+
+    AMediaDataSource_delete(mDataSource);
+    AMediaExtractor_delete(mMediaExtractor);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    /**
+     * Create a threadpool for incoming binder transactions;
+     * without it, the extractor results in a DoS after a few instances.
+     */
+    ABinderProcess_startThreadPool();
+
+    NdkExtractorFuzzer ndkExtractorFuzzer(data, size);
+    ndkExtractorFuzzer.process();
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
new file mode 100644
index 0000000..6c11798
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/native_handle.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/BufferQueue.h>
+#include <media/NdkImageReader.h>
+
+constexpr int32_t kMaxSize = INT_MAX;
+constexpr int32_t kMinSize = 1;
+constexpr int32_t kMinImages = 1;
+
+class NdkImageReaderFuzzer {
+  public:
+    NdkImageReaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    static void onImageAvailable(void*, AImageReader*){};
+    static void onBufferRemoved(void*, AImageReader*, AHardwareBuffer*){};
+};
+
+void NdkImageReaderFuzzer::process() {
+    AImageReader* reader = nullptr;
+    AImage* img = nullptr;
+    native_handle_t* handle = nullptr;
+    int32_t* acquireFenceFd = nullptr;
+    int32_t imageWidth = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageHeight = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageFormat = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageUsage = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageMaxCount = mFdp.ConsumeIntegralInRange<int32_t>(
+            kMinImages, android::BufferQueue::MAX_MAX_ACQUIRED_BUFFERS);
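+    // The listeners below are intentional no-ops; only the registration paths are exercised.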
+    AImageReader_ImageListener readerAvailableCb{this, NdkImageReaderFuzzer::onImageAvailable};
+    AImageReader_BufferRemovedListener readerDetachedCb{this, onBufferRemoved};
+
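+    // Randomly pick between the basic constructor and the variant that also takes usage flags.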
+    if (mFdp.ConsumeBool()) {
+        AImageReader_new(imageWidth, imageHeight, imageFormat, imageMaxCount, &reader);
+    } else {
+        AImageReader_newWithUsage(imageWidth, imageHeight, imageFormat, imageUsage, imageMaxCount,
+                                  &reader);
+    }
+    while (mFdp.remaining_bytes()) {
+        auto ndkImageFunction = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { AImageReader_acquireNextImage(reader, &img); },
+                [&]() { AImageReader_acquireLatestImage(reader, &img); },
+                [&]() { AImageReader_setImageListener(reader, &readerAvailableCb); },
+                [&]() { AImageReader_acquireNextImageAsync(reader, &img, acquireFenceFd); },
+                [&]() { AImageReader_acquireLatestImageAsync(reader, &img, acquireFenceFd); },
+                [&]() { AImageReader_setBufferRemovedListener(reader, &readerDetachedCb); },
+                [&]() { AImageReader_getWindowNativeHandle(reader, &handle); },
+        });
+        ndkImageFunction();
+    }
+    AImageReader_delete(reader);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkImageReaderFuzzer ndkImageReaderFuzzer(data, size);
+    ndkImageReaderFuzzer.process();
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
new file mode 100644
index 0000000..c19ea13
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -0,0 +1,257 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <datasource/FileSource.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaFormat.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include <fstream>
+
+const char* kValidKeys[] = {
+        AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+        AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+        AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+        AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+        AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+        AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_AAC_PROFILE,
+        AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+        AMEDIAFORMAT_KEY_ALBUM,
+        AMEDIAFORMAT_KEY_ALBUMART,
+        AMEDIAFORMAT_KEY_ALBUMARTIST,
+        AMEDIAFORMAT_KEY_ARTIST,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID,
+        AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+        AMEDIAFORMAT_KEY_AUTHOR,
+        AMEDIAFORMAT_KEY_BITRATE_MODE,
+        AMEDIAFORMAT_KEY_BIT_RATE,
+        AMEDIAFORMAT_KEY_BITS_PER_SAMPLE,
+        AMEDIAFORMAT_KEY_CAPTURE_RATE,
+        AMEDIAFORMAT_KEY_CDTRACKNUMBER,
+        AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_CHANNEL_MASK,
+        AMEDIAFORMAT_KEY_COLOR_FORMAT,
+        AMEDIAFORMAT_KEY_COLOR_RANGE,
+        AMEDIAFORMAT_KEY_COLOR_STANDARD,
+        AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+        AMEDIAFORMAT_KEY_COMPILATION,
+        AMEDIAFORMAT_KEY_COMPLEXITY,
+        AMEDIAFORMAT_KEY_COMPOSER,
+        AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+        AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_IV,
+        AMEDIAFORMAT_KEY_CRYPTO_KEY,
+        AMEDIAFORMAT_KEY_CRYPTO_MODE,
+        AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK,
+        AMEDIAFORMAT_KEY_CSD,
+        AMEDIAFORMAT_KEY_CSD_0,
+        AMEDIAFORMAT_KEY_CSD_1,
+        AMEDIAFORMAT_KEY_CSD_2,
+        AMEDIAFORMAT_KEY_CSD_AVC,
+        AMEDIAFORMAT_KEY_CSD_HEVC,
+        AMEDIAFORMAT_KEY_D263,
+        AMEDIAFORMAT_KEY_DATE,
+        AMEDIAFORMAT_KEY_DISCNUMBER,
+        AMEDIAFORMAT_KEY_DISPLAY_CROP,
+        AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+        AMEDIAFORMAT_KEY_DISPLAY_WIDTH,
+        AMEDIAFORMAT_KEY_DURATION,
+        AMEDIAFORMAT_KEY_ENCODER_DELAY,
+        AMEDIAFORMAT_KEY_ENCODER_PADDING,
+        AMEDIAFORMAT_KEY_ESDS,
+        AMEDIAFORMAT_KEY_EXIF_OFFSET,
+        AMEDIAFORMAT_KEY_EXIF_SIZE,
+        AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+        AMEDIAFORMAT_KEY_FRAME_COUNT,
+        AMEDIAFORMAT_KEY_FRAME_RATE,
+        AMEDIAFORMAT_KEY_GENRE,
+        AMEDIAFORMAT_KEY_GRID_COLUMNS,
+        AMEDIAFORMAT_KEY_GRID_ROWS,
+        AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+        AMEDIAFORMAT_KEY_HDR10_PLUS_INFO,
+        AMEDIAFORMAT_KEY_HEIGHT,
+        AMEDIAFORMAT_KEY_ICC_PROFILE,
+        AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+        AMEDIAFORMAT_KEY_IS_ADTS,
+        AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+        AMEDIAFORMAT_KEY_IS_DEFAULT,
+        AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+        AMEDIAFORMAT_KEY_IS_SYNC_FRAME,
+        AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+        AMEDIAFORMAT_KEY_LANGUAGE,
+        AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK,
+        AMEDIAFORMAT_KEY_LATENCY,
+        AMEDIAFORMAT_KEY_LEVEL,
+        AMEDIAFORMAT_KEY_LOCATION,
+        AMEDIAFORMAT_KEY_LOOP,
+        AMEDIAFORMAT_KEY_LOW_LATENCY,
+        AMEDIAFORMAT_KEY_LYRICIST,
+        AMEDIAFORMAT_KEY_MANUFACTURER,
+        AMEDIAFORMAT_KEY_MAX_BIT_RATE,
+        AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER,
+        AMEDIAFORMAT_KEY_MAX_HEIGHT,
+        AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+        AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER,
+        AMEDIAFORMAT_KEY_MAX_WIDTH,
+        AMEDIAFORMAT_KEY_MIME,
+        AMEDIAFORMAT_KEY_MPEG_USER_DATA,
+        AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER,
+        AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+        AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+        AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+        AMEDIAFORMAT_KEY_OPERATING_RATE,
+        AMEDIAFORMAT_KEY_PCM_ENCODING,
+        AMEDIAFORMAT_KEY_PICTURE_TYPE,
+        AMEDIAFORMAT_KEY_PRIORITY,
+        AMEDIAFORMAT_KEY_PROFILE,
+        AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN,
+        AMEDIAFORMAT_KEY_PSSH,
+        AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+        AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+        AMEDIAFORMAT_KEY_ROTATION,
+        AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET,
+        AMEDIAFORMAT_KEY_SAMPLE_RATE,
+        AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND,
+        AMEDIAFORMAT_KEY_SAR_HEIGHT,
+        AMEDIAFORMAT_KEY_SAR_WIDTH,
+        AMEDIAFORMAT_KEY_SEI,
+        AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+        AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS,
+        AMEDIAFORMAT_KEY_STRIDE,
+        AMEDIAFORMAT_KEY_TARGET_TIME,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+        AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+        AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT,
+        AMEDIAFORMAT_KEY_THUMBNAIL_TIME,
+        AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH,
+        AMEDIAFORMAT_KEY_TILE_HEIGHT,
+        AMEDIAFORMAT_KEY_TILE_WIDTH,
+        AMEDIAFORMAT_KEY_TIME_US,
+        AMEDIAFORMAT_KEY_TITLE,
+        AMEDIAFORMAT_KEY_TRACK_ID,
+        AMEDIAFORMAT_KEY_TRACK_INDEX,
+        AMEDIAFORMAT_KEY_VALID_SAMPLES,
+        AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
+        AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE,
+        AMEDIAFORMAT_VIDEO_QP_B_MAX,
+        AMEDIAFORMAT_VIDEO_QP_B_MIN,
+        AMEDIAFORMAT_VIDEO_QP_I_MAX,
+        AMEDIAFORMAT_VIDEO_QP_I_MIN,
+        AMEDIAFORMAT_VIDEO_QP_MAX,
+        AMEDIAFORMAT_VIDEO_QP_MIN,
+        AMEDIAFORMAT_VIDEO_QP_P_MAX,
+        AMEDIAFORMAT_VIDEO_QP_P_MIN,
+        AMEDIAFORMAT_KEY_WIDTH,
+        AMEDIAFORMAT_KEY_XMP_OFFSET,
+        AMEDIAFORMAT_KEY_XMP_SIZE,
+        AMEDIAFORMAT_KEY_YEAR,
+};
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMinChoice = 0;
+constexpr size_t kMaxChoice = 9;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    AMediaFormat* mediaFormat = AMediaFormat_new();
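+    // A single AMediaFormat instance accumulates fuzzer-chosen keys and values across iterations.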
+    while (fdp.remaining_bytes()) {
+        const char* name = nullptr;
+        std::string nameString;
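+        // Leave 'name' as nullptr half the time to exercise the null-key handling of the setters.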
+        if (fdp.ConsumeBool()) {
+            nameString =
+                    fdp.ConsumeBool()
+                            ? fdp.PickValueInArray(kValidKeys)
+                            : fdp.ConsumeRandomLengthString(
+                                      fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            name = nameString.c_str();
+        }
+        switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
+            case 0: {
+                AMediaFormat_setInt32(mediaFormat, name,
+                                      fdp.ConsumeIntegral<int32_t>() /* value */);
+                break;
+            }
+            case 1: {
+                AMediaFormat_setInt64(mediaFormat, name,
+                                      fdp.ConsumeIntegral<int64_t>() /* value */);
+                break;
+            }
+            case 2: {
+                AMediaFormat_setFloat(mediaFormat, name,
+                                      fdp.ConsumeFloatingPoint<float>() /* value */);
+                break;
+            }
+            case 3: {
+                AMediaFormat_setDouble(mediaFormat, name,
+                                       fdp.ConsumeFloatingPoint<double>() /* value */);
+                break;
+            }
+            case 4: {
+                AMediaFormat_setSize(mediaFormat, name, fdp.ConsumeIntegral<size_t>() /* value */);
+                break;
+            }
+            case 5: {
+                std::string value;
+                if (fdp.ConsumeBool()) {
+                    value = fdp.ConsumeRandomLengthString(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                }
+                AMediaFormat_setString(mediaFormat, name,
+                                       fdp.ConsumeBool() ? nullptr : value.c_str());
+                break;
+            }
+            case 6: {
+                AMediaFormat_setRect(mediaFormat, name, fdp.ConsumeIntegral<int32_t>() /* left */,
+                                     fdp.ConsumeIntegral<int32_t>() /* top */,
+                                     fdp.ConsumeIntegral<int32_t>() /* right */,
+                                     fdp.ConsumeIntegral<int32_t>() /* bottom */);
+                break;
+            }
+            case 7: {
+                std::vector<uint8_t> bufferData = fdp.ConsumeBytes<uint8_t>(
+                        fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaFormat_setBuffer(mediaFormat, name, bufferData.data(), bufferData.size());
+                break;
+            }
+            case 8: {
+                AMediaFormat_toString(mediaFormat);
+                break;
+            }
+            default: {
+                AMediaFormat* format = fdp.ConsumeBool() ? nullptr : AMediaFormat_new();
+                AMediaFormat_copy(format, mediaFormat);
+                AMediaFormat_delete(format);
+                break;
+            }
+        }
+    }
+    AMediaFormat_clear(mediaFormat);
+    AMediaFormat_delete(mediaFormat);
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp
new file mode 100644
index 0000000..8c49d28
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaMuxer.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+const std::string kMuxerFile = "mediaMuxer";
+const std::string kAppendFile = "mediaAppend";
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMinChoice = 0;
+constexpr size_t kMaxChoice = 7;
+constexpr size_t kMaxStringLength = 20;
+constexpr size_t kOffset = 0;
+
+constexpr OutputFormat kOutputFormat[] = {AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4,
+                                          AMEDIAMUXER_OUTPUT_FORMAT_WEBM,
+                                          AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP};
+constexpr AppendMode kAppendMode[] = {AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP,
+                                      AMEDIAMUXER_APPEND_TO_EXISTING_DATA};
+
+const std::string kAudioMimeType[] = {"audio/3gpp", "audio/amr-wb", "audio/mp4a-latm",
+                                      "audio/flac", "audio/vorbis", "audio/opus"};
+
+const std::string kVideoMimeType[] = {"video/x-vnd.on2.vp8", "video/x-vnd.on2.vp9", "video/av01",
+                                      "video/avc",           "video/hevc",          "video/mp4v-es",
+                                      "video/3gpp"};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMediaFormat* format) {
+    std::string mimeType = fdp.ConsumeBool() ? fdp.ConsumeRandomLengthString(kMaxStringLength)
+                                             : fdp.PickValueInArray(kAudioMimeType);
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mimeType.c_str());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt64(format, AMEDIAFORMAT_KEY_DURATION, fdp.ConsumeIntegral<int64_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMediaFormat* format) {
+    std::string mimeType = fdp.ConsumeBool() ? fdp.ConsumeRandomLengthString(kMaxStringLength)
+                                             : fdp.PickValueInArray(kVideoMimeType);
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mimeType.c_str());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                          fdp.ConsumeFloatingPoint<float>());
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_CAPTURE_RATE, fdp.ConsumeFloatingPoint<float>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, fdp.ConsumeIntegral<int32_t>());
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    /**
+     * Create a threadpool for incoming binder transactions;
+     * without it, the muxer results in a DoS after a few instances.
+     */
+    ABinderProcess_startThreadPool();
+    FuzzedDataProvider fdp(data, size);
+    /**
+     * memfd_create() creates an anonymous file and returns a file
+     * descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+     * operations on this file.
+     */
+    int32_t fd = -1;
+    AMediaMuxer* muxer = nullptr;
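+    // Either create a fresh muxer over an empty memfd or append to a memfd pre-filled with fuzzed bytes.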
+    if (fdp.ConsumeBool()) {
+        fd = memfd_create(kMuxerFile.c_str(), MFD_ALLOW_SEALING);
+        muxer = AMediaMuxer_new(fd, fdp.ConsumeBool()
+                                            ? fdp.PickValueInArray(kOutputFormat)
+                                            : (OutputFormat)fdp.ConsumeIntegral<int32_t>());
+    } else {
+        fd = memfd_create(kAppendFile.c_str(), MFD_ALLOW_SEALING);
+        std::vector<uint8_t> appendData =
+                fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+        write(fd, appendData.data(), appendData.size());
+        muxer = AMediaMuxer_append(fd, fdp.PickValueInArray(kAppendMode) /* mode */);
+    }
+    if (!muxer) {
+        close(fd);
+        return 0;
+    }
+    AMediaFormat* mediaFormat = nullptr;
+    ssize_t trackIdx = 0;
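+    // trackIdx is refreshed whenever case 6 adds a track and is reused by the write-sample path below.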
+    while (fdp.remaining_bytes()) {
+        int32_t kSwitchChoice = fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice);
+        switch (kSwitchChoice) {
+            case 0: {
+                AMediaMuxer_setLocation(muxer, fdp.ConsumeFloatingPoint<float>() /* latitude */,
+                                        fdp.ConsumeFloatingPoint<float>() /* longitude */);
+                break;
+            }
+            case 1: {
+                AMediaMuxer_setOrientationHint(muxer, fdp.ConsumeIntegral<int32_t>() /* degrees */);
+                break;
+            }
+            case 2: {
+                AMediaMuxer_start(muxer);
+                break;
+            }
+            case 3: {
+                AMediaMuxer_stop(muxer);
+                break;
+            }
+            case 4: {
+                AMediaMuxer_getTrackCount(muxer);
+                break;
+            }
+            case 5: {
+                AMediaFormat* getFormat =
+                        AMediaMuxer_getTrackFormat(muxer, fdp.ConsumeIntegral<size_t>() /* idx */);
+                AMediaFormat_delete(getFormat);
+                break;
+            }
+            case 6: {
+                mediaFormat = AMediaFormat_new();
+                fdp.ConsumeBool() ? getSampleAudioFormat(fdp, mediaFormat)
+                                  : getSampleVideoFormat(fdp, mediaFormat);
+                trackIdx = AMediaMuxer_addTrack(muxer, mediaFormat);
+                AMediaFormat_delete(mediaFormat);
+                break;
+            }
+            default: {
+                std::vector<uint8_t> sampleData = fdp.ConsumeBytes<uint8_t>(
+                        fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaCodecBufferInfo codecBuffer;
+                codecBuffer.size = sampleData.size();
+                codecBuffer.offset = kOffset;
+                codecBuffer.presentationTimeUs = fdp.ConsumeIntegral<int64_t>();
+                codecBuffer.flags = fdp.ConsumeIntegral<uint32_t>();
+                AMediaMuxer_writeSampleData(
+                        muxer,
+                        fdp.ConsumeBool() ? trackIdx : fdp.ConsumeIntegral<size_t>() /* trackIdx */,
+                        sampleData.data(), &codecBuffer);
+                break;
+            }
+        }
+    }
+    AMediaMuxer_delete(muxer);
+    close(fd);
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
new file mode 100644
index 0000000..d348f66
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+
+constexpr int32_t kMaxNdkCodecAPIs = 12;
+
+class NdkSyncCodecFuzzer : public NdkMediaCodecFuzzerBase {
+  public:
+    NdkSyncCodecFuzzer(const uint8_t* data, size_t size)
+        : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+        setFdp(&mFdp);
+    };
+    void invokeSyncCodeConfigAPI();
+
+    static void CodecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+                                     int64_t systemNano) {
+        (void)codec;
+        (void)userdata;
+        (void)mediaTimeUs;
+        (void)systemNano;
+    };
+
+  private:
+    FuzzedDataProvider mFdp;
+    AMediaCodec* mCodec = nullptr;
+    void invokekSyncCodecAPIs(bool isEncoder);
+};
+
+void NdkSyncCodecFuzzer::invokekSyncCodecAPIs(bool isEncoder) {
+    ANativeWindow* nativeWindow = nullptr;
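+    // nativeWindow may be created by case 3 (persistent input surface), consumed by cases 0 and 12,
+    // and is released at the end of this function if it was created.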
+    int32_t numOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+    int32_t count = 0;
+    while (++count <= numOfFrames) {
+        int32_t ndkcodecAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+        switch (ndkcodecAPI) {
+            case 0: {  // configure the codec
+                AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow, nullptr /* crypto */,
+                                      (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0));
+                break;
+            }
+            case 1: {  // start codec
+                AMediaCodec_start(mCodec);
+                break;
+            }
+            case 2: {  // stop codec
+                AMediaCodec_stop(mCodec);
+                break;
+            }
+            case 3: {  // create persistent input surface
+                AMediaCodec_createPersistentInputSurface(&nativeWindow);
+                break;
+            }
+            case 4: {  // buffer operation APIs
+                invokeInputBufferOperationAPI(mCodec);
+                break;
+            }
+            case 5: {
+                invokeOutputBufferOperationAPI(mCodec);
+                break;
+            }
+            case 6: {  // get input and output Format
+                invokeCodecFormatAPI(mCodec);
+                break;
+            }
+            case 7: {
+                AMediaCodec_signalEndOfInputStream(mCodec);
+                break;
+            }
+            case 8: {  // set parameters
+                // Create a new parameter format and set it on the codec
+                AMediaFormat* params = AMediaFormat_new();
+                AMediaFormat_setInt32(
+                        params, "video-bitrate",
+                        mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+                AMediaCodec_setParameters(mCodec, params);
+                AMediaFormat_delete(params);
+                break;
+            }
+            case 9: {  // flush codec
+                AMediaCodec_flush(mCodec);
+                if (mFdp.ConsumeBool()) {
+                    AMediaCodec_start(mCodec);
+                }
+                break;
+            }
+            case 10: {  // get the codec name
+                char* name = nullptr;
+                AMediaCodec_getName(mCodec, &name);
+                AMediaCodec_releaseName(mCodec, name);
+                break;
+            }
+            case 11: {  // set callback API for frame render output
+                std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+                        mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaCodecOnFrameRendered callback = CodecOnFrameRendered;
+                AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+                break;
+            }
+            case 12:
+            default: {  // set persistent input surface
+                AMediaCodec_setInputSurface(mCodec, nativeWindow);
+            }
+        }
+    }
+    if (nativeWindow) {
+        ANativeWindow_release(nativeWindow);
+    }
+}
+
+void NdkSyncCodecFuzzer::invokeSyncCodeConfigAPI() {
+    while (mFdp.remaining_bytes() > 0) {
+        bool isEncoder = mFdp.ConsumeBool();
+        mCodec = createCodec(isEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+        if (mCodec) {
+            invokekSyncCodecAPIs(isEncoder);
+            AMediaCodec_delete(mCodec);
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkSyncCodecFuzzer ndkSyncCodecFuzzer(data, size);
+    ndkSyncCodecFuzzer.invokeSyncCodeConfigAPI();
+    return 0;
+}
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 71bc6d9..814a327 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -583,7 +583,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param width the width of the image will be filled here if the method call succeeeds.
+ * @param width the width of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -599,7 +599,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param height the height of the image will be filled here if the method call succeeeds.
+ * @param height the height of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -617,7 +617,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param format the format of the image will be filled here if the method call succeeeds.
+ * @param format the format of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -636,7 +636,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param rect the cropped rectangle of the image will be filled here if the method call succeeeds.
+ * @param rect the cropped rectangle of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -662,7 +662,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param timestampNs the timestamp of the image will be filled here if the method call succeeeds.
+ * @param timestampNs the timestamp of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -682,7 +682,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param numPlanes the number of planes of the image will be filled here if the method call
- *         succeeeds.
+ *         succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -706,7 +706,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param pixelStride the pixel stride of the image will be filled here if the method call succeeeds.
+ * @param pixelStride the pixel stride of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -735,7 +735,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param rowStride the row stride of the image will be filled here if the method call succeeeds.
+ * @param rowStride the row stride of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -762,8 +762,8 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param data the data pointer of the image will be filled here if the method call succeeeds.
- * @param dataLength the valid length of data will be filled here if the method call succeeeds.
+ * @param data the data pointer of the image will be filled here if the method call succeeds.
+ * @param dataLength the valid length of data will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -826,6 +826,25 @@
  */
 media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer) __INTRODUCED_IN(26);
 
+/**
+ * Query the dataspace of the input {@link AImage}.
+ *
+ * Available since API level 33.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param dataSpace the dataspace of the image will be filled here if the method call succeeds.
+ *                  This must be one of the ADATASPACE_* enum values defined in
+ *                  {@link ADataSpace}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or dataSpace is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
+media_status_t AImage_getDataSpace(const AImage* image,
+                                   /*out*/int32_t* dataSpace) __INTRODUCED_IN(33);
+
 __END_DECLS
 
 #endif //_NDK_IMAGE_H
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 4bd7f2a..992955b 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -79,7 +79,7 @@
  *            by the user, one of them has to be released before a new {@link AImage} will become
  *            available for access through {@link AImageReader_acquireLatestImage} or
  *            {@link AImageReader_acquireNextImage}. Must be greater than 0.
- * @param reader The created image reader will be filled here if the method call succeeeds.
+ * @param reader The created image reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -133,7 +133,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param width the default width of the reader will be filled here if the method call succeeeds.
+ * @param width the default width of the reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -151,7 +151,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param height the default height of the reader will be filled here if the method call succeeeds.
+ * @param height the default height of the reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -165,7 +165,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param format the fromat of the reader will be filled here if the method call succeeeds. The
+ * @param format the format of the reader will be filled here if the method call succeeds. The
+ *                value will be one of the AIMAGE_FORMAT_* enum values defined in {@link NdkImage.h}.
  *
  * @return <ul>
@@ -181,7 +181,7 @@
  *
  * @param reader The image reader of interest.
  * @param maxImages the maximum number of concurrently acquired images of the reader will be filled
- *                here if the method call succeeeds.
+ *                here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -212,7 +212,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param image the acquired {@link AImage} will be filled here if the method call succeeeds.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -257,7 +257,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param image the acquired {@link AImage} will be filled here if the method call succeeeds.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -387,6 +387,44 @@
         /*out*/ AImageReader** reader) __INTRODUCED_IN(26);
 
 /**
+ * AImageReader constructor similar to {@link AImageReader_newWithUsage} that takes
+ * two additional parameters to build the format of the Image. All other parameters
+ * and the return values are identical to those passed to {@link AImageReader_newWithUsage}.
+ *
+ * <p>Instead of passing {@code format} parameter, this constructor accepts
+ * the combination of {@code hardwareBufferFormat} and {@code dataSpace} for the
+ * format of the Image that the reader will produce.</p>
+ *
+ * Available since API level 33.
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param usage specifies how the consumer will access the AImage.
+ *              See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @param maxImages The maximum number of images the user will want to access simultaneously.
+ *                  See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @param hardwareBufferFormat The hardware buffer format passed by the producer.
+ *                             This must be one of the AHARDWAREBUFFER_FORMAT_* enum values defined
+ *                             in {@link hardware_buffer.h}.
+ * @param dataSpace The dataspace of the Image passed by the producer.
+ *                  This must be one of the ADATASPACE_* enum values defined in
+ *                  {@link ADataSpace}.
+ * @param reader The created image reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ *                 height, maxImages, hardwareBufferFormat or dataSpace arguments
+ *                 is not supported.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImageReader_newWithUsage
+ */
+media_status_t AImageReader_newWithDataSpace(int32_t width, int32_t height, uint64_t usage,
+        int32_t maxImages, uint32_t hardwareBufferFormat, int32_t dataSpace,
+        /*out*/ AImageReader** reader) __INTRODUCED_IN(33);
+
+/**
  * Acquire the next {@link AImage} from the image reader's queue asynchronously.
  *
  * <p>AImageReader acquire method similar to {@link AImageReader_acquireNextImage} that takes an
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index c8faced..bac4b22 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,11 +13,13 @@
     AImageReader_getWindow; # introduced=24
     AImageReader_new; # introduced=24
     AImageReader_newWithUsage; # introduced=26
+    AImageReader_newWithDataSpace; # introduced=Tiramisu
     AImageReader_setBufferRemovedListener; # introduced=26
     AImageReader_setImageListener; # introduced=24
     AImage_delete; # introduced=24
     AImage_deleteAsync; # introduced=26
     AImage_getCropRect; # introduced=24
+    AImage_getDataSpace; # introduced=Tiramisu
     AImage_getFormat; # introduced=24
     AImage_getHardwareBuffer; # introduced=26
     AImage_getHeight; # introduced=24
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 4b44dcf..d41a7f9 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,6 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    min_sdk_version: "28",
+    min_sdk_version: "29",
     target_sdk_version: "30",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
index eea9914..772a29b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
@@ -20,8 +20,6 @@
     package="com.android.media.benchmark">
     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
-    <uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
-    <uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
 
     <application
         tools:ignore="AllowBackup,GoogleAppIndexingWarning,MissingApplicationIcon"
@@ -31,4 +29,4 @@
     <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
             android:targetPackage="com.android.media.benchmark"
             android:label="Benchmark Media Test"/>
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b222d47..a2af701 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -30,7 +30,7 @@
     compileSdkVersion 30
     defaultConfig {
         applicationId "com.android.media.benchmark"
-        minSdkVersion 28
+        minSdkVersion 29
         targetSdkVersion 30
         versionCode 1
         versionName "1.0"
@@ -73,4 +73,4 @@
     testImplementation 'junit:junit:4.13.2'
     androidTestImplementation 'androidx.test:runner:1.3.0'
     androidTestImplementation 'androidx.test.ext:junit:1.1.2'
-}
\ No newline at end of file
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
index a0628fa..0b8e7b2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
@@ -80,7 +80,6 @@
         vector<AMediaCodecBufferInfo> frameInfo;
         AMediaCodecBufferInfo info;
         uint32_t inputBufferOffset = 0;
-
         // Get frame data
         while (1) {
             status = extractor->getFrameSample(info);
@@ -111,7 +110,7 @@
         const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
         string sInputReference = string(inputReference);
         decoder->dumpStatistics(sInputReference, sCodecName, (asyncMode ? "async" : "sync"),
-                                statsFile);
+                                (statsFile == nullptr ? "" : statsFile));
         env->ReleaseStringUTFChars(jCodecName, codecName);
         env->ReleaseStringUTFChars(jStatsFile, statsFile);
         env->ReleaseStringUTFChars(jFileName, inputReference);
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
index 08035c9..1e10b37 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
@@ -2,13 +2,12 @@
 
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
+import android.media.MediaFormat;
 import android.os.Build;
-
 import java.util.ArrayList;
 
 public class CodecUtils {
     private CodecUtils() {}
-
     /**
      * Queries the MediaCodecList and returns codec names of supported codecs.
      *
@@ -36,4 +35,46 @@
         }
         return supportedCodecs;
     }
+    /**
+     * Returns the name of a codec that supports the given MediaFormat and features.
+     *
+     * @param format  MediaFormat that the codec should support
+     * @param isSoftware true to search software-only codecs, false for hardware-backed ones
+     * @param features features that the codec must additionally support, or null
+     * @param isEncoder true to search encoders, false to search decoders
+     * @return name of the first matching codec, or null if none is found.
+     */
+    public static String getMediaCodec(MediaFormat format, boolean isSoftware,
+                                  String[] features, boolean isEncoder) {
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        MediaCodecInfo[] codecInfos = mcl.getCodecInfos();
+        String mime = format.getString(MediaFormat.KEY_MIME);
+        for (MediaCodecInfo codecInfo : codecInfos) {
+            if (codecInfo.isEncoder() != isEncoder) continue;
+            if (isSoftware != codecInfo.isSoftwareOnly()) continue;
+            String[] types = codecInfo.getSupportedTypes();
+            for (String type : types) {
+                if (type.equalsIgnoreCase(mime)) {
+                    boolean isOk = true;
+                    MediaCodecInfo.CodecCapabilities codecCapabilities =
+                        codecInfo.getCapabilitiesForType(type);
+                    if (!codecCapabilities.isFormatSupported(format)) {
+                        isOk = false;
+                    }
+                    if (features != null) {
+                        for (String feature : features) {
+                            if (!codecCapabilities.isFeatureSupported(feature)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    if (isOk) {
+                        return codecInfo.getName();
+                    }
+                }
+            }
+        }
+        return null;
+    }
 }
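A minimal usage sketch of the new helper (illustrative only; the format and feature values are arbitrary):

    import android.media.MediaCodecInfo;
    import android.media.MediaFormat;
    import com.android.media.benchmark.library.CodecUtils;

    public final class CodecUtilsSample {
        // Picks a hardware AVC decoder that also supports adaptive playback, if any.
        public static String findAdaptiveAvcDecoder() {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
            String[] features = {
                    MediaCodecInfo.CodecCapabilities.FEATURE_AdaptivePlayback };
            return CodecUtils.getMediaCodec(
                    format, /* isSoftware= */ false, features, /* isEncoder= */ false);
        }
    }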
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 66fee33..a4ba36a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -16,6 +16,8 @@
 
 package com.android.media.benchmark.library;
 
+import android.view.Surface;
+
 import android.media.MediaCodec;
 import android.media.MediaCodec.BufferInfo;
 import android.media.MediaFormat;
@@ -28,13 +30,17 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 
-public class Decoder {
+import com.android.media.benchmark.library.IBufferXfer;
+
+public class Decoder implements IBufferXfer.IReceiveBuffer {
     private static final String TAG = "Decoder";
     private static final boolean DEBUG = false;
     private static final int kQueueDequeueTimeoutUs = 1000;
 
     private final Object mLock = new Object();
     private MediaCodec mCodec;
+    private Surface mSurface = null;
+    private boolean mRender = false;
     private ArrayList<BufferInfo> mInputBufferInfo;
     private Stats mStats;
 
@@ -47,9 +53,29 @@
 
     private ArrayList<ByteBuffer> mInputBuffer;
     private FileOutputStream mOutputStream;
+    private FrameReleaseQueue mFrameReleaseQueue = null;
+    private IBufferXfer.ISendBuffer mIBufferSend = null;
 
+    /* success for decoder */
+    public static final int DECODE_SUCCESS = 0;
+    /* some error happened during decoding */
+    public static final int DECODE_DECODER_ERROR = -1;
+    /* error while creating a decoder */
+    public static final int DECODE_CREATE_ERROR = -2;
     public Decoder() { mStats = new Stats(); }
-
+    public Stats getStats() { return mStats; }
+    @Override
+    public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
+        MediaCodec codec = (MediaCodec)info.obj;
+        codec.releaseOutputBuffer(info.idx, mRender);
+        return true;
+    }
+    @Override
+    public boolean connect(IBufferXfer.ISendBuffer receiver) {
+        Log.d(TAG,"Setting interface of the sender");
+        mIBufferSend = receiver;
+        return true;
+    }
     /**
      * Setup of decoder
      *
@@ -59,6 +85,17 @@
         mSignalledError = false;
         mOutputStream = outputStream;
     }
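+    /**
+     * Setup of decoder for surface output
+     *
+     * @param surface Surface to render the decoded output to
+     * @param render true to render the output buffers on the surface
+     * @param useFrameReleaseQueue true to pace output buffer releases with a FrameReleaseQueue
+     * @param frameRate frame rate used by the FrameReleaseQueue to pace releases
+     */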
+    public void setupDecoder(Surface surface, boolean render,
+            boolean useFrameReleaseQueue, int frameRate) {
+        mSignalledError = false;
+        mOutputStream = null;
+        mSurface = surface;
+        mRender = render;
+        if (useFrameReleaseQueue) {
+            Log.i(TAG, "Using FrameReleaseQueue with frameRate " + frameRate);
+            mFrameReleaseQueue = new FrameReleaseQueue(mRender, frameRate);
+        }
+    }
 
     private MediaCodec createCodec(String codecName, MediaFormat format) throws IOException {
         String mime = format.getString(MediaFormat.KEY_MIME);
@@ -95,7 +132,8 @@
      * @param asyncMode       Will run on async implementation if true
      * @param format          For creating the decoder if codec name is empty and configuring it
      * @param codecName       Will create the decoder with codecName
-     * @return 0 if decode was successful , -1 for fail, -2 for decoder not created
+     * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+     *         DECODE_CREATE_ERROR for decoder not created
      * @throws IOException if the codec cannot be created.
      */
     public int decode(@NonNull ArrayList<ByteBuffer> inputBuffer,
@@ -112,7 +150,10 @@
         long sTime = mStats.getCurTime();
         mCodec = createCodec(codecName, format);
         if (mCodec == null) {
-            return -2;
+            return DECODE_CREATE_ERROR;
+        }
+        if (mFrameReleaseQueue != null) {
+            mFrameReleaseQueue.setMediaCodec(mCodec);
         }
         if (asyncMode) {
             mCodec.setCallback(new MediaCodec.Callback() {
@@ -158,7 +199,7 @@
         if (DEBUG) {
             Log.d(TAG, "Media Format : " + format.toString());
         }
-        mCodec.configure(format, null, null, isEncoder);
+        mCodec.configure(format, mSurface, null, isEncoder);
         mCodec.start();
         Log.i(TAG, "Codec started ");
         long eTime = mStats.getCurTime();
@@ -168,7 +209,7 @@
             try {
                 synchronized (mLock) { mLock.wait(); }
                 if (mSignalledError) {
-                    return -1;
+                    return DECODE_DECODER_ERROR;
                 }
             } catch (InterruptedException e) {
                 e.printStackTrace();
@@ -201,7 +242,7 @@
                         Log.e(TAG,
                                 "MediaCodec.dequeueOutputBuffer"
                                         + " returned invalid index " + outputBufferId);
-                        return -1;
+                        return DECODE_DECODER_ERROR;
                     }
                 } else {
                     mStats.addOutputTime();
@@ -212,9 +253,13 @@
                 }
             }
         }
+        if (mFrameReleaseQueue != null) {
+            Log.i(TAG, "Ending FrameReleaseQueue");
+            mFrameReleaseQueue.stopFrameRelease();
+        }
         mInputBuffer.clear();
         mInputBufferInfo.clear();
-        return 0;
+        return DECODE_SUCCESS;
     }
 
     /**
@@ -290,7 +335,9 @@
         if (DEBUG) {
             Log.d(TAG,
                     "In OutputBufferAvailable ,"
-                            + " output frame number = " + mNumOutputFrame);
+                            + " output frame number = " + mNumOutputFrame
+                            + " timestamp = " + outputBufferInfo.presentationTimeUs
+                            + " size = " + outputBufferInfo.size);
         }
         if (mOutputStream != null) {
             try {
@@ -303,7 +350,21 @@
                 Log.d(TAG, "Error Dumping File: Exception " + e.toString());
             }
         }
-        mediaCodec.releaseOutputBuffer(outputBufferId, false);
+        if (mFrameReleaseQueue != null) {
+            mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+                                            outputBufferInfo.presentationTimeUs);
+        } else if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            info.buf = mediaCodec.getOutputBuffer(outputBufferId);
+            info.idx = outputBufferId;
+            info.obj = mediaCodec;
+            info.bytesRead = outputBufferInfo.size;
+            info.presentationTimeUs = outputBufferInfo.presentationTimeUs;
+            info.flag = outputBufferInfo.flags;
+            mIBufferSend.sendBuffer(this, info);
+        } else {
+            mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+        }
         mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
         if (mSawOutputEOS) {
             Log.i(TAG, "Saw output EOS");
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 754cd8e..63d17ee 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -19,6 +19,7 @@
 import android.media.MediaCodec;
 import android.media.MediaCodec.CodecException;
 import android.media.MediaFormat;
+import android.view.Surface;
 import android.util.Log;
 
 import androidx.annotation.NonNull;
@@ -28,34 +29,43 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-public class Encoder {
+public class Encoder implements IBufferXfer.IReceiveBuffer {
     // Change in AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE should also be taken to
     // kDefaultAudioEncodeFrameSize present in BenchmarkCommon.h
     private static final int AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE = 4096;
     private static final String TAG = "Encoder";
     private static final boolean DEBUG = false;
     private static final int kQueueDequeueTimeoutUs = 1000;
-
     private final Object mLock = new Object();
-    private MediaCodec mCodec;
+    private MediaCodec mCodec = null;
     private String mMime;
     private Stats mStats;
 
     private int mOffset;
     private int mFrameSize;
     private int mNumInputFrame;
-    private int mNumFrames;
+    private int mNumFrames = 0;
     private int mFrameRate;
     private int mSampleRate;
     private long mInputBufferSize;
 
+    private int mMinOutputBuffers = 0;
+    private int mNumOutputBuffers = 0;
+    private boolean mUseSurface = false;
+
     private boolean mSawInputEOS;
     private boolean mSawOutputEOS;
     private boolean mSignalledError;
 
-    private FileInputStream mInputStream;
-    private FileOutputStream mOutputStream;
-
+    private FileInputStream mInputStream = null;
+    private FileOutputStream mOutputStream = null;
+    private IBufferXfer.ISendBuffer mIBufferSend = null;
+    /* success for encoder */
+    public static final int ENCODE_SUCCESS = 0;
+    /* some error happened during encoding */
+    public static final int ENCODE_ENCODER_ERROR = -1;
+    /* error while creating an encoder */
+    public static final int ENCODE_CREATE_ERROR = -2;
     public Encoder() {
         mStats = new Stats();
         mNumInputFrame = 0;
@@ -63,6 +73,25 @@
         mSawOutputEOS = false;
         mSignalledError = false;
     }
+    @Override
+    public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
+        if (DEBUG) {
+            Log.d(TAG,"Encoder Getting buffers from external: "
+                + " Bytes Read: " + info.bytesRead
+                + " PresentationUs " + info.presentationTimeUs
+                + " flags: " + info.flag);
+        }
+        MediaCodec codec = (MediaCodec)info.obj;
+        codec.queueInputBuffer(info.idx, 0, info.bytesRead,
+            info.presentationTimeUs, info.flag);
+        return true;
+    }
+    @Override
+    public boolean connect(IBufferXfer.ISendBuffer receiver) {
+        mIBufferSend = receiver;
+        return true;
+    }
+    public Stats getStats() { return mStats; }
 
     /**
      * Setup of encoder
@@ -75,6 +104,17 @@
         this.mInputStream = fileInputStream;
         this.mOutputStream = encoderOutputStream;
     }
+    /**
+     * Setup of encoder for surface input
+     *
+     * @param useSurface true if the application provides input through a Surface
+     * @param numOutputBuffers minimum number of output buffers after which
+     *                         output end of stream is signalled
+     */
+    public void setupEncoder(boolean useSurface, int numOutputBuffers) {
+        this.mUseSurface = useSurface;
+        this.mMinOutputBuffers = numOutputBuffers;
+    }
 
     private MediaCodec createCodec(String codecName, String mime) throws IOException {
         try {
@@ -100,7 +140,52 @@
             return null;
         }
     }
+    /**
+     * Creates and configures the encoder with the given name, format and mime,
+     * provided valid parameters are passed as inputs. This allows the codec to
+     * be configured first (for example to fetch its input surface) and then
+     * used for encode later.
+     *
+     * @param codecName    Will create the encoder with codecName
+     * @param encodeFormat Format of the output data
+     * @param mime         For creating encode format
+     * @return ENCODE_SUCCESS if the encoder was created and configured,
+     *         ENCODE_CREATE_ERROR otherwise
+     * @throws IOException If the codec cannot be created.
+     */
 
+    public int createAndConfigure(String codecName, MediaFormat encodeFormat,
+                                  String mime) throws IOException {
+        if (mCodec == null) {
+            mMime = mime;
+            mCodec = createCodec(codecName, mime);
+            if (mCodec == null) {
+                return ENCODE_CREATE_ERROR;
+            }
+            /*Configure Codec*/
+            try {
+                mCodec.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            } catch (IllegalArgumentException
+                  | IllegalStateException
+                  | MediaCodec.CryptoException e) {
+                Log.e(TAG, "Failed to configure " + mCodec.getName() + " encoder.");
+                e.printStackTrace();
+                return ENCODE_CREATE_ERROR;
+            }
+        }
+        return ENCODE_SUCCESS;
+    }
+    /**
+     * Requests the surface to use as input to the encoder.
+     * @return a valid input surface, or null if the encoder has not been configured yet.
+     */
+    public Surface getInputSurface() {
+        Surface inputSurface = null;
+        if (mCodec != null) {
+            inputSurface = mCodec.createInputSurface();
+        }
+        return inputSurface;
+    }
     /**
      * Encodes the given raw input file and measures the performance of encode operation,
      * provided a valid list of parameters are passed as inputs.
@@ -110,43 +195,39 @@
      * @param encodeFormat Format of the output data
      * @param frameSize    Size of the frame
      * @param asyncMode    Will run on async implementation if true
-     * @return 0 if encode was successful , -1 for fail, -2 for encoder not created
+     * @return ENCODE_SUCCESS if encode was successful, ENCODE_ENCODER_ERROR for fail,
+     *         ENCODE_CREATE_ERROR for encoder not created
      * @throws IOException If the codec cannot be created.
      */
     public int encode(String codecName, MediaFormat encodeFormat, String mime, int frameRate,
                       int sampleRate, int frameSize, boolean asyncMode) throws IOException {
-        mInputBufferSize = mInputStream.getChannel().size();
-        mMime = mime;
+        mInputBufferSize = (mInputStream != null) ? mInputStream.getChannel().size() : 0;
         mOffset = 0;
         mFrameRate = frameRate;
         mSampleRate = sampleRate;
         long sTime = mStats.getCurTime();
-        mCodec = createCodec(codecName, mime);
         if (mCodec == null) {
-            return -2;
-        }
-        /*Configure Codec*/
-        try {
-            mCodec.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
-        } catch (IllegalArgumentException | IllegalStateException | MediaCodec.CryptoException e) {
-            Log.e(TAG, "Failed to configure " + mCodec.getName() + " encoder.");
-            e.printStackTrace();
-            return -2;
-        }
-        if (mMime.startsWith("video/")) {
-            mFrameSize = frameSize;
-        } else {
-            int maxInputSize = AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE;
-            MediaFormat format = mCodec.getInputFormat();
-            if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
-                maxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
-            }
-            mFrameSize = frameSize;
-            if (mFrameSize > maxInputSize && maxInputSize > 0) {
-                mFrameSize = maxInputSize;
+            int status = createAndConfigure(codecName, encodeFormat, mime);
+            if (status != ENCODE_SUCCESS) {
+                return status;
             }
         }
-        mNumFrames = (int) ((mInputBufferSize + mFrameSize - 1) / mFrameSize);
+        if (!mUseSurface) {
+            if (mMime.startsWith("video/")) {
+                mFrameSize = frameSize;
+            } else {
+                int maxInputSize = AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE;
+                MediaFormat format = mCodec.getInputFormat();
+                if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
+                    maxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
+                }
+                mFrameSize = frameSize;
+                if (mFrameSize > maxInputSize && maxInputSize > 0) {
+                    mFrameSize = maxInputSize;
+                }
+            }
+            mNumFrames = (int) ((mInputBufferSize + mFrameSize - 1) / mFrameSize);
+        }
         if (asyncMode) {
             mCodec.setCallback(new MediaCodec.Callback() {
                 @Override
@@ -196,7 +277,7 @@
             try {
                 synchronized (mLock) { mLock.wait(); }
                 if (mSignalledError) {
-                    return -1;
+                    return ENCODE_ENCODER_ERROR;
                 }
             } catch (InterruptedException e) {
                 e.printStackTrace();
@@ -204,12 +285,12 @@
         } else {
             while (!mSawOutputEOS && !mSignalledError) {
                 /* Queue input data */
-                if (!mSawInputEOS) {
+                if (!mSawInputEOS && !mUseSurface) {
                     int inputBufferId = mCodec.dequeueInputBuffer(kQueueDequeueTimeoutUs);
                     if (inputBufferId < 0 && inputBufferId != MediaCodec.INFO_TRY_AGAIN_LATER) {
                         Log.e(TAG, "MediaCodec.dequeueInputBuffer " + "returned invalid index : " +
                                 inputBufferId);
-                        return -1;
+                        return ENCODE_ENCODER_ERROR;
                     }
                     mStats.addInputTime();
                     onInputAvailable(mCodec, inputBufferId);
@@ -225,7 +306,7 @@
                     } else if (outputBufferId != MediaCodec.INFO_TRY_AGAIN_LATER) {
                         Log.e(TAG, "MediaCodec.dequeueOutputBuffer" + " returned invalid index " +
                                 outputBufferId);
-                        return -1;
+                        return ENCODE_ENCODER_ERROR;
                     }
                 } else {
                     mStats.addOutputTime();
@@ -236,7 +317,7 @@
                 }
             }
         }
-        return 0;
+        return ENCODE_SUCCESS;
     }
 
     private void onOutputAvailable(MediaCodec mediaCodec, int outputBufferId,
@@ -260,13 +341,25 @@
                 return;
             }
         }
+        mNumOutputBuffers++;
+        if (DEBUG) {
+            Log.d(TAG,
+                "In OutputBufferAvailable ,"
+                + " timestamp = " + outputBufferInfo.presentationTimeUs
+                + " size = " + outputBufferInfo.size
+                + " flags = " + outputBufferInfo.flags);
+        }
+
         mStats.addFrameSize(outputBuffer.remaining());
         mediaCodec.releaseOutputBuffer(outputBufferId, false);
         mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+        if (mUseSurface && !mSawOutputEOS) {
+            mSawOutputEOS = (mNumOutputBuffers >= mMinOutputBuffers);
+        }
     }
 
     private void onInputAvailable(MediaCodec mediaCodec, int inputBufferId) throws IOException {
-        if (mSawInputEOS || inputBufferId < 0) {
+        if (mSawInputEOS || inputBufferId < 0 || this.mUseSurface) {
             if (mSawInputEOS) {
                 Log.i(TAG, "Saw input EOS");
             }
@@ -282,6 +375,14 @@
             mSignalledError = true;
             return;
         }
+        if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            info.buf = inputBuffer;
+            info.idx = inputBufferId;
+            info.obj = mediaCodec;
+            mIBufferSend.sendBuffer(this, info);
+            return;
+        }
         int bufSize = inputBuffer.capacity();
         int bytesToRead = mFrameSize;
         if (mInputBufferSize - mOffset < mFrameSize) {
@@ -356,9 +457,11 @@
         mOffset = 0;
         mInputBufferSize = 0;
         mNumInputFrame = 0;
+        mMinOutputBuffers = 0;
         mSawInputEOS = false;
         mSawOutputEOS = false;
         mSignalledError = false;
+        mUseSurface = false;
         mStats.reset();
     }
 }
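An illustrative sketch of the new surface-input flow, using only the methods added above (not part of the change):

    import android.media.MediaFormat;
    import android.view.Surface;
    import com.android.media.benchmark.library.Encoder;
    import java.io.IOException;

    public final class SurfaceEncodeSample {
        // Configure the codec first, fetch its input Surface, then mark the
        // Encoder as surface-driven so encode() skips the ByteBuffer input path.
        public static Surface prepareSurfaceEncoder(Encoder encoder, String codecName,
                MediaFormat encodeFormat, String mime, int minOutputBuffers)
                throws IOException {
            if (encoder.createAndConfigure(codecName, encodeFormat, mime)
                    != Encoder.ENCODE_SUCCESS) {
                return null;
            }
            Surface inputSurface = encoder.getInputSurface();
            encoder.setupEncoder(/* useSurface= */ true, minOutputBuffers);
            return inputSurface;
        }
    }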
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
new file mode 100644
index 0000000..4b9b505
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+import android.media.MediaCodec;
+import android.util.Log;
+import androidx.annotation.NonNull;
+import java.nio.ByteBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+
+public class FrameReleaseQueue {
+    private static final String TAG = "FrameReleaseQueue";
+
+    private MediaCodec mCodec;
+    private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
+    private ReleaseThread mReleaseThread;
+    private boolean doFrameRelease = false;
+    private boolean mRender = false;
+    private int mWaitTime = 40; // milliseconds per frame
+    private int firstReleaseTime = -1;
+    private static final int THRESHOLD_TIME = 5; // milliseconds
+
+    private static class FrameInfo {
+        private int number;
+        private int bufferId;
+        private int displayTime;
+        public FrameInfo(int frameNumber, int frameBufferId, int frameDisplayTime) {
+            this.number = frameNumber;
+            this.bufferId = frameBufferId;
+            this.displayTime = frameDisplayTime;
+        }
+    }
+
+    private class ReleaseThread extends Thread {
+        public void run() {
+            int nextReleaseTime = 0;
+            while (doFrameRelease || mFrameInfoQueue.size() > 0) {
+                FrameInfo curFrameInfo = mFrameInfoQueue.peek();
+                if (curFrameInfo == null) {
+                    nextReleaseTime += mWaitTime;
+                } else {
+                    if (firstReleaseTime == -1) {
+                        firstReleaseTime = getCurSysTime();
+                        nextReleaseTime = firstReleaseTime + mWaitTime;
+                        popAndRelease(curFrameInfo);
+                    } else {
+                        nextReleaseTime += mWaitTime;
+                        int curSysTime = getCurSysTime();
+                        int curMediaTime = curSysTime - firstReleaseTime;
+                        while (curFrameInfo != null && curFrameInfo.displayTime <= curMediaTime) {
+                            if (!((curMediaTime - curFrameInfo.displayTime) < THRESHOLD_TIME)) {
+                                Log.d(TAG, "Dropping expired frame " + curFrameInfo.number);
+                            }
+                            popAndRelease(curFrameInfo);
+                            curFrameInfo = mFrameInfoQueue.peek();
+                        }
+                        if (curFrameInfo != null && curFrameInfo.displayTime > curMediaTime) {
+                            if ((curFrameInfo.displayTime - curMediaTime) < THRESHOLD_TIME) {
+                                popAndRelease(curFrameInfo);
+                            }
+                        }
+                    }
+                }
+                int sleepTime = nextReleaseTime - getCurSysTime();
+                if (sleepTime > 0) {
+                    try {
+                        Thread.sleep(sleepTime);
+                    } catch (InterruptedException e) {
+                        Log.e(TAG, "Threw InterruptedException on sleep");
+                    }
+                } else {
+                    Log.d(TAG, "Thread sleep time less than 1");
+                }
+            }
+        }
+    }
+
+    public FrameReleaseQueue(boolean render, int frameRate) {
+        this.mFrameInfoQueue = new LinkedBlockingQueue<>();
+        this.mReleaseThread = new ReleaseThread();
+        this.doFrameRelease = true;
+        this.mRender = render;
+        this.mWaitTime = (int)(1.0f/frameRate * 1000); // wait time in milliseconds per frame
+        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+    }
+
+    public void setMediaCodec(MediaCodec mediaCodec) {
+        this.mCodec = mediaCodec;
+    }
+
+    public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
+        int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
+        FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
+        boolean pushSuccess = mFrameInfoQueue.offer(curFrameInfo);
+        if (!pushSuccess) {
+            Log.e(TAG, "Failed to push frame with buffer id " + curFrameInfo.bufferId);
+            return false;
+        }
+        if (!mReleaseThread.isAlive()) {
+            mReleaseThread.start();
+            Log.i(TAG, "Started frame release thread");
+        }
+        return true;
+    }
+
+    private int getCurSysTime() {
+        return (int)(System.nanoTime()/1000000);
+    }
+
+    private void popAndRelease(FrameInfo curFrameInfo) {
+        try {
+            curFrameInfo = mFrameInfoQueue.take();
+        } catch (InterruptedException e) {
+            Log.e(TAG, "Threw InterruptedException on take");
+        }
+        try {
+            mCodec.releaseOutputBuffer(curFrameInfo.bufferId, mRender);
+        } catch (IllegalStateException e) {
+            Log.e(TAG,
+                    "Threw IllegalStateException on releaseOutputBuffer for frame "
+                            + curFrameInfo.number);
+        }
+    }
+
+    public void stopFrameRelease() {
+        doFrameRelease = false;
+        try {
+            mReleaseThread.join();
+            Log.i(TAG, "Joined frame release thread");
+        } catch (InterruptedException e) {
+            Log.e(TAG, "Threw InterruptedException on thread join");
+        }
+    }
+}
+
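A small usage sketch of the queue from a decoder's output path (illustrative only, not part of the change):

    import android.media.MediaCodec;
    import com.android.media.benchmark.library.FrameReleaseQueue;

    public final class FrameReleaseSample {
        // Release output buffers close to their presentation time at ~30 fps.
        private final FrameReleaseQueue mQueue =
                new FrameReleaseQueue(/* render= */ true, /* frameRate= */ 30);

        public void attach(MediaCodec codec) {
            mQueue.setMediaCodec(codec);
        }

        // Called from onOutputBufferAvailable(): queue the buffer instead of
        // releasing it immediately; the release thread renders it on schedule.
        public void onOutput(int frameNumber, int bufferId, long presentationTimeUs) {
            mQueue.pushFrame(frameNumber, bufferId, presentationTimeUs);
        }

        public void finish() {
            // Drains any queued frames and joins the release thread.
            mQueue.stopFrameRelease();
        }
    }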
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
new file mode 100644
index 0000000..a75962c
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+import android.media.MediaCodec;
+import java.nio.ByteBuffer;
+/**
+ * Interfaces used to hand buffers to an external buffer manager
+ * and receive them back through callbacks.
+ */
+public class IBufferXfer {
+  static class BufferXferInfo {
+      public ByteBuffer buf;
+      public int idx;
+      public Object obj;
+      int flag;
+      int bytesRead;
+      long presentationTimeUs;
+  }
+
+  public interface IReceiveBuffer {
+      // Implemented by the sender of a buffer to get that buffer back.
+      boolean receiveBuffer(BufferXferInfo info);
+      // Establishes a connection between the buffer sender and the receiver.
+      // Implemented by the entity that sends the buffers to the receiver;
+      // receiverInterface is the receiver-side interface, which the sender
+      // uses to hand buffers over.
+      boolean connect(IBufferXfer.ISendBuffer receiverInterface);
+  }
+  // Implemented by an entity that does not own the buffers and only
+  // manages them (usually the receiver). The receiver uses returnIface
+  // to return the buffers to the sender.
+  public interface ISendBuffer {
+      boolean sendBuffer(IBufferXfer.IReceiveBuffer returnIface,
+                              BufferXferInfo info);
+  }
+}
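A minimal, hypothetical IReceiveBuffer implementation for illustration (kept in the same package because BufferXferInfo is package-private); it simply releases every codec output buffer it is handed without rendering. It is not part of this change.

    package com.android.media.benchmark.library;

    import android.media.MediaCodec;

    // Illustrative sketch only: a sink that drops (does not render) every buffer.
    public final class DiscardingReceiver implements IBufferXfer.IReceiveBuffer {
        private IBufferXfer.ISendBuffer mSender = null;

        @Override
        public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
            // The sender stores its MediaCodec in info.obj and the buffer index in info.idx.
            ((MediaCodec) info.obj).releaseOutputBuffer(info.idx, /* render= */ false);
            return true;
        }

        @Override
        public boolean connect(IBufferXfer.ISendBuffer sender) {
            mSender = sender;
            return true;
        }
    }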
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
new file mode 100644
index 0000000..ab55df5
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+/**
+ * Manages buffer transfers between a producer and a consumer.
+ */
+import com.android.media.benchmark.library.IBufferXfer;
+import java.util.ArrayDeque;
+import android.util.Log;
+public class IBufferXferImpl implements IBufferXfer.ISendBuffer {
+
+  private static class BufferInfo {
+      public IBufferXfer.IReceiveBuffer rIface;
+      public IBufferXfer.BufferXferInfo info;
+  }
+  private final String TAG = "IBufferXferImpl";
+  private final ArrayDeque<BufferInfo> mProducerQueue = new ArrayDeque<>();
+  private final ArrayDeque<BufferInfo> mConsumerQueue = new ArrayDeque<>();
+  private IBufferXfer.IReceiveBuffer mProducer = null;
+  private IBufferXfer.IReceiveBuffer mConsumer = null;
+  private final Object mLock = new Object();
+
+  public IBufferXferImpl(IBufferXfer.IReceiveBuffer producer,
+      IBufferXfer.IReceiveBuffer consumer) {
+      mProducer = producer;
+      mConsumer = consumer;
+      // Attach this to be their receiver
+      mProducer.connect(this);
+      mConsumer.connect(this);
+  }
+  @Override
+  public boolean sendBuffer(IBufferXfer.IReceiveBuffer rIface,
+                     IBufferXfer.BufferXferInfo bufferInfo) {
+      if (rIface != mProducer && rIface != mConsumer) {
+         Log.e(TAG, "Interfaces does not match");
+        return false;
+      }
+      boolean status = true;
+      BufferInfo pBuf = null, cBuf = null;
+      synchronized(mLock) {
+          // see which interface this buffer belongs to
+          // producer has a filled buffer and the consumer
+          // buffer needs to be filled.
+          if (rIface == mProducer) {
+              if (mConsumerQueue.size() > 0) {
+                  cBuf = mConsumerQueue.remove();
+                  pBuf = new BufferInfo();
+                  pBuf.rIface = rIface;
+                  pBuf.info = bufferInfo;
+              } else {
+                  BufferInfo info = new BufferInfo();
+                  info.rIface = rIface;
+                  info.info = bufferInfo;
+                  mProducerQueue.add(info);
+              }
+          } else if (rIface == mConsumer) {
+              if (mProducerQueue.size() > 0) {
+                  pBuf = mProducerQueue.remove();
+                  cBuf = new BufferInfo();
+                  cBuf.rIface = rIface;
+                  cBuf.info = bufferInfo;
+              } else {
+                  BufferInfo info = new BufferInfo();
+                  info.rIface = rIface;
+                  info.info = bufferInfo;
+                  mConsumerQueue.add(info);
+              }
+          } else {
+              status = false;
+          }
+      }
+
+      if (pBuf != null && cBuf != null) {
+          int bytesRead = 0;
+          if (cBuf.info.buf != null && pBuf.info.buf != null) {
+              if (cBuf.info.buf.remaining() >= pBuf.info.buf.remaining()) {
+                  bytesRead = pBuf.info.buf.remaining();
+                  cBuf.info.buf.put(pBuf.info.buf);
+              } else {
+                  Log.e(TAG, "Something is wrong with the sizes P:" +
+                      pBuf.info.buf.remaining() +" C:" + cBuf.info.buf.remaining());
+              }
+          }
+          cBuf.info.bytesRead = bytesRead;
+          cBuf.info.presentationTimeUs = pBuf.info.presentationTimeUs;
+          cBuf.info.flag = pBuf.info.flag;
+
+          if (pBuf.rIface != null) {
+              pBuf.rIface.receiveBuffer(pBuf.info);
+          }
+          if (cBuf.rIface != null) {
+              cBuf.rIface.receiveBuffer(cBuf.info);
+          }
+      }
+      return status;
+  }
+  public boolean resetAll() {
+      synchronized(mLock) {
+          while (mProducerQueue.size() > 0) {
+              BufferInfo info = mProducerQueue.remove();
+              info.rIface.receiveBuffer(info.info);
+          }
+          while (mConsumerQueue.size() > 0) {
+              BufferInfo info = mConsumerQueue.remove();
+              info.rIface.receiveBuffer(info.info);
+          }
+          mProducer = null;
+          mConsumer = null;
+      }
+      return true;
+  }
+}
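An illustrative sketch of how the manager can pair the two codecs above, since Decoder and Encoder both implement IBufferXfer.IReceiveBuffer (not part of the change):

    import com.android.media.benchmark.library.Decoder;
    import com.android.media.benchmark.library.Encoder;
    import com.android.media.benchmark.library.IBufferXferImpl;

    public final class TranscodePipelineSample {
        // The Decoder produces filled output buffers and the Encoder consumes them
        // into its input buffers; IBufferXferImpl pairs the two queues and copies data.
        public static IBufferXferImpl connect(Decoder decoder, Encoder encoder) {
            return new IBufferXferImpl(decoder, encoder);
        }
    }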
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
index 7245a3a..0ebf798 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
@@ -33,7 +33,17 @@
     private long mDeInitTimeNs;
     private long mStartTimeNs;
     private ArrayList<Integer> mFrameSizes;
+    /*
+     * Holds the wall-clock time at which each
+     * input buffer became available.
+     */
     private ArrayList<Long> mInputTimer;
+    /*
+     * Holds the wall-clock time at which each
+     * output buffer became available.
+     * Used for determining the decoded
+     * frame intervals.
+     */
     private ArrayList<Long> mOutputTimer;
 
     public Stats() {
@@ -76,9 +86,15 @@
 
     public long getDeInitTime() { return mDeInitTimeNs; }
 
+    public long getStartTime() { return mStartTimeNs; }
+
+    public ArrayList<Long> getOutputTimers() { return mOutputTimer; }
+
+    public ArrayList<Long> getInputTimers() { return mInputTimer; }
+
     public long getTimeDiff(long sTime, long eTime) { return (eTime - sTime); }
 
-    private long getTotalTime() {
+    public long getTotalTime() {
         if (mOutputTimer.size() == 0) {
             return -1;
         }
@@ -86,7 +102,7 @@
         return lastTime - mStartTimeNs;
     }
 
-    private long getTotalSize() {
+    public long getTotalSize() {
         long totalSize = 0;
         for (long size : mFrameSizes) {
             totalSize += size;
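An illustrative sketch of how a caller can use the getters made public here to derive summary numbers (not part of the change):

    import com.android.media.benchmark.library.Stats;
    import java.util.ArrayList;

    public final class StatsReportSample {
        // Derives throughput and average output interval from the public getters.
        public static void report(Stats stats) {
            long totalTimeNs = stats.getTotalTime();    // last output time - start time
            long totalSizeBytes = stats.getTotalSize(); // sum of output frame sizes
            ArrayList<Long> outputTimesNs = stats.getOutputTimers();
            if (totalTimeNs > 0 && !outputTimesNs.isEmpty()) {
                double bytesPerSec = totalSizeBytes * 1e9 / totalTimeNs;
                double avgIntervalNs = (double) totalTimeNs / outputTimesNs.size();
                System.out.println("throughput = " + bytesPerSec + " bytes/sec, "
                        + "average output interval = " + avgIntervalNs + " ns");
            }
        }
    }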
diff --git a/media/tests/benchmark/src/native/common/Stats.cpp b/media/tests/benchmark/src/native/common/Stats.cpp
index bfde125..d55a22d 100644
--- a/media/tests/benchmark/src/native/common/Stats.cpp
+++ b/media/tests/benchmark/src/native/common/Stats.cpp
@@ -35,13 +35,18 @@
  * \param mode           the operating mode: sync/async.
  * \param statsFile      the file where the stats data is to be written.
  */
-void Stats::dumpStatistics(string operation, string inputReference, int64_t durationUs,
-                           string componentName, string mode, string statsFile) {
+void Stats::dumpStatistics(const string& operation, const string& inputReference,
+                           int64_t durationUs, const string& componentName,
+                           const string& mode, const string& statsFile) {
     ALOGV("In %s", __func__);
     if (!mOutputTimer.size()) {
         ALOGE("No output produced");
         return;
     }
+    if (statsFile.empty()) {
+        return uploadMetrics(operation, inputReference, durationUs, componentName,
+                              mode);
+    }
     nsecs_t totalTimeTakenNs = getTotalTime();
     nsecs_t timeTakenPerSec = (totalTimeTakenNs * 1000000) / durationUs;
     nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
@@ -87,3 +92,67 @@
     out << rowData;
     out.close();
 }
+
+/**
+ * Uploads the stats of the operation for a given input media to a metrics listener via logcat.
+ *
+ * \param operation      describes the operation performed on the input media
+ *                       (i.e. extract/mux/decode/encode)
+ * \param inputReference input media
+ * \param durationUs     duration of the input media in microseconds.
+ * \param componentName  describes the codecName/muxFormat/mimeType.
+ * \param mode           the operating mode: sync/async.
+ *
+ */
+
+#define LOG_METRIC(...) \
+    __android_log_print(ANDROID_LOG_INFO, "ForTimingCollector", __VA_ARGS__)
+
+void Stats::uploadMetrics(const string& operation, const string& inputReference,
+                          const int64_t& durationUs, const string& componentName,
+                          const string& mode) {
+
+    ALOGV("In %s", __func__);
+    (void)durationUs;
+    (void)componentName;
+    if (!mOutputTimer.size()) {
+        ALOGE("No output produced");
+        return;
+    }
+    nsecs_t totalTimeTakenNs = getTotalTime();
+    nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
+    int32_t size = std::accumulate(mFrameSizes.begin(), mFrameSizes.end(), 0);
+    // get min and max output intervals.
+    nsecs_t intervalNs;
+    nsecs_t minTimeTakenNs = INT64_MAX;
+    nsecs_t maxTimeTakenNs = 0;
+    nsecs_t prevIntervalNs = mStartTimeNs;
+    for (int32_t idx = 0; idx < mOutputTimer.size() - 1; idx++) {
+        intervalNs = mOutputTimer.at(idx) - prevIntervalNs;
+        prevIntervalNs = mOutputTimer.at(idx);
+        if (minTimeTakenNs > intervalNs) minTimeTakenNs = intervalNs;
+        else if (maxTimeTakenNs < intervalNs) maxTimeTakenNs = intervalNs;
+    }
+
+    // Compute the overall throughput to report.
+    int64_t dataSize = size;
+    int64_t bytesPerSec = ((int64_t)dataSize * 1000000000) / totalTimeTakenNs;
+    (void)mode;
+    (void)operation;
+    (void)inputReference;
+    string prefix = "CodecStats_NativeDec";
+    prefix.append("_").append(componentName);
+    // Reports the time taken to initialize the codec.
+    LOG_METRIC("%s_CodecInitTimeNs:%lld", prefix.c_str(), (long long)mInitTimeNs);
+    // Reports the time taken to free the codec.
+    LOG_METRIC("%s_CodecDeInitTimeNs:%lld", prefix.c_str(), (long long)mDeInitTimeNs);
+    // Reports the min time taken between output frames from the codec
+    LOG_METRIC("%s_CodecMinTimeNs:%lld", prefix.c_str(), (long long)minTimeTakenNs);
+    // Reports the max time between the output frames from the codec
+    LOG_METRIC("%s_CodecMaxTimeNs:%lld", prefix.c_str(), (long long)maxTimeTakenNs);
+    // Reports the raw throughput (bytes/sec) of the codec for the entire media
+    LOG_METRIC("%s_ProcessedBytesPerSec:%lld", prefix.c_str(), (long long)bytesPerSec);
+    // Reports the time taken to get the first frame from the codec
+    LOG_METRIC("%s_TimeforFirstFrame:%lld", prefix.c_str(), (long long)timeToFirstFrameNs);
+
+}
diff --git a/media/tests/benchmark/src/native/common/Stats.h b/media/tests/benchmark/src/native/common/Stats.h
index 18e4b06..0ba511f 100644
--- a/media/tests/benchmark/src/native/common/Stats.h
+++ b/media/tests/benchmark/src/native/common/Stats.h
@@ -102,8 +102,12 @@
         return (*(mOutputTimer.end() - 1) - mStartTimeNs);
     }
 
-    void dumpStatistics(string operation, string inputReference, int64_t duarationUs,
-                        string codecName = "", string mode = "", string statsFile = "");
-};
+    void dumpStatistics(const string& operation, const string& inputReference,
+                        int64_t durationUs, const string& componentName = "",
+                        const string& mode = "", const string& statsFile = "");
 
+    void uploadMetrics(const string& operation, const string& inputReference,
+                      const int64_t& durationUs, const string& componentName = "",
+                      const string& mode = "");
+};
 #endif  // __STATS_H__
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index a38ef57..698752f 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -21,17 +21,34 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library {
-    name: "libmediautils",
+cc_defaults {
+    name: "libmediautils_defaults",
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    sanitize: {
+        cfi: true,
+        integer_overflow: true,
+    },
+    target: {
+        host: {
+            sanitize: {
+                cfi: false,
+            },
+        },
+    },
+}
 
+filegroup {
+    name: "libmediautils_core_srcs",
     srcs: [
         "AImageReaderUtils.cpp",
         "BatteryNotifier.cpp",
         "ISchedulingPolicyService.cpp",
         "Library.cpp",
-        "LimitProcessMemory.cpp",
         "MediaUtilsDelayed.cpp",
-        "MemoryLeakTrackUtil.cpp",
         "MethodStatistics.cpp",
         "Process.cpp",
         "ProcessInfo.cpp",
@@ -41,20 +58,41 @@
         "TimeCheck.cpp",
         "TimerThread.cpp",
     ],
+}
+
+cc_library_headers {
+    name: "libmediautils_headers",
+    host_supported: true,
+    vendor_available: true, // required for platform/hardware/interfaces
+    shared_libs: [
+        "liblog",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+
+cc_library {
+    name: "libmediautils",
+    host_supported: true,
+    defaults: ["libmediautils_defaults"],
+    srcs: [
+        ":libmediautils_core_srcs",
+    ],
     static_libs: [
-        "libc_malloc_debug_backtrace",
         "libbatterystats_aidl",
         "libprocessinfoservice_aidl",
     ],
     shared_libs: [
         "libaudioclient_aidl_conversion",
         "libaudioutils", // for clock.h, Statistics.h
+        "libbase",
         "libbinder",
         "libcutils",
-        "liblog",
-        "libutils",
         "libhidlbase",
+        "liblog",
         "libpermission",
+        "libutils",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
         "packagemanager_aidl-cpp",
@@ -65,43 +103,44 @@
 
     logtags: ["EventLogTags.logtags"],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
-    header_libs: [
-        "bionic_libc_platform_headers",
-        "libmedia_headers",
-    ],
-
     export_shared_lib_headers: [
         "libpermission",
     ],
 
     required: [
-        "libmediautils_delayed",  // lazy loaded
+        "libmediautils_delayed", // lazy loaded
     ],
 
-    include_dirs: [
-        // For DEBUGGER_SIGNAL
-        "system/core/debuggerd/include",
-    ],
+    target: {
+        android: {
+            srcs: [
+                "LimitProcessMemory.cpp",
+                "MemoryLeakTrackUtil.cpp",
+            ],
+            static_libs: [
+                "libc_malloc_debug_backtrace",
+            ],
+            include_dirs: [
+                // For DEBUGGER_SIGNAL
+                "system/core/debuggerd/include",
+            ],
+            header_libs: [
+                "bionic_libc_platform_headers",
+            ],
+        },
+    },
+
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
 
 cc_library {
     name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+    host_supported: true,
+    defaults: ["libmediautils_defaults"],
     srcs: [
         "MediaUtilsDelayedLibrary.cpp",
     ],
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -111,16 +150,12 @@
 
 cc_library {
     name: "libmediautils_vendor",
-    vendor_available: true,  // required for platform/hardware/interfaces
+    defaults: ["libmediautils_defaults"],
+    vendor_available: true, // required for platform/hardware/interfaces
     srcs: [
         "MemoryLeakTrackUtil.cpp",
     ],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -137,23 +172,3 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
-
-
-cc_library_headers {
-    name: "libmediautils_headers",
-    vendor_available: true,  // required for platform/hardware/interfaces
-
-    export_include_dirs: ["include"],
-}
-
-cc_test {
-    name: "libmediautils_test",
-    srcs: [
-        "memory-test.cpp",
-        "TimerThread-test.cpp",
-    ],
-    shared_libs: [
-      "libmediautils",
-      "libutils",
-    ]
-}
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index da199c4..3baa4b4 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "ProcessInfo"
 #include <utils/Log.h>
 
-#include <media/stagefright/ProcessInfo.h>
+#include <mediautils/ProcessInfo.h>
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 6823f4f..0cf5bd9 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -14,20 +14,36 @@
  * limitations under the License.
  */
 
+#include <csignal>
+#include "mediautils/TimerThread.h"
 #define LOG_TAG "TimeCheck"
 
 #include <optional>
 
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 #include <audio_utils/clock.h>
 #include <mediautils/EventLog.h>
 #include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
 #include <mediautils/TimeCheck.h>
+#include <mediautils/TidWrapper.h>
 #include <utils/Log.h>
+
+#if defined(__ANDROID__)
 #include "debuggerd/handler.h"
+#endif
+
 
 namespace android::mediautils {
+// Signals the given audio HAL pid to dump a backtrace when running on an
+// Android device (where the HAL exists). When not running on an Android
+// device, there is no HAL to signal, so this is a no-op.
+static inline void signalAudioHAL([[maybe_unused]] pid_t pid) {
+#if defined(__ANDROID__)
+    sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+#endif
+}
 
 /**
  * Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
@@ -136,14 +152,14 @@
 std::string TimeCheck::toString() {
     // note pending and retired are individually locked for maximum concurrency,
     // snapshot is not instantaneous at a single time.
-    return getTimeCheckThread().toString();
+    return getTimeCheckThread().getSnapshotAnalysis().toString();
 }
 
 TimeCheck::TimeCheck(std::string_view tag, OnTimerFunc&& onTimer, Duration requestedTimeoutDuration,
         Duration secondChanceDuration, bool crashOnTimeout)
     : mTimeCheckHandler{ std::make_shared<TimeCheckHandler>(
             tag, std::move(onTimer), crashOnTimeout, requestedTimeoutDuration,
-            secondChanceDuration, std::chrono::system_clock::now(), gettid()) }
+            secondChanceDuration, std::chrono::system_clock::now(), getThreadIdWrapper()) }
     , mTimerHandle(requestedTimeoutDuration.count() == 0
               /* for TimeCheck we don't consider a non-zero secondChanceDuration here */
               ? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
@@ -241,7 +257,7 @@
 
     // Generate the TimerThread summary string early before sending signals to the
     // HAL processes which can affect thread behavior.
-    const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
+    const auto snapshotAnalysis = getTimeCheckThread().getSnapshotAnalysis(4 /* retiredCount */);
 
     // Generate audio HAL processes tombstones and allow time to complete
     // before forcing restart
@@ -251,7 +267,7 @@
         for (const auto& pid : pids) {
             ALOGI("requesting tombstone for pid: %d", pid);
             halPids.append(std::to_string(pid)).append(" ");
-            sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+            signalAudioHAL(pid);
         }
         sleep(1);
     } else {
@@ -269,7 +285,7 @@
             .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
                     elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
-            .append(summary);
+            .append(snapshotAnalysis.toString());
 
     // Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
     // Log message text may be truncated to less than an
@@ -279,7 +295,20 @@
     // to avoid the size limitation. LOG(FATAL) does an abort whereas
     // LOG(FATAL_WITHOUT_ABORT) does not abort.
 
-    LOG(FATAL) << abortMessage;
+    static constexpr pid_t invalidPid = TimerThread::SnapshotAnalysis::INVALID_PID;
+    pid_t tidToAbort = invalidPid;
+    if (snapshotAnalysis.suspectTid != invalidPid) {
+        tidToAbort = snapshotAnalysis.suspectTid;
+    } else if (snapshotAnalysis.timeoutTid != invalidPid) {
+        tidToAbort = snapshotAnalysis.timeoutTid;
+    }
+
+    LOG(FATAL_WITHOUT_ABORT) << abortMessage;
+    const auto ret = abortTid(tidToAbort);
+    if (ret < 0) {
+        LOG(FATAL) << "TimeCheck thread signal failed, aborting process. "
+                       "errno: " << errno << base::ErrnoNumberAsString(errno);
+    }
 }
 
 // Automatically create a TimeCheck class for a class and method.
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 5e58a3d..b760ee2 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -22,6 +22,7 @@
 #include <vector>
 
 #include <mediautils/MediaUtilsDelayed.h>
+#include <mediautils/TidWrapper.h>
 #include <mediautils/TimerThread.h>
 #include <utils/Log.h>
 #include <utils/ThreadDefs.h>
@@ -39,14 +40,14 @@
     const auto now = std::chrono::system_clock::now();
     auto request = std::make_shared<const Request>(now, now +
             std::chrono::duration_cast<std::chrono::system_clock::duration>(timeoutDuration),
-            secondChanceDuration, gettid(), tag);
+            secondChanceDuration, getThreadIdWrapper(), tag);
     return mMonitorThread.add(std::move(request), std::move(func), timeoutDuration);
 }
 
 TimerThread::Handle TimerThread::trackTask(std::string_view tag) {
     const auto now = std::chrono::system_clock::now();
     auto request = std::make_shared<const Request>(now, now,
-            Duration{} /* secondChanceDuration */, gettid(), tag);
+            Duration{} /* secondChanceDuration */, getThreadIdWrapper(), tag);
     return mNoTimeoutMap.add(std::move(request));
 }
 
@@ -58,39 +59,29 @@
     return true;
 }
 
-std::string TimerThread::toString(size_t retiredCount) const {
+
+std::string TimerThread::SnapshotAnalysis::toString() const {
     // Note: These request queues are snapshot very close together but
     // not at "identical" times as we don't use a class-wide lock.
-
-    std::vector<std::shared_ptr<const Request>> timeoutRequests;
-    std::vector<std::shared_ptr<const Request>> retiredRequests;
-    mTimeoutQueue.copyRequests(timeoutRequests);
-    mRetiredQueue.copyRequests(retiredRequests, retiredCount);
-    std::vector<std::shared_ptr<const Request>> pendingRequests =
-        getPendingRequests();
-
-    struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
-    std::string analysisSummary;
-    if (!analysis.summary.empty()) {
-        analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
-    }
+    std::string analysisSummary = std::string("\nanalysis [ ").append(description).append(" ]");
     std::string timeoutStack;
-    if (analysis.timeoutTid != -1) {
-        timeoutStack = std::string("\ntimeout(")
-                .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
-                .append(getCallStackStringForTid(analysis.timeoutTid)).append("]");
-    }
     std::string blockedStack;
-    if (analysis.HALBlockedTid != -1) {
+    if (timeoutTid != -1) {
+        timeoutStack = std::string(suspectTid == timeoutTid ? "\ntimeout/blocked(" : "\ntimeout(")
+                .append(std::to_string(timeoutTid)).append(") callstack [\n")
+                .append(getCallStackStringForTid(timeoutTid)).append("]");
+    }
+
+    if (suspectTid != -1 && suspectTid != timeoutTid) {
         blockedStack = std::string("\nblocked(")
-                .append(std::to_string(analysis.HALBlockedTid)).append(")  callstack [\n")
-                .append(getCallStackStringForTid(analysis.HALBlockedTid)).append("]");
+                .append(std::to_string(suspectTid)).append(")  callstack [\n")
+                .append(getCallStackStringForTid(suspectTid)).append("]");
     }
 
     return std::string("now ")
             .append(formatTime(std::chrono::system_clock::now()))
             .append("\nsecondChanceCount ")
-            .append(std::to_string(mMonitorThread.getSecondChanceCount()))
+            .append(std::to_string(secondChanceCount))
             .append(analysisSummary)
             .append("\ntimeout [ ")
             .append(requestsToString(timeoutRequests))
@@ -120,16 +111,23 @@
     return separatorPos != std::string::npos;
 }
 
-/* static */
-struct TimerThread::Analysis TimerThread::analyzeTimeout(
-    const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
-    const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
-
-    if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
-
+struct TimerThread::SnapshotAnalysis TimerThread::getSnapshotAnalysis(size_t retiredCount) const {
+    struct SnapshotAnalysis analysis{};
+    // The following snapshot of the TimerThread state will be utilized for
+    // analysis. Note, there is no lock around these calls, so there could be
+    // a state update between them.
+    mTimeoutQueue.copyRequests(analysis.timeoutRequests);
+    mRetiredQueue.copyRequests(analysis.retiredRequests, retiredCount);
+    analysis.pendingRequests = getPendingRequests();
+    analysis.secondChanceCount = mMonitorThread.getSecondChanceCount();
+    // No call has timed out, so there is no analysis to be done.
+    if (analysis.timeoutRequests.empty())
+        return analysis;
     // for now look at last timeout (in our case, the only timeout)
-    const std::shared_ptr<const Request> timeout = timeoutRequests.back();
-
+    const std::shared_ptr<const Request> timeout = analysis.timeoutRequests.back();
+    analysis.timeoutTid = timeout->tid;
+    if (analysis.pendingRequests.empty())
+        return analysis;
     // pending Requests that are problematic.
     std::vector<std::shared_ptr<const Request>> pendingExact;
     std::vector<std::shared_ptr<const Request>> pendingPossible;
@@ -140,7 +138,7 @@
     // such as HAL write() and read().
     //
     constexpr Duration kPendingDuration = 1000ms;
-    for (const auto& pending : pendingRequests) {
+    for (const auto& pending : analysis.pendingRequests) {
         // If the pending tid is the same as timeout tid, problem identified.
         if (pending->tid == timeout->tid) {
             pendingExact.emplace_back(pending);
@@ -153,29 +151,27 @@
         }
     }
 
-    struct Analysis analysis{};
-
-    analysis.timeoutTid = timeout->tid;
-    std::string& summary = analysis.summary;
+    std::string& description = analysis.description;
     if (!pendingExact.empty()) {
         const auto& request = pendingExact.front();
         const bool hal = isRequestFromHal(request);
 
         if (hal) {
-            summary = std::string("Blocked directly due to HAL call: ")
+            description = std::string("Blocked directly due to HAL call: ")
                 .append(request->toString());
+            analysis.suspectTid = request->tid;
         }
     }
-    if (summary.empty() && !pendingPossible.empty()) {
+    if (description.empty() && !pendingPossible.empty()) {
         for (const auto& request : pendingPossible) {
             const bool hal = isRequestFromHal(request);
             if (hal) {
                 // The first blocked call is the most likely one.
                 // Recent calls might be temporarily blocked
                 // calls such as write() or read() depending on kDuration.
-                summary = std::string("Blocked possibly due to HAL call: ")
+                description = std::string("Blocked possibly due to HAL call: ")
                     .append(request->toString());
-                analysis.HALBlockedTid = request->tid;
+                analysis.suspectTid = request->tid;
             }
        }
     }
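The classification above (exact-tid match versus long-pending HAL calls) can be summarized with a simplified, self-contained sketch; the struct and function names are hypothetical and the recency check only approximates the kPendingDuration handling.

    #include <chrono>
    #include <vector>

    // Hypothetical simplified stand-in for TimerThread::Request.
    struct PendingCall {
        int tid;
        bool fromHal;
        std::chrono::system_clock::time_point scheduled;
    };

    // Returns the tid most likely blocking the timed-out thread, or -1 if unknown.
    int findSuspectTid(const std::vector<PendingCall>& pending, int timeoutTid,
                       std::chrono::milliseconds recentWindow) {
        int suspect = -1;
        const auto now = std::chrono::system_clock::now();
        for (const auto& p : pending) {
            if (!p.fromHal) continue;               // only HAL calls are suspects
            if (p.tid == timeoutTid) return p.tid;  // blocked directly on a HAL call
            if (suspect == -1 && now - p.scheduled > recentWindow) {
                suspect = p.tid;                    // first long-pending HAL call found
            }
        }
        return suspect;
    }
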
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index d26e6c2..bd9a462 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -9,14 +9,13 @@
 
 cc_defaults {
     name: "libmediautils_fuzzer_defaults",
+    host_supported: true,
     shared_libs: [
-        "libbatterystats_aidl",
         "libbinder",
-        "libcutils",
         "liblog",
+        "libcutils",
         "libmediautils",
         "libutils",
-        "libbinder",
         "framework-permission-aidl-cpp",
         "packagemanager_aidl-cpp",
     ],
@@ -27,11 +26,6 @@
         "-Werror",
         "-Wno-c++2a-extensions",
     ],
-
-    header_libs: [
-        "bionic_libc_platform_headers",
-        "libmedia_headers",
-    ],
 }
 
 cc_fuzz {
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 32fc3be..d672fb0 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -19,6 +19,7 @@
 #include <utils/String16.h>
 #include <android/log.h>
 #include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TidWrapper.h>
 #include "fuzzer/FuzzedDataProvider.h"
 using android::IBatteryStats;
 using android::IBinder;
@@ -55,7 +56,8 @@
     int32_t priority = data_provider.ConsumeIntegral<int32_t>();
     bool is_for_app = data_provider.ConsumeBool();
     bool async = data_provider.ConsumeBool();
-    requestPriority(getpid(), gettid(), priority, is_for_app, async);
+    requestPriority(getpid(), android::mediautils::getThreadIdWrapper(), priority, is_for_app,
+                    async);
     // TODO: Verify and re-enable in AOSP (R).
     // bool enable = data_provider.ConsumeBool();
     // We are just using batterystats to avoid the need
diff --git a/media/utils/include/mediautils/ExtendedAccumulator.h b/media/utils/include/mediautils/ExtendedAccumulator.h
new file mode 100644
index 0000000..7e3e170
--- /dev/null
+++ b/media/utils/include/mediautils/ExtendedAccumulator.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <cstdint>
+#include <tuple>
+#include <type_traits>
+
+#include <log/log.h>
+
+namespace android::mediautils {
+
+// The goal of this class is to detect and accumulate wraparound occurrences on a
+// lower sized integer.
+
+// This class assumes that the underlying unsigned type is either incremented or
+// decremented by at most the underlying signed type between any two subsequent
+// polls (or construction). This is well-defined as the modular nature of
+// unsigned arithmetic ensures that every new value maps 1-1 to an
+// increment/decrement over the same sized signed type. It also ensures that our
+// counter will be equivalent mod the size of the integer even if the underlying
+// type is modified outside of this range.
+//
+// For convenience, this class is thread compatible. Additionally, it is safe
+// as long as there is only one writer.
+template <typename Integral = uint32_t, typename AccumulatingType = uint64_t>
+class ExtendedAccumulator {
+    static_assert(sizeof(Integral) < sizeof(AccumulatingType),
+                  "Accumulating type should be larger than underlying type");
+    static_assert(std::is_integral_v<Integral> && std::is_unsigned_v<Integral>,
+                  "Wraparound behavior is only well-defiend for unsigned ints");
+    static_assert(std::is_integral_v<AccumulatingType>);
+
+  public:
+    enum class Wrap {
+        NORMAL = 0,
+        UNDERFLOW = 1,
+        OVERFLOW = 2,
+    };
+
+    using UnsignedInt = Integral;
+    using SignedInt = std::make_signed_t<UnsignedInt>;
+
+    explicit ExtendedAccumulator(AccumulatingType initial = 0) : mAccumulated(initial) {}
+
+    // Returns a pair of the calculated change on the accumulating value, and a
+    // Wrap type representing the type of wraparound (if any) which occurred.
+    std::pair<SignedInt, Wrap> poll(UnsignedInt value) {
+        auto acc = mAccumulated.load(std::memory_order_relaxed);
+        const auto bottom_bits = static_cast<UnsignedInt>(acc);
+        std::pair<SignedInt, Wrap> res = {0, Wrap::NORMAL};
+        const bool overflow = __builtin_sub_overflow(value, bottom_bits, &res.first);
+
+        if (overflow) {
+            res.second = (res.first > 0) ? Wrap::OVERFLOW : Wrap::UNDERFLOW;
+        }
+
+        const bool acc_overflow = __builtin_add_overflow(acc, res.first, &acc);
+        // If our *accumulating* type overflows or underflows (depending on its
+        // signedness), we should abort.
+        if (acc_overflow) LOG_ALWAYS_FATAL("Unexpected overflow/underflow in %s", __func__);
+
+        mAccumulated.store(acc, std::memory_order_relaxed);
+        return res;
+    }
+
+    AccumulatingType getValue() const { return mAccumulated.load(std::memory_order_relaxed); }
+
+  private:
+    // Invariant - the bottom underlying bits of accumulated are the same as the
+    // last value provided to poll.
+    std::atomic<AccumulatingType> mAccumulated;
+};
+
+}  // namespace android::mediautils
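A minimal usage sketch for the header above, assuming a 32-bit hardware frame counter that may wrap and a single-writer context; the function name is illustrative.

    #include <cstdint>
    #include <mediautils/ExtendedAccumulator.h>

    void pollFrameCounter(uint32_t hwCounter) {
        using Acc = android::mediautils::ExtendedAccumulator<uint32_t, uint64_t>;
        static Acc acc;  // single writer assumed
        const auto [delta, wrap] = acc.poll(hwCounter);
        (void)delta;  // signed change since the previous poll
        if (wrap != Acc::Wrap::NORMAL) {
            // The 64-bit total from acc.getValue() already accounts for the wraparound.
        }
    }
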
diff --git a/media/utils/include/mediautils/InPlaceFunction.h b/media/utils/include/mediautils/InPlaceFunction.h
new file mode 100644
index 0000000..17c6274
--- /dev/null
+++ b/media/utils/include/mediautils/InPlaceFunction.h
@@ -0,0 +1,344 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdlib>
+#include <functional>
+#include <memory>
+#include <type_traits>
+
+namespace android::mediautils {
+
+namespace detail {
+// Vtable interface for erased types
+template <typename Ret, typename... Args>
+struct ICallableTable {
+    // Destroy the erased type
+    void (*destroy)(void* storage) = nullptr;
+    // Call the erased object
+    Ret (*invoke)(void* storage, Args&&...) = nullptr;
+    // **Note** the next two functions only copy object data, not the vptr
+    // Copy the erased object to a new InPlaceFunction buffer
+    void (*copy_to)(const void* storage, void* other) = nullptr;
+    // Move the erased object to a new InPlaceFunction buffer
+    void (*move_to)(void* storage, void* other) = nullptr;
+};
+}  // namespace detail
+
+// This class is an *almost* drop-in replacement for std::function which is guaranteed to never
+// allocate, and always holds the type erased functional object in an in-line small buffer of
+// templated size. If the object is too large to hold, the type will fail to instantiate.
+//
+// Some notable differences are:
+// - operator() is not const (unlike std::function where the call operator is
+// const even if the erased type is not const callable). This retains const
+// correctness by default. A workaround is keeping InPlaceFunction mutable.
+// - Moving from an InPlaceFunction leaves the object in a valid state (operator
+// bool remains true), similar to std::optional/std::variant.
+// Calls to the object are still defined (and are equivalent
+// to calling the underlying type after it has been moved from). To opt-out
+// (and/or ensure safety), clearing the object is recommended:
+//      func1 = std::move(func2); // func2 still valid (and moved-from) after this line
+//      func2 = nullptr; // calling func2 will now abort
+// - Unsafe implicit conversions of the return value to a reference type are
+// prohibited due to the risk of dangling references (some of this safety was
+// added to std::function in c++23). Only converting a reference to a reference to base class is
+// permitted:
+//      std::function<Base&()> = []() -> Derived& {...}
+// - Some (current libc++ implementation) implementations of std::function
+// incorrectly fail to handle returning non-moveable types which is valid given
+// mandatory copy elision.
+//
+// Additionally, the stored functional will use the typical rules of overload
+// resolution to disambiguate the correct call, except, the target class will
+// always be implicitly a non-const lvalue when called. If a different overload
+// is preferred, wrapping the target class in a lambda with explicit casts is
+// recommended (or using inheritance, mixins or CRTP). This avoids the
+// complexity of utilizing abominable function types as template params.
+template <typename, size_t BufferSize = 32>
+class InPlaceFunction;
+// We partially specialize to match types which are spelled like functions
+template <typename Ret, typename... Args, size_t BufferSize>
+class InPlaceFunction<Ret(Args...), BufferSize> {
+  public:
+    // Storage Type Details
+    static constexpr size_t Size = BufferSize;
+    static constexpr size_t Alignment = alignof(std::max_align_t);
+    using Buffer_t = std::aligned_storage_t<Size, Alignment>;
+    template <typename T, size_t Other>
+    friend class InPlaceFunction;
+
+  private:
+    // Callable which is used for empty InPlaceFunction objects (to match the
+    // std::function interface).
+    struct BadCallable {
+        [[noreturn]] Ret operator()(Args...) { std::abort(); }
+    };
+    static_assert(std::is_trivially_destructible_v<BadCallable>);
+
+    // Implementation of vtable interface for erased types.
+    // Contains only static vtable instantiated once for each erased type and
+    // static helpers.
+    template <typename T>
+    struct TableImpl {
+        // T should be a decayed type
+        static_assert(std::is_same_v<T, std::decay_t<T>>);
+
+        // Helper functions to get an unerased reference to the type held in the
+        // buffer. std::launder is required to avoid strict aliasing rules.
+        // The cast is always defined, as a precondition for these calls is that
+        // (exactly) a T was placement new constructed into the buffer.
+        constexpr static T& getRef(void* storage) {
+            return *std::launder(reinterpret_cast<T*>(storage));
+        }
+
+        constexpr static const T& getRef(const void* storage) {
+            return *std::launder(reinterpret_cast<const T*>(storage));
+        }
+
+        // Constexpr implies inline
+        constexpr static detail::ICallableTable<Ret, Args...> table = {
+                // Stateless lambdas are convertible to function ptrs
+                .destroy = [](void* storage) { getRef(storage).~T(); },
+                .invoke = [](void* storage, Args&&... args) -> Ret {
+                    if constexpr (std::is_void_v<Ret>) {
+                        std::invoke(getRef(storage), std::forward<Args>(args)...);
+                    } else {
+                        return std::invoke(getRef(storage), std::forward<Args>(args)...);
+                    }
+                },
+                .copy_to = [](const void* storage,
+                              void* other) { ::new (other) T(getRef(storage)); },
+                .move_to = [](void* storage,
+                              void* other) { ::new (other) T(std::move(getRef(storage))); },
+        };
+    };
+
+    // Check size/align requirements for the T in Buffer_t.
+    template <typename T>
+    static constexpr bool WillFit_v = sizeof(T) <= Size && alignof(T) <= Alignment;
+
+    // Check size/align requirements for a function to function conversion
+    template <typename T>
+    static constexpr bool ConversionWillFit_v = (T::Size < Size) && (T::Alignment <= Alignment);
+
+    template <typename T>
+    struct IsInPlaceFunction : std::false_type {};
+
+    template <size_t BufferSize_>
+    struct IsInPlaceFunction<InPlaceFunction<Ret(Args...), BufferSize_>> : std::true_type {};
+
+    template <typename T>
+    static T BetterDeclval();
+    template <typename T>
+    static void CheckImplicitConversion(T);
+
+    template <class T, class U, class = void>
+    struct CanImplicitConvert : std::false_type {};
+
+    // std::is_convertible/std::is_invocable has a bug (in libc++) regarding
+    // mandatory copy elision for non-moveable types. So, we roll our own.
+    // https://github.com/llvm/llvm-project/issues/55346
+    template <class From, class To>
+    struct CanImplicitConvert<From, To,
+                              decltype(CheckImplicitConversion<To>(BetterDeclval<From>()))>
+        : std::true_type {};
+
+    // Check if the provided type is a valid functional to be type-erased.
+    // if constexpr utilized for short-circuit behavior
+    template <typename T>
+    static constexpr bool isValidFunctional() {
+        using Target = std::decay_t<T>;
+        if constexpr (IsInPlaceFunction<Target>::value || std::is_same_v<Target, std::nullptr_t>) {
+            // Other overloads handle these cases
+            return false;
+        } else if constexpr (std::is_invocable_v<Target, Args...>) {
+            // The target type is a callable (with some unknown return value)
+            if constexpr (std::is_void_v<Ret>) {
+                // Any return value can be dropped to model a void returning
+                // function.
+                return WillFit_v<Target>;
+            } else {
+                using RawRet = std::invoke_result_t<Target, Args...>;
+                if constexpr (CanImplicitConvert<RawRet, Ret>::value) {
+                    if constexpr (std::is_reference_v<Ret>) {
+                        // If the return type is a reference, in order to
+                        // avoid dangling references, we only permit functionals
+                        // which return a reference to the exact type, or a base
+                        // type.
+                        if constexpr (std::is_reference_v<RawRet> &&
+                                      (std::is_same_v<std::decay_t<Ret>, std::decay_t<RawRet>> ||
+                                       std::is_base_of_v<std::decay_t<Ret>,
+                                                         std::decay_t<RawRet>>)) {
+                            return WillFit_v<Target>;
+                        }
+                        return false;
+                    }
+                    return WillFit_v<Target>;
+                }
+                // If we can't convert the raw return type, the functional is invalid.
+                return false;
+            }
+        }
+        return false;
+    }
+
+    template <typename T>
+    static constexpr bool IsValidFunctional_v = isValidFunctional<T>();
+    // Check if the type is a strictly smaller sized InPlaceFunction
+    template <typename T>
+    static constexpr bool isConvertibleFunc() {
+        using Target = std::decay_t<T>;
+        if constexpr (IsInPlaceFunction<Target>::value) {
+            return ConversionWillFit_v<Target>;
+        }
+        return false;
+    }
+
+    template <typename T>
+    static constexpr bool IsConvertibleFunc_v = isConvertibleFunc<T>();
+
+    // Members below
+    // This must come first for alignment
+    Buffer_t storage_;
+    const detail::ICallableTable<Ret, Args...>* vptr_;
+
+    constexpr void copy_to(InPlaceFunction& other) const {
+        vptr_->copy_to(std::addressof(storage_), std::addressof(other.storage_));
+        other.vptr_ = vptr_;
+    }
+
+    constexpr void move_to(InPlaceFunction& other) {
+        vptr_->move_to(std::addressof(storage_), std::addressof(other.storage_));
+        other.vptr_ = vptr_;
+    }
+
+    constexpr void destroy() { vptr_->destroy(std::addressof(storage_)); }
+
+    template <typename T, typename Target = std::decay_t<T>>
+    constexpr void genericInit(T&& t) {
+        vptr_ = &TableImpl<Target>::table;
+        ::new (std::addressof(storage_)) Target(std::forward<T>(t));
+    }
+
+    template <typename T, typename Target = std::decay_t<T>>
+    constexpr void convertingInit(T&& smallerFunc) {
+        // Redundant, but just in case
+        static_assert(Target::Size < Size && Target::Alignment <= Alignment);
+        if constexpr (std::is_lvalue_reference_v<T>) {
+            smallerFunc.vptr_->copy_to(std::addressof(smallerFunc.storage_),
+                                       std::addressof(storage_));
+        } else {
+            smallerFunc.vptr_->move_to(std::addressof(smallerFunc.storage_),
+                                       std::addressof(storage_));
+        }
+        vptr_ = smallerFunc.vptr_;
+    }
+
+  public:
+    // Public interface
+    template <typename T, std::enable_if_t<IsValidFunctional_v<T>>* = nullptr>
+    constexpr InPlaceFunction(T&& t) {
+        genericInit(std::forward<T>(t));
+    }
+
+    // Conversion from smaller functions.
+    template <typename T, std::enable_if_t<IsConvertibleFunc_v<T>>* = nullptr>
+    constexpr InPlaceFunction(T&& t) {
+        convertingInit(std::forward<T>(t));
+    }
+
+    constexpr InPlaceFunction(const InPlaceFunction& other) { other.copy_to(*this); }
+
+    constexpr InPlaceFunction(InPlaceFunction&& other) { other.move_to(*this); }
+
+    // Making functions default constructible has pros and cons, we will align
+    // with the standard
+    constexpr InPlaceFunction() : InPlaceFunction(BadCallable{}) {}
+
+    constexpr InPlaceFunction(std::nullptr_t) : InPlaceFunction(BadCallable{}) {}
+
+#if __cplusplus >= 202002L
+    constexpr ~InPlaceFunction() {
+#else
+    ~InPlaceFunction() {
+#endif
+        destroy();
+    }
+
+    // The std::function call operator is marked const, but this violates const
+    // correctness. We deviate from the standard and do not mark the operator as
+    // const. Collections of InPlaceFunctions should probably be mutable.
+    constexpr Ret operator()(Args... args) {
+        if constexpr (std::is_void_v<Ret>) {
+            vptr_->invoke(std::addressof(storage_), std::forward<Args>(args)...);
+        } else {
+            return vptr_->invoke(std::addressof(storage_), std::forward<Args>(args)...);
+        }
+    }
+
+    constexpr InPlaceFunction& operator=(const InPlaceFunction& other) {
+        if (std::addressof(other) == this) return *this;
+        destroy();
+        other.copy_to(*this);
+        return *this;
+    }
+
+    constexpr InPlaceFunction& operator=(InPlaceFunction&& other) {
+        if (std::addressof(other) == this) return *this;
+        destroy();
+        other.move_to(*this);
+        return *this;
+    }
+
+    template <typename T, std::enable_if_t<IsValidFunctional_v<T>>* = nullptr>
+    constexpr InPlaceFunction& operator=(T&& t) {
+        // We can't assign to ourselves, since T is a different type
+        destroy();
+        genericInit(std::forward<T>(t));
+        return *this;
+    }
+
+    // Explicitly defining this function saves a move/dtor
+    template <typename T, std::enable_if_t<IsConvertibleFunc_v<T>>* = nullptr>
+    constexpr InPlaceFunction& operator=(T&& t) {
+        // We can't assign to ourselves, since T is different type
+        destroy();
+        convertingInit(std::forward<T>(t));
+        return *this;
+    }
+
+    constexpr InPlaceFunction& operator=(std::nullptr_t) { return operator=(BadCallable{}); }
+
+    // Moved from InPlaceFunctions are still considered valid (similar to
+    // std::optional). If using std::move on a function object explicitly, it is
+    // recommended that the object is reset using nullptr.
+    constexpr explicit operator bool() const { return vptr_ != &TableImpl<BadCallable>::table; }
+
+    constexpr void swap(InPlaceFunction& other) {
+        if (std::addressof(other) == this) return;
+        InPlaceFunction tmp{std::move(other)};
+        other.destroy();
+        move_to(other);
+        destroy();
+        tmp.move_to(*this);
+    }
+
+    friend constexpr void swap(InPlaceFunction& lhs, InPlaceFunction& rhs) { lhs.swap(rhs); }
+};
+
+}  // namespace android::mediautils
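A minimal usage sketch for the header above: a non-allocating callback slot with the default 32-byte in-line buffer; the values and names are illustrative.

    #include <mediautils/InPlaceFunction.h>

    int inPlaceFunctionExample() {
        using android::mediautils::InPlaceFunction;
        InPlaceFunction<int(int)> f = [](int x) { return x + 1; };  // stored in-line
        const int captured = 41;
        f = [captured](int x) { return x + captured; };             // still no allocation
        const int result = f(1);                                    // 42
        f = nullptr;                                                // calling f now aborts
        return result;
    }
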
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
similarity index 90%
rename from media/libstagefright/include/media/stagefright/ProcessInfo.h
rename to media/utils/include/mediautils/ProcessInfo.h
index 06b9c92..9afa3df 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -18,8 +18,7 @@
 
 #define PROCESS_INFO_H_
 
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
 #include <map>
 #include <mutex>
 #include <utils/Condition.h>
@@ -46,7 +45,8 @@
     std::mutex mOverrideLock;
     std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
 
-    DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
+    ProcessInfo(const ProcessInfo&) = delete;
+    ProcessInfo& operator=(const ProcessInfo&) = delete;
 };
 
 }  // namespace android
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
similarity index 93%
rename from media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
rename to media/utils/include/mediautils/ProcessInfoInterface.h
index b7fc858..b6529fc 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -25,8 +25,8 @@
     virtual bool getPriority(int pid, int* priority) = 0;
     virtual bool isPidTrusted(int pid) = 0;
     virtual bool isPidUidTrusted(int pid, int uid) = 0;
-    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
-    virtual void removeProcessInfoOverride(int pid);
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+    virtual void removeProcessInfoOverride(int pid) = 0;
 
 protected:
     virtual ~ProcessInfoInterface() {}
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index de20d55..3d7981a 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -130,7 +130,7 @@
     std::optional<bool> doIsAllowed(uid_t uid);
     sp<content::pm::IPackageManagerNative> retrievePackageManager();
     sp<content::pm::IPackageManagerNative> mPackageManager; // To check apps manifest
-    uint_t mPackageManagerErrors = 0;
+    unsigned int mPackageManagerErrors = 0;
     struct Package {
         std::string name;
         bool playbackCaptureAllowed = false;
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
new file mode 100644
index 0000000..17c1ac9
--- /dev/null
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -0,0 +1,470 @@
+/*
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <iomanip>
+#include <limits>
+#include <sstream>
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <log/log_main.h>
+#include <utils/StrongPointer.h>
+
+namespace std {
+template <typename T>
+struct hash<::android::wp<T>> {
+    size_t operator()(const ::android::wp<T>& x) const {
+        return std::hash<const T*>()(x.unsafe_get());
+    }
+};
+}  // namespace std
+
+namespace android::mediautils {
+
+// Allocations represent owning handles to a region of shared memory (and thus
+// should not be copied in order to fulfill RAII).
+// To share ownership between multiple objects, a
+// ref-counting solution such as sp or shared ptr is appropriate, so the dtor
+// is called once for a particular block of memory.
+
+using AllocationType = ::android::sp<IMemory>;
+using WeakAllocationType = ::android::wp<IMemory>;
+
+namespace shared_allocator_impl {
+constexpr inline size_t roundup(size_t size, size_t pageSize) {
+    LOG_ALWAYS_FATAL_IF(pageSize == 0 || (pageSize & (pageSize - 1)) != 0,
+                        "Page size not multiple of 2");
+    return ((size + pageSize - 1) & ~(pageSize - 1));
+}
+
+constexpr inline bool isHeapValid(const sp<IMemoryHeap>& heap) {
+    return (heap && heap->getBase() &&
+            heap->getBase() != MAP_FAILED);  // TODO if not mapped locally
+}
+
+template <typename, typename = void>
+static constexpr bool has_deallocate_all = false;
+
+template <typename T>
+static constexpr bool has_deallocate_all<
+        T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().deallocate_all()), void>,
+                            void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_owns = false;
+
+template <typename T>
+static constexpr bool
+        has_owns<T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().owns(
+                                                            std::declval<const AllocationType>())),
+                                                    bool>,
+                                     void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_dump = false;
+
+template <typename T>
+static constexpr bool has_dump<
+        T,
+        std::enable_if_t<std::is_same_v<decltype(std::declval<T>().dump()), std::string>, void>> =
+        true;
+
+}  // namespace shared_allocator_impl
+
+struct BasicAllocRequest {
+    size_t size;
+};
+struct NamedAllocRequest : public BasicAllocRequest {
+    std::string_view name;
+};
+
+// We are required to add a layer of indirection to hold a handle to the actual
+// block due to sp<> being unable to be created from an object once its
+// ref-count has dropped to zero. So, we have to hold onto an extra reference
+// here. We effectively want to know when the refCount of the object drops to
+// one, since we need to hold on to a reference to pass the object to interfaces
+// requiring an sp<>.
+// TODO is there some way to avoid paying this cost?
+template <typename Allocator>
+class ScopedAllocator;
+template <typename AllocationT, typename AllocatorHandleType>
+class ScopedAllocation : public BnMemory {
+  public:
+    template <typename T>
+    friend class ScopedAllocator;
+    ScopedAllocation(const AllocationT& allocation, const AllocatorHandleType& handle)
+        : mAllocation(allocation), mHandle(handle) {}
+
+    // Defer the implementation to the underlying mAllocation
+
+    virtual sp<IMemoryHeap> getMemory(ssize_t* offset = nullptr,
+                                      size_t* size = nullptr) const override {
+        return mAllocation->getMemory(offset, size);
+    }
+
+  private:
+    ~ScopedAllocation() override { mHandle->deallocate(mAllocation); }
+
+    const AllocationT mAllocation;
+    const AllocatorHandleType mHandle;
+};
+
+// Allocations are only deallocated when going out of scope.
+// This should almost always be the outermost allocator.
+template <typename Allocator>
+class ScopedAllocator {
+  public:
+    using HandleT = std::shared_ptr<Allocator>;
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
+
+    ScopedAllocator() : mAllocator(std::make_shared<Allocator>()) {}
+
+    template <typename T>
+    auto allocate(T&& request) {
+        const auto allocation = mAllocator->allocate(std::forward<T>(request));
+        if (!allocation) {
+            return sp<ScopedAllocation<AllocationType, HandleT>>{};
+        }
+        return sp<ScopedAllocation<AllocationType, HandleT>>::make(allocation, mAllocator);
+    }
+
+    // Deallocate and deallocate_all are implicitly unsafe due to double
+    // deallocates upon ScopedAllocation destruction. We can protect against this
+    // efficiently with a gencount (for deallocate_all) or inefficiently (for
+    // deallocate) but we choose not to
+    //
+    // Owns is only safe to pseudo-impl due to static cast reqs
+    template <typename Enable = bool>
+    auto owns(const sp<ScopedAllocation<AllocationType, HandleT>>& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+        return mAllocator->owns(allocation->mAllocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+        return mAllocator->dump();
+    }
+
+  private:
+    // We store a shared pointer in order to ensure that the allocator outlives
+    // allocations (which call back into the allocator when they are released).
+    const HandleT mAllocator;
+};
+
+// A simple policy for PolicyAllocator which enforces a pool size and an allocation
+// size range.
+template <size_t PoolSize, size_t MinAllocSize = 0,
+          size_t MaxAllocSize = std::numeric_limits<size_t>::max()>
+class SizePolicy {
+    static_assert(PoolSize > 0);
+
+  public:
+    template <typename T>
+    bool isValid(T&& request) const {
+        static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+        return !(request.size > kMaxAllocSize || request.size < kMinAllocSize ||
+                 mPoolSize + request.size > kPoolSize);
+    }
+
+    void allocated(const AllocationType& alloc) { mPoolSize += alloc->size(); }
+
+    void deallocated(const AllocationType& alloc) { mPoolSize -= alloc->size(); }
+
+    void deallocated_all() { mPoolSize = 0; }
+
+    static constexpr size_t kPoolSize = PoolSize;
+    static constexpr size_t kMinAllocSize = MinAllocSize;
+    static constexpr size_t kMaxAllocSize = MaxAllocSize;
+
+  private:
+    size_t mPoolSize = 0;
+};
+
+// An allocator which accepts or rejects allocation requests by a parametrized
+// policy (which can carry state).
+template <typename Allocator, typename Policy>
+class PolicyAllocator {
+  public:
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    PolicyAllocator(Allocator allocator, Policy policy)
+        : mAllocator(allocator), mPolicy(std::move(policy)) {}
+
+    // Default initialize the allocator and policy
+    PolicyAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<android::mediautils::BasicAllocRequest, std::decay_t<T>>);
+        request.size = shared_allocator_impl::roundup(request.size, alignment());
+        if (!mPolicy.isValid(request)) {
+            return {};
+        }
+        AllocationType val = mAllocator.allocate(std::forward<T>(request));
+        if (val == nullptr) return val;
+        mPolicy.allocated(val);
+        return val;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mPolicy.deallocated(allocation);
+        mAllocator.deallocate(allocation);
+    }
+
+    template <typename Enable = void>
+    auto deallocate_all()
+            -> std::enable_if_t<shared_allocator_impl::has_deallocate_all<Allocator>, Enable> {
+        mAllocator.deallocate_all();
+        mPolicy.deallocated_all();
+    }
+
+    template <typename Enable = bool>
+    auto owns(const AllocationType& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+        return mAllocator.owns(allocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+        return mAllocator.dump();
+    }
+
+  private:
+    [[no_unique_address]] Allocator mAllocator;
+    [[no_unique_address]] Policy mPolicy;
+};
+
+// An allocator which keeps track of outstanding allocations for logging and
+// querying ownership.
+template <class Allocator>
+class SnoopingAllocator {
+  public:
+    struct AllocationData {
+        std::string name;
+        size_t allocation_number;
+    };
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    SnoopingAllocator(Allocator allocator, std::string_view name)
+        : mName(name), mAllocator(std::move(allocator)) {}
+
+    explicit SnoopingAllocator(std::string_view name) : mName(name), mAllocator(Allocator{}) {}
+
+    explicit SnoopingAllocator(Allocator allocator) : mAllocator(std::move(allocator)) {}
+
+    // Default construct allocator and name
+    SnoopingAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<NamedAllocRequest, std::decay_t<T>>);
+        AllocationType allocation = mAllocator.allocate(request);
+        if (allocation)
+            mAllocations.insert({WeakAllocationType{allocation},
+                                 {std::string{request.name}, mAllocationNumber++}});
+        return allocation;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mAllocations.erase(WeakAllocationType{allocation});
+        mAllocator.deallocate(allocation);
+    }
+
+    void deallocate_all() {
+        if constexpr (shared_allocator_impl::has_deallocate_all<Allocator>) {
+            mAllocator.deallocate_all();
+        } else {
+            for (auto& [mem, value] : mAllocations) {
+                mAllocator.deallocate(mem);
+            }
+        }
+        mAllocations.clear();
+    }
+
+    bool owns(const AllocationType& allocation) const {
+        return (mAllocations.count(WeakAllocationType{allocation}) > 0);
+    }
+
+    std::string dump() const {
+        std::ostringstream dump;
+        dump << mName << " Allocator Dump:\n";
+        dump << std::setw(8) << "HeapID" << std::setw(8) << "Size" << std::setw(8) << "Offset"
+             << std::setw(8) << "Order"
+             << "   Name\n";
+        for (auto& [mem, value] : mAllocations) {
+            // TODO Imem size and offset
+            const AllocationType handle = mem.promote();
+            if (!handle) {
+                dump << "Invalid memory lifetime!";
+                continue;
+            }
+            const auto heap = handle->getMemory();
+            dump << std::setw(8) << heap->getHeapID() << std::setw(8) << heap->getSize()
+                 << std::setw(8) << heap->getOffset() << std::setw(8) << value.allocation_number
+                 << "   " << value.name << "\n";
+        }
+        return dump.str();
+    }
+
+    const std::unordered_map<WeakAllocationType, AllocationData>& getAllocations() {
+        return mAllocations;
+    }
+
+  private:
+    const std::string mName;
+    [[no_unique_address]] Allocator mAllocator;
+    // We don't take copies of the underlying information in an allocation,
+    // rather, the allocation information is put on the heap and referenced via
+    // a ref-counted solution. So, the address of the allocation information is
+    // appropriate to hash. In order for this block to be freed, the underlying
+    // allocation must be referenced by no one (thus deallocated).
+    std::unordered_map<WeakAllocationType, AllocationData> mAllocations;
+    // For debugging purposes, monotonic
+    size_t mAllocationNumber = 0;
+};
+
+// An allocator which passes a failed allocation request to a backup allocator.
+template <class PrimaryAllocator, class SecondaryAllocator>
+class FallbackAllocator {
+  public:
+    static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
+    static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
+
+    static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+
+    FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
+        : mPrimary(primary), mSecondary(secondary) {}
+
+    // Default construct primary and secondary allocator
+    FallbackAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        AllocationType allocation = mPrimary.allocate(std::forward<T>(request));
+        if (!allocation) allocation = mSecondary.allocate(std::forward<T>(request));
+        return allocation;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        if (mPrimary.owns(allocation)) {
+            mPrimary.deallocate(allocation);
+        } else {
+            mSecondary.deallocate(allocation);
+        }
+    }
+
+    template <typename Enable = void>
+    auto deallocate_all() -> std::enable_if_t<
+            shared_allocator_impl::has_deallocate_all<PrimaryAllocator> &&
+                    shared_allocator_impl::has_deallocate_all<SecondaryAllocator>,
+            Enable> {
+        mPrimary.deallocate_all();
+        mSecondary.deallocate_all();
+    }
+
+    template <typename Enable = bool>
+    auto owns(const AllocationType& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<SecondaryAllocator>, Enable> {
+        return mPrimary.owns(allocation) || mSecondary.owns(allocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const
+            -> std::enable_if_t<shared_allocator_impl::has_dump<PrimaryAllocator> &&
+                                        shared_allocator_impl::has_dump<SecondaryAllocator>,
+                                Enable> {
+        return std::string("Primary: \n") + mPrimary.dump() + std::string("Secondary: \n") +
+               mSecondary.dump();
+    }
+
+  private:
+    [[no_unique_address]] PrimaryAllocator mPrimary;
+    [[no_unique_address]] SecondaryAllocator mSecondary;
+};
+
+// An allocator which is backed by a shared_ptr to an allocator, so multiple
+// allocators can share the same backing allocator (and thus the same state).
+template <typename Allocator>
+class IndirectAllocator {
+  public:
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
+        : mAllocator(allocator) {}
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        return mAllocator->allocate(std::forward<T>(request));
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mAllocator->deallocate(allocation);
+    }
+
+    // We can't implement deallocate_all/dump/owns, since we may not be the only allocator with
+    // access to the underlying allocator (making it not well-defined). If these
+    // methods are necessary, we need to wrap with a snooping allocator.
+  private:
+    const std::shared_ptr<Allocator> mAllocator;
+};
+
+// Stateless. This allocator allocates full page-aligned MemoryHeapBases (backed by
+// a shared memory mapped anonymous file) as allocations.
+class MemoryHeapBaseAllocator {
+  public:
+    static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+    static constexpr unsigned FLAGS = 0;  // default flags
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+        auto heap =
+                sp<MemoryHeapBase>::make(shared_allocator_impl::roundup(request.size, alignment()));
+        if (!shared_allocator_impl::isHeapValid(heap)) {
+            return {};
+        }
+        return sp<MemoryBase>::make(heap, 0, heap->getSize());
+    }
+
+    // Passing a block not allocated by a HeapAllocator is undefined.
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        const auto heap = allocation->getMemory();
+        if (!heap) return;
+        // This causes future mapped accesses (even across process boundaries)
+        // to receive SIGBUS.
+        ftruncate(heap->getHeapID(), 0);
+        // This static cast is safe, since as long as the block was originally
+        // allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
+        static_cast<MemoryHeapBase&>(*heap).dispose();
+    }
+};
+}  // namespace android::mediautils
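A minimal composition sketch for the header above: a pool-limited, name-tracking shared-memory allocator built from the pieces it defines; the pool size and request name are illustrative.

    #include <mediautils/SharedMemoryAllocator.h>

    void sharedAllocatorExample() {
        using namespace android::mediautils;
        // 1 MiB pool of page-aligned MemoryHeapBase allocations, with per-allocation
        // names recorded for dump() and owns().
        using PoolAllocator = PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<1 << 20>>;
        ScopedAllocator<SnoopingAllocator<PoolAllocator>> allocator;
        const auto allocation = allocator.allocate(NamedAllocRequest{{4096}, "ExampleBuffer"});
        if (allocation != nullptr) {
            // The shared memory is released when the last reference to the
            // ScopedAllocation goes away.
        }
    }
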
diff --git a/media/utils/include/mediautils/StaticStringView.h b/media/utils/include/mediautils/StaticStringView.h
new file mode 100644
index 0000000..14be240
--- /dev/null
+++ b/media/utils/include/mediautils/StaticStringView.h
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string_view>
+#include <type_traits>
+
+#pragma push_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
+#undef EXPLICIT_CONVERSION_GENERATE_OPERATOR
+#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)               \
+    friend constexpr bool operator op(T lhs, T rhs) {                 \
+        return operator op(static_cast<U>(lhs), static_cast<U>(rhs)); \
+    }                                                                 \
+    friend constexpr bool operator op(T lhs, U rhs) {                 \
+        return operator op(static_cast<U>(lhs), rhs);                 \
+    }                                                                 \
+    friend constexpr bool operator op(U lhs, T rhs) {                 \
+        return operator op(lhs, static_cast<U>(rhs));                 \
+    }
+
+#pragma push_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
+#undef EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS
+// Generate comparison operator friend functions for types (appropriately
+// const/ref qualified) where T is **explicitly** convertible to U.
+#define EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS(T, U)      \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, ==)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, !=)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, <)                   \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, <=)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, >)                   \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, >=)
+
+namespace android::mediautils {
+
+// This class a reference to a string with static storage duration
+// which is const (i.e. a string view). We expose an identical API to
+// string_view, however we do not publicly inherit to avoid potential mis-use of
+// non-virtual dtors/methods.
+//
+// We can create APIs which consume only static strings, which
+// avoids allocation/deallocation of the string locally, as well as potential
+// lifetime issues caused by consuming raw pointers (or string_views).
+// Equivalently, a string_view which is always valid, and whose underlying data
+// can never change.
+//
+// In most cases, the string_view should be initialized at compile time (and there are
+// helpers to do so below). In order to initialize a non-constexpr array,
+// the second template param must be false (i.e. opt-in).
+// Construction/usage as follows (constexpr required unless second template param is false):
+//
+//     constexpr static std::array<char, 12> debugString = toStdArray("MyMethodName");
+//     constexpr auto myStaticStringView = StaticStringView::create<debugString>();
+//     const size_t length = myStaticStringView.length(); // can call any string_view methods
+//     globalLog(myStaticStringView, ...); // Pass to APIs consuming StaticStringViews
+//
+struct StaticStringView final : private std::string_view {
+    template <typename T>
+    struct is_const_char_array : std::false_type {};
+
+    // Use templated value helper
+    template <size_t N>
+    struct is_const_char_array<const std::array<char, N>> : std::true_type {};
+
+    template <typename T>
+    static constexpr bool is_const_char_array_v =
+            is_const_char_array<std::remove_reference_t<T>>::value;
+
+    template <auto& val, std::enable_if_t<is_const_char_array_v<decltype(val)>, bool> Check = true>
+    static constexpr StaticStringView create() {
+        if constexpr (Check) {
+            // If this static_assert fails to compile, this method was called
+            // with a non-constexpr array.
+            static_assert(val[0]);
+        }
+        return StaticStringView{val.data(), val.size()};
+    }
+
+    // We can copy/move assign/construct from other StaticStringViews as their validity is already
+    // ensured
+    constexpr StaticStringView(const StaticStringView& other) = default;
+    constexpr StaticStringView& operator=(const StaticStringView& other) = default;
+    constexpr StaticStringView(StaticStringView&& other) = default;
+    constexpr StaticStringView& operator=(StaticStringView&& other) = default;
+
+    // Explicitly convert to a std::string_view (this is a strict loss of
+    // information so should only be used across APIs which intend to consume
+    // any std::string_view).
+    constexpr std::string_view getStringView() const { return *this; }
+
+    // The following methods expose an identical API to std::string_view
+    using std::string_view::begin;
+    using std::string_view::cbegin;
+    using std::string_view::cend;
+    using std::string_view::crbegin;
+    using std::string_view::crend;
+    using std::string_view::end;
+    using std::string_view::rbegin;
+    using std::string_view::rend;
+    using std::string_view::operator[];
+    using std::string_view::at;
+    using std::string_view::back;
+    using std::string_view::data;
+    using std::string_view::empty;
+    using std::string_view::front;
+    using std::string_view::length;
+    using std::string_view::max_size;
+    using std::string_view::size;
+    // These modifiers are valid because the resulting view is a
+    // substring of the original static string
+    using std::string_view::remove_prefix;
+    using std::string_view::remove_suffix;
+    // Skip swap
+    using std::string_view::compare;
+    using std::string_view::copy;
+    using std::string_view::find;
+    using std::string_view::find_first_not_of;
+    using std::string_view::find_first_of;
+    using std::string_view::find_last_not_of;
+    using std::string_view::find_last_of;
+    using std::string_view::rfind;
+    using std::string_view::substr;
+#if __cplusplus >= 202002L
+    using std::string_view::ends_with;
+    using std::string_view::starts_with;
+#endif
+    using std::string_view::npos;
+
+    // Non-member friend functions to follow. Identical API to std::string_view
+    template <class CharT, class Traits>
+    friend std::basic_ostream<CharT, Traits>& operator<<(std::basic_ostream<CharT, Traits>& os,
+                                                         StaticStringView v) {
+        return os << static_cast<std::string_view&>(v);
+    }
+
+    EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS(const StaticStringView&,
+                                                      const std::string_view&)
+
+  private:
+    constexpr StaticStringView(const char* ptr, size_t sz) : std::string_view(ptr, sz){};
+
+  public:
+    // The next two functions are logically consteval (only avail in c++20).
+    // We can't use templates as params, as they would require references to
+    // static which would unnecessarily bloat executable size.
+    template <typename T, size_t N, size_t M>
+    static constexpr std::array<T, N + M> concatArray(const std::array<T, N>& a,
+                                                      const std::array<T, M>& b) {
+        std::array<T, N + M> res{};
+        for (size_t i = 0; i < N; i++) {
+            res[i] = a[i];
+        }
+        for (size_t i = 0; i < M; i++) {
+            res[N + i] = b[i];
+        }
+        return res;
+    }
+
+    static void arrayIsNotNullTerminated();
+
+    // This method should only be called on C-style char arrays which are
+    // null-terminated. Calling this method on a char array with intermediate null
+    // characters (e.g. "hello\0" or "hel\0lo") will result in a std::array with null
+    // characters, which is most likely not intended.
+    // We attempt to detect a non-null terminated char array at link-time, but
+    // this is best effort. A consequence of this approach is that this method
+    // will fail to link for extern args, or when not inlined. Since this method
+    // is intended to be used constexpr, this is not an issue.
+    template <size_t N>
+    static constexpr std::array<char, N - 1> toStdArray(const char (&input)[N]) {
+        std::array<char, N - 1> res{};
+        for (size_t i = 0; i < N - 1; i++) {
+            res[i] = input[i];
+        }
+        // A workaround to generate a link-time error if toStdArray is not called on
+        // a null-terminated char array.
+        if (input[N - 1] != 0) arrayIsNotNullTerminated();
+        return res;
+    }
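+
+    // Illustrative combined use of toStdArray() and concatArray() (a sketch; the
+    // variable name is hypothetical):
+    //   constexpr auto joined = concatArray(toStdArray("media"), toStdArray("utils"));
+    //   // joined is a std::array<char, 10> holding "mediautils" with no trailing '\0'.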
+};
+}  // namespace android::mediautils
+
+// Specialization of std::hash for use with std::unordered_map
+namespace std {
+template <>
+struct hash<android::mediautils::StaticStringView> {
+    constexpr size_t operator()(const android::mediautils::StaticStringView& val) const {
+        return std::hash<std::string_view>{}(val.getStringView());
+    }
+};
+}  // namespace std
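+
+// With this specialization in place, StaticStringView can key standard hashed
+// containers, e.g. (an illustrative sketch):
+//   std::unordered_map<android::mediautils::StaticStringView, size_t> counts;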
+
+#pragma pop_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
+#pragma pop_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
diff --git a/media/utils/include/mediautils/TidWrapper.h b/media/utils/include/mediautils/TidWrapper.h
new file mode 100644
index 0000000..aeefa01
--- /dev/null
+++ b/media/utils/include/mediautils/TidWrapper.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <errno.h>
+
+#if defined(__linux__)
+#include <signal.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+#endif
+
+namespace android::mediautils {
+
+// The gettid() library wrapper is only available on bionic. When not building
+// against bionic, we issue the syscall directly.
+inline pid_t getThreadIdWrapper() {
+#if defined(__BIONIC__)
+    return ::gettid();
+#else
+    return syscall(SYS_gettid);
+#endif
+}
+
+// Send an abort signal to a (linux) thread id.
+inline int abortTid(int tid) {
+#if defined(__linux__)
+    const pid_t pid = getpid();
+    siginfo_t siginfo = {
+        .si_code = SI_QUEUE,
+        .si_pid = pid,
+        .si_uid = getuid(),
+    };
+    return syscall(SYS_rt_tgsigqueueinfo, pid, tid, SIGABRT, &siginfo);
+#else
+    errno = ENODEV;
+    return -1;
+#endif
+}
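+
+// For example (a sketch): request SIGABRT on the calling thread itself:
+//   abortTid(getThreadIdWrapper());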
+
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index bdb5337..f9ea50c 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -123,7 +123,6 @@
         const Duration secondChanceDuration;
         const std::chrono::system_clock::time_point startSystemTime;
         const pid_t tid;
-
         void onCancel(TimerThread::Handle handle) const;
         void onTimeout(TimerThread::Handle handle) const;
     };
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
index c76fa7d..d5be177 100644
--- a/media/utils/include/mediautils/TimerThread.h
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -21,9 +21,11 @@
 #include <deque>
 #include <functional>
 #include <map>
+#include <memory>
 #include <mutex>
 #include <string>
 #include <thread>
+#include <vector>
 
 #include <android-base/thread_annotations.h>
 
@@ -151,7 +153,15 @@
      */
     bool cancelTask(Handle handle);
 
-    std::string toString(size_t retiredCount = SIZE_MAX) const;
+    struct SnapshotAnalysis;
+    /**
+     * Take a snapshot of the current state of the TimerThread and determine the
+     * potential cause of a deadlock.
+     * \param retiredCount The number of successfully retired calls to capture
+     *                     (may be many).
+     * \return A SnapshotAnalysis object; see its declaration below for details.
+     */
+    SnapshotAnalysis getSnapshotAnalysis(size_t retiredCount = SIZE_MAX) const;
 
     /**
      * Returns a string representation of the TimerThread queue.
@@ -202,7 +212,6 @@
         return s;
     }
 
-  private:
     // To minimize movement of data, we pass around shared_ptrs to Requests.
     // These are allocated and deallocated outside of the lock.
     // TODO(b/243839867) consider options to merge Request with the
@@ -232,6 +241,40 @@
         std::string toString() const;
     };
 
+
+    // SnapshotAnalysis contains info deduced by getSnapshotAnalysis().
+
+    struct SnapshotAnalysis {
+        // If we were unable to determine any applicable thread ids,
+        // we leave their value as INVALID_PID.
+        // Note, we use the linux thread id (not pthread), so its type is pid_t.
+        static constexpr pid_t INVALID_PID = -1;
+        // Description of likely issue and/or blocked method.
+        // Empty if no actionable info.
+        std::string description;
+        // Tid of the (latest) monitored thread which has timed out.
+        // This is the thread with respect to which the suspect is deduced.
+        // Most often, this is the thread from which an abort is being
+        // triggered.
+        pid_t timeoutTid = INVALID_PID;
+        // Tid of the (HAL) thread which has likely halted progress, selected
+        // from pendingRequests. May be the same as timeoutTid, if the timed-out
+        // thread directly called into the HAL.
+        pid_t suspectTid = INVALID_PID;
+        // Number of second chances given by the timer thread
+        size_t secondChanceCount;
+        // List of pending requests
+        std::vector<std::shared_ptr<const Request>> pendingRequests;
+        // List of timed-out requests
+        std::vector<std::shared_ptr<const Request>> timeoutRequests;
+        // List of retired requests
+        std::vector<std::shared_ptr<const Request>> retiredRequests;
+        // Dumps the information contained above as well as additional call
+        // stacks where applicable.
+        std::string toString() const;
+    };
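+    // Illustrative sketch of how a caller might consume the analysis (not part
+    // of the API surface; timerThread is a hypothetical instance):
+    //   const auto analysis = timerThread.getSnapshotAnalysis();
+    //   if (analysis.suspectTid != SnapshotAnalysis::INVALID_PID) {
+    //       ALOGW("%s", analysis.toString().c_str());
+    //   }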
+
+  private:
     // Deque of requests, in order of add().
     // This class is thread-safe.
     class RequestQueue {
@@ -326,36 +369,11 @@
         }
     };
 
-    // Analysis contains info deduced by analysisTimeout().
-    //
-    // Summary is the result string from checking timeoutRequests to see if
-    // any might be caused by blocked calls in pendingRequests.
-    //
-    // Summary string is empty if there is no automatic actionable info.
-    //
-    // timeoutTid is the tid selected from timeoutRequests (if any).
-    //
-    // HALBlockedTid is the tid that is blocked from pendingRequests believed
-    // to cause the timeout.
-    // HALBlockedTid may be INVALID_PID if no suspected tid is found,
-    // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
-    //
-    static constexpr pid_t INVALID_PID = -1;
-    struct Analysis {
-        std::string summary;
-        pid_t timeoutTid = INVALID_PID;
-        pid_t HALBlockedTid = INVALID_PID;
-    };
 
     // A HAL method is where the substring "Hidl" is in the class name.
     // The tag should look like: ... Hidl ... :: ...
     static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
 
-    // Returns analysis from the requests.
-    static Analysis analyzeTimeout(
-        const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
-        const std::vector<std::shared_ptr<const Request>>& pendingRequests);
-
     std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
 
     static constexpr size_t kRetiredQueueMax = 16;
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 232cc4e..0689083 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -7,76 +7,107 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_test_library {
-    name: "libsharedtest",
+// general test config
+cc_defaults {
+    name: "libmediautils_tests_config",
+
+    host_supported: true,
+
     cflags: [
         "-Wall",
         "-Werror",
         "-Wextra",
     ],
 
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
+    sanitize: {
+        undefined: true,
+        misc_undefined: [
+            "float-divide-by-zero",
+            "local-bounds",
+        ],
+        integer_overflow: true,
+        cfi: true,
+        memtag_heap: true,
+        diag: {
+            undefined: true,
+            misc_undefined: [
+                "float-divide-by-zero",
+                "local-bounds",
+            ],
+            integer_overflow: true,
+            cfi: true,
+            memtag_heap: true,
+        },
     },
+    target: {
+        host: {
+            sanitize: {
+                cfi: false,
+                diag: {
+                    cfi: false,
+                },
+            },
+        },
+    },
+}
+
+cc_defaults {
+    name: "libmediautils_tests_defaults",
+
+    defaults: ["libmediautils_tests_config"],
+
+    host_supported: true,
 
     shared_libs: [
+        "libbinder",
         "liblog",
+        "libmediautils",
+        "libutils",
     ],
+}
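+
+// Example (an illustrative sketch; "my_new_test" is a hypothetical module): a
+// test picks up the shared configuration simply by listing the defaults:
+//
+//     cc_test {
+//         name: "my_new_test",
+//         defaults: ["libmediautils_tests_defaults"],
+//         srcs: ["my_new_test.cpp"],
+//     }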
+
+cc_test_library {
+    name: "libsharedtest",
+
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "sharedtest.cpp",
-    ]
+    ],
 }
 
 cc_test {
     name: "library_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "libbase",
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     data_libs: [
         "libsharedtest",
     ],
 
+    shared_libs: [
+        "libbase",
+    ],
+
     srcs: [
         "library_tests.cpp",
     ],
 }
 
 cc_test {
+    name: "libmediautils_test",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "memory-test.cpp",
+    ],
+}
+
+cc_test {
     name: "media_process_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_process_tests.cpp",
@@ -86,17 +117,7 @@
 cc_test {
     name: "media_synchronization_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_synchronization_tests.cpp",
@@ -106,17 +127,7 @@
 cc_test {
     name: "media_threadsnapshot_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_threadsnapshot_tests.cpp",
@@ -126,17 +137,10 @@
 cc_test {
     name: "mediautils_fixedstring_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -147,17 +151,10 @@
 cc_test {
     name: "mediautils_scopedstatistics_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -168,17 +165,10 @@
 cc_test {
     name: "methodstatistics_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -187,28 +177,59 @@
 }
 
 cc_test {
+    name: "static_string_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "static_string_view_tests.cpp",
+    ],
+}
+
+cc_test {
     name: "timecheck_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "timecheck_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "timerthread_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "TimerThread-test.cpp",
+    ],
+}
+
+cc_test {
+    name: "extended_accumulator_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "extended_accumulator_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "inplace_function_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "inplace_function_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "shared_memory_allocator_tests",
+    defaults: ["libmediautils_tests_defaults"],
+    srcs: [
+        "shared_memory_allocator_tests.cpp",
+    ],
+}
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/tests/TimerThread-test.cpp
similarity index 70%
rename from media/utils/TimerThread-test.cpp
rename to media/utils/tests/TimerThread-test.cpp
index 9452c07..468deed 100644
--- a/media/utils/TimerThread-test.cpp
+++ b/media/utils/tests/TimerThread-test.cpp
@@ -52,14 +52,16 @@
     std::atomic<bool> taskRan = false;
     TimerThread thread;
     TimerThread::Handle handle =
-            thread.scheduleTask("Basic", [&taskRan](TimerThread::Handle handle __unused) {
+            thread.scheduleTask("Basic", [&taskRan](TimerThread::Handle) {
                     taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
     ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
     std::this_thread::sleep_for(100ms - kJitter);
     ASSERT_FALSE(taskRan);
     std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_TRUE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_TRUE(taskRan); // timeout handler called.
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 static void testCancel() {
@@ -68,15 +70,17 @@
     std::atomic<bool> taskRan = false;
     TimerThread thread;
     TimerThread::Handle handle =
-            thread.scheduleTask("Cancel", [&taskRan](TimerThread::Handle handle __unused) {
+            thread.scheduleTask("Cancel", [&taskRan](TimerThread::Handle) {
                     taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
     ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
     std::this_thread::sleep_for(100ms - kJitter);
     ASSERT_FALSE(taskRan);
     ASSERT_TRUE(thread.cancelTask(handle));
     std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_FALSE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_FALSE(taskRan); // timeout handler did not run.
+    ASSERT_EQ(0ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // task cancelled.
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 static void testCancelAfterRun() {
@@ -86,14 +90,16 @@
     TimerThread thread;
     TimerThread::Handle handle =
             thread.scheduleTask("CancelAfterRun",
-                    [&taskRan](TimerThread::Handle handle __unused) {
+                    [&taskRan](TimerThread::Handle) {
                             taskRan = true; },
                             DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
     ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
     std::this_thread::sleep_for(100ms + kJitter);
-    ASSERT_TRUE(taskRan);
+    ASSERT_TRUE(taskRan); // timeout handler called.
     ASSERT_FALSE(thread.cancelTask(handle));
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing actually cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 static void testMultipleTasks() {
@@ -104,23 +110,23 @@
 
     auto startTime = std::chrono::steady_clock::now();
 
-    thread.scheduleTask("0", [&taskRan](TimerThread::Handle handle __unused) {
+    thread.scheduleTask("0", [&taskRan](TimerThread::Handle) {
             taskRan[0] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(300, frac));
-    thread.scheduleTask("1", [&taskRan](TimerThread::Handle handle __unused) {
+    thread.scheduleTask("1", [&taskRan](TimerThread::Handle) {
             taskRan[1] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
-    thread.scheduleTask("2", [&taskRan](TimerThread::Handle handle __unused) {
+    thread.scheduleTask("2", [&taskRan](TimerThread::Handle) {
             taskRan[2] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
-    thread.scheduleTask("3", [&taskRan](TimerThread::Handle handle __unused) {
+    thread.scheduleTask("3", [&taskRan](TimerThread::Handle) {
             taskRan[3] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(400, frac));
-    auto handle4 = thread.scheduleTask("4", [&taskRan](TimerThread::Handle handle __unused) {
+    auto handle4 = thread.scheduleTask("4", [&taskRan](TimerThread::Handle) {
             taskRan[4] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
-    thread.scheduleTask("5", [&taskRan](TimerThread::Handle handle __unused) {
+    thread.scheduleTask("5", [&taskRan](TimerThread::Handle) {
             taskRan[5] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
 
     // 6 tasks pending
-    ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(6ul, countChars(thread.pendingToString(), REQUEST_START));
     // 0 tasks completed
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // None of the tasks are expected to have finished at the start.
     std::array<std::atomic<bool>, 6> expected{};
@@ -162,17 +168,19 @@
     ASSERT_EQ(expected, taskRan);
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // Task 3 should trigger around 400ms.
     std::this_thread::sleep_until(startTime + 400ms - kJitter);
 
     ASSERT_EQ(expected, taskRan);
 
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 
     std::this_thread::sleep_until(startTime + 400ms + kJitter);
 
@@ -180,9 +188,10 @@
     ASSERT_EQ(expected, taskRan);
 
     // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
-    // 5 tasks ran and 1 cancelled
-    ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 5 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(5ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 }; // class TimerThreadTest
@@ -221,48 +230,48 @@
     ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle2));
 
     // 3 tasks pending
-    ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(3ul, countChars(thread.pendingToString(), REQUEST_START));
     // 0 tasks retired
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle0));
     ASSERT_TRUE(thread.cancelTask(handle1));
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // handle1 is stale, cancel returns false.
     ASSERT_FALSE(thread.cancelTask(handle1));
 
     // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     // Add another tracked task.
     auto handle3 = thread.trackTask("3");
     ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle3));
 
     // 2 tasks pending
-    ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.pendingToString(), REQUEST_START));
     // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle2));
 
     // 1 tasks pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
     // 3 tasks retired
-    ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(3ul, countChars(thread.retiredToString(), REQUEST_START));
 
     ASSERT_TRUE(thread.cancelTask(handle3));
 
     // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
     // 4 tasks retired
-    ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
+    ASSERT_EQ(4ul, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 }  // namespace
diff --git a/media/utils/tests/extended_accumulator_tests.cpp b/media/utils/tests/extended_accumulator_tests.cpp
new file mode 100644
index 0000000..e243e7e
--- /dev/null
+++ b/media/utils/tests/extended_accumulator_tests.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "extended_accumulator_tests"
+
+#include <mediautils/ExtendedAccumulator.h>
+
+#include <type_traits>
+#include <cstdint>
+#include <limits.h>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+// Conditionally choose a base accumulating counter value in order to prevent
+// unsigned underflow on the accumulator from aborting the tests.
+template <typename TType, typename CType>
+static constexpr CType getBase() {
+    static_assert(sizeof(TType) < sizeof(CType));
+    if constexpr (std::is_unsigned_v<CType>) {
+        return std::numeric_limits<TType>::max() + 1;
+    } else {
+        return 0;
+    }
+}
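+
+// For example, with TType = uint8_t and an unsigned CType the base is 256, so a
+// delta of -1 applied to prevVal 0 lands at 255 rather than wrapping below zero.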
+
+// Since the entire state of this utility is the previous value, and its
+// behavior depends only on that value modulo the range of the underlying type,
+// we can test combinations of the previous value of the underlying type and a
+// hypothetical signed update to that type and ensure the accumulator moves
+// correctly and reports overflow correctly.
+template <typename TestUInt, typename CType>
+void testPair(TestUInt prevVal, std::make_signed_t<TestUInt> delta) {
+    using TestDetect = ExtendedAccumulator<TestUInt, CType>;
+    using TestInt = typename TestDetect::SignedInt;
+    static_assert(std::is_same_v<typename TestDetect::UnsignedInt, TestUInt>);
+    static_assert(std::is_same_v<TestInt, std::make_signed_t<TestUInt>>);
+    static_assert(sizeof(TestUInt) < sizeof(CType));
+
+    // To safely detect underflow/overflow for testing
+    // Should be 0 mod TestUInt, max + 1 is convenient
+    static constexpr CType base = getBase<TestUInt, CType>();
+    const CType prev = base + prevVal;
+    TestDetect test{prev};
+    EXPECT_EQ(test.getValue(), prev);
+    // Prevent unsigned wraparound abort
+    CType next;
+    const auto err = __builtin_add_overflow(prev, delta, &next);
+    LOG_ALWAYS_FATAL_IF(err, "Unexpected wrap in tests");
+    const auto [result, status] = test.poll(static_cast<TestUInt>(next));
+    EXPECT_EQ(test.getValue(), next);
+    EXPECT_EQ(result, delta);
+
+    // Test overflow/underflow event reporting.
+    if (next < base) EXPECT_EQ(TestDetect::Wrap::UNDERFLOW, status);
+    else if (next > base + std::numeric_limits<TestUInt>::max())
+        EXPECT_EQ(TestDetect::Wrap::OVERFLOW, status);
+    else EXPECT_EQ(TestDetect::Wrap::NORMAL, status);
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with an unsigned containing type.
+TEST(wraparound_tests, cover_u8_u64) {
+    using TType = uint8_t;
+    using CType = uint64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with a signed containing type.
+TEST(wraparound_tests, cover_u8_s64) {
+    using TType = uint8_t;
+    using CType = int64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
diff --git a/media/utils/tests/inplace_function_tests.cpp b/media/utils/tests/inplace_function_tests.cpp
new file mode 100644
index 0000000..6172aa4
--- /dev/null
+++ b/media/utils/tests/inplace_function_tests.cpp
@@ -0,0 +1,493 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "inplace_function_tests"
+
+#include <mediautils/InPlaceFunction.h>
+
+#include <type_traits>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+struct BigCallable {
+    BigCallable(size_t* x, size_t val1, size_t val2) : ptr(x), a(val1), b(val2) {}
+    size_t* ptr;
+    size_t a;
+    size_t b;
+    size_t operator()(size_t input) const {
+        *ptr += a * 100 + b * 10 + input;
+        return 8;
+    }
+};
+
+TEST(InPlaceFunctionTests, Basic) {
+    size_t x = 5;
+    InPlaceFunction<size_t(size_t)> func;
+    {
+        BigCallable test{&x, 2, 3};
+        func = test;
+    }
+    EXPECT_EQ(func(2), 8ull);
+    EXPECT_EQ(x, 232ull + 5);
+}
+
+TEST(InPlaceFunctionTests, Invalid) {
+    InPlaceFunction<size_t(size_t)> func;
+    EXPECT_TRUE(!func);
+    InPlaceFunction<size_t(size_t)> func2{nullptr};
+    EXPECT_TRUE(!func2);
+    InPlaceFunction<size_t(size_t)> func3 = [](size_t x) { return x; };
+    EXPECT_TRUE(!(!func3));
+    func3 = nullptr;
+    EXPECT_TRUE(!func3);
+}
+
+TEST(InPlaceFunctionTests, MultiArg) {
+    InPlaceFunction<size_t(size_t, size_t, size_t)> func = [](size_t a, size_t b, size_t c) {
+        return a + b + c;
+    };
+    EXPECT_EQ(func(2, 3, 5), 2ull + 3 + 5);
+}
+struct Record {
+    Record(size_t m, size_t c, size_t d) : move_called(m), copy_called(c), dtor_called(d) {}
+    Record() {}
+    size_t move_called = 0;
+    size_t copy_called = 0;
+    size_t dtor_called = 0;
+    friend std::ostream& operator<<(std::ostream& os, const Record& record) {
+        return os << "Record, moves: " << record.move_called << ", copies: " << record.copy_called
+                  << ", dtor: " << record.dtor_called << '\n';
+    }
+};
+
+bool operator==(const Record& lhs, const Record& rhs) {
+    return lhs.move_called == rhs.move_called && lhs.copy_called == rhs.copy_called &&
+           lhs.dtor_called == rhs.dtor_called;
+}
+
+struct Noisy {
+    Record& ref;
+    size_t state;
+    Noisy(Record& record, size_t val) : ref(record), state(val) {}
+    Noisy(const Noisy& other) : ref(other.ref), state(other.state) { ref.copy_called++; }
+
+    Noisy(Noisy&& other) : ref(other.ref), state(other.state) { ref.move_called++; }
+    ~Noisy() { ref.dtor_called++; }
+
+    size_t operator()() { return state; }
+};
+
+TEST(InPlaceFunctionTests, CtorForwarding) {
+    Record record;
+    Noisy noisy{record, 17};
+    InPlaceFunction<size_t()> func{noisy};
+    EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func(), 17ull);
+    Record record2;
+    Noisy noisy2{record2, 13};
+    InPlaceFunction<size_t()> func2{std::move(noisy2)};
+    EXPECT_EQ(record2, Record(1, 0, 0));  // move, copy, dtor
+    EXPECT_EQ(func2(), 13ull);
+}
+
+TEST(InPlaceFunctionTests, FunctionCtorForwarding) {
+    {
+        Record record;
+        Noisy noisy{record, 17};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 17ull);
+        InPlaceFunction<size_t()> func2{func};
+        EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+        EXPECT_EQ(func2(), 17ull);
+    }
+    Record record;
+    Noisy noisy{record, 13};
+    InPlaceFunction<size_t()> func{noisy};
+    EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func(), 13ull);
+    InPlaceFunction<size_t()> func2{std::move(func)};
+    EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func2(), 13ull);
+    // We expect moved from functions to still be valid
+    EXPECT_TRUE(!(!func));
+    EXPECT_EQ(static_cast<bool>(func), static_cast<bool>(func2));
+    EXPECT_EQ(func(), 13ull);
+}
+
+TEST(InPlaceFunctionTests, Dtor) {
+    Record record;
+    {
+        InPlaceFunction<size_t()> func;
+        {
+            Noisy noisy{record, 17};
+            func = noisy;
+        }
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 1ull);
+    }
+    EXPECT_EQ(record.dtor_called, 2ull);
+}
+
+TEST(InPlaceFunctionTests, Assignment) {
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 5};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 0ull);
+        func = noisy2;
+        EXPECT_EQ(record.dtor_called, 1ull);
+        EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 5ull);
+    }
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 5};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 0ull);
+        func = std::move(noisy2);
+        EXPECT_EQ(record.dtor_called, 1ull);
+        EXPECT_EQ(record2, Record(1, 0, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 5ull);
+    }
+
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 13};
+        {
+            InPlaceFunction<size_t()> func{noisy};
+            EXPECT_EQ(func(), 17ull);
+            InPlaceFunction<size_t()> func2{noisy2};
+            EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(record.dtor_called, 0ull);
+            func = func2;
+            EXPECT_EQ(record.dtor_called, 1ull);
+            EXPECT_EQ(func(), 13ull);
+            EXPECT_EQ(record2, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_TRUE(static_cast<bool>(func2));
+            EXPECT_EQ(func2(), 13ull);
+        }
+        EXPECT_EQ(record2, Record(0, 2, 2));  // move, copy, dtor
+    }
+
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 13};
+        {
+            InPlaceFunction<size_t()> func{noisy};
+            EXPECT_EQ(func(), 17ull);
+            InPlaceFunction<size_t()> func2{noisy2};
+            EXPECT_EQ(record.dtor_called, 0ull);
+            EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+            func = std::move(func2);
+            EXPECT_EQ(record.dtor_called, 1ull);
+            EXPECT_EQ(func(), 13ull);
+            EXPECT_EQ(record2, Record(1, 1, 0));  // move, copy, dtor
+            // Moved from function is still valid
+            EXPECT_TRUE(static_cast<bool>(func2));
+            EXPECT_EQ(func2(), 13ull);
+        }
+        EXPECT_EQ(record2, Record(1, 1, 2));  // move, copy, dtor
+    }
+}
+
+TEST(InPlaceFunctionTests, Swap) {
+    Record record1;
+    Record record2;
+    InPlaceFunction<size_t()> func1 = Noisy{record1, 5};
+    InPlaceFunction<size_t()> func2 = Noisy{record2, 7};
+    EXPECT_EQ(record1, Record(1, 0, 1));  // move, copy, dtor
+    EXPECT_EQ(record2, Record(1, 0, 1));  // move, copy, dtor
+    EXPECT_EQ(func1(), 5ull);
+    EXPECT_EQ(func2(), 7ull);
+    func1.swap(func2);
+    EXPECT_EQ(record1, Record(2, 0, 2));  // move, copy, dtor
+    // An additional move and destroy into the temporary object
+    EXPECT_EQ(record2, Record(3, 0, 3));  // move, copy, dtor
+    EXPECT_EQ(func1(), 7ull);
+    EXPECT_EQ(func2(), 5ull);
+}
+
+TEST(InPlaceFunctionTests, Conversion) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 16> func2 = noisy;
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        {
+            InPlaceFunction<size_t(), 32> func{func2};
+            EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(0, 2, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(0, 2, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionMove) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 16> func2 = noisy;
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        {
+            InPlaceFunction<size_t(), 32> func{std::move(func2)};
+            EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(1, 1, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionAssign) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 32> func;
+        {
+            InPlaceFunction<size_t(), 16> func2 = noisy;
+            EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+            func = func2;
+            EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(0, 2, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(0, 2, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionAssignMove) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 32> func;
+        {
+            InPlaceFunction<size_t(), 16> func2 = noisy;
+            EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+            func = std::move(func2);
+            EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(1, 1, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+}
+
+struct NoMoveCopy {
+    NoMoveCopy() = default;
+    NoMoveCopy(const NoMoveCopy&) = delete;
+    NoMoveCopy(NoMoveCopy&&) = delete;
+};
+struct TestCallable {
+    NoMoveCopy& operator()(NoMoveCopy& x) { return x; }
+};
+
+TEST(InPlaceFunctionTests, ArgumentForwarding) {
+    const auto lambd = [](NoMoveCopy& x) -> NoMoveCopy& { return x; };
+    InPlaceFunction<NoMoveCopy&(NoMoveCopy&)> func = lambd;
+    const auto lambd2 = [](NoMoveCopy&& x) -> NoMoveCopy&& { return std::move(x); };
+    InPlaceFunction<NoMoveCopy && (NoMoveCopy &&)> func2 = lambd2;
+    auto lvalue = NoMoveCopy{};
+    func(lvalue);
+    func2(NoMoveCopy{});
+    InPlaceFunction<void(NoMoveCopy&)> func3 = [](const NoMoveCopy&) {};
+    func3(lvalue);
+    InPlaceFunction<void(NoMoveCopy &&)> func4 = [](const NoMoveCopy&) {};
+    func4(std::move(lvalue));
+    InPlaceFunction<void(const NoMoveCopy&)> func5 = [](const NoMoveCopy&) {};
+    func5(lvalue);
+    InPlaceFunction<void(const NoMoveCopy&&)> func6 = [](const NoMoveCopy&) {};
+    func6(std::move(lvalue));
+    InPlaceFunction<void(const NoMoveCopy&&)> func7 = [](const NoMoveCopy&&) {};
+    func7(std::move(lvalue));
+    InPlaceFunction<void(NoMoveCopy &&)> func8 = [](const NoMoveCopy&&) {};
+    func8(std::move(lvalue));
+
+    {
+        Record record;
+        Noisy noisy{record, 5};
+        const auto lambd3 = [](Noisy) {};
+        InPlaceFunction<void(Noisy)> func3{lambd3};
+        EXPECT_EQ(record, Record(0, 0, 0));  // move, copy, dtor
+        func3(std::move(noisy));
+        EXPECT_EQ(record, Record(2, 0, 2));  // move, copy, dtor
+    }
+
+    {
+        Record record;
+        Noisy noisy{record, 5};
+        const auto lambd3 = [](Noisy) {};
+        InPlaceFunction<void(Noisy)> func3{lambd3};
+        EXPECT_EQ(record, Record(0, 0, 0));  // move, copy, dtor
+        func3(noisy);
+        EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+    }
+}
+
+TEST(InPlaceFunctionTests, VoidFunction) {
+    InPlaceFunction<void(size_t)> func = [](size_t x) -> size_t { return x; };
+    func(5);
+    InPlaceFunction<void(void)> func2 = []() -> size_t { return 5; };
+    func2();
+}
+NoMoveCopy foo() {
+    return NoMoveCopy();
+}
+struct Test {
+    NoMoveCopy operator()() { return NoMoveCopy{}; }
+};
+
+TEST(InPlaceFunctionTests, FullElision) {
+    InPlaceFunction<NoMoveCopy()> func = foo;
+}
+
+TEST(InPlaceFunctionTests, ReturnConversion) {
+    const auto lambd = [](int&& x) -> int&& { return std::move(x); };
+    InPlaceFunction<int && (int&& x)> func = lambd;
+    func(5);
+    InPlaceFunction<void(int)> func3 = [](double) {};
+    func3(5);
+    InPlaceFunction<double()> func4 = []() -> int { return 5; };
+    func4();
+}
+
+struct Overloaded {
+    int operator()() & { return 2; }
+    int operator()() const& { return 3; }
+    int operator()() && { return 4; }
+    int operator()() const&& { return 5; }
+};
+
+TEST(InPlaceFunctionTests, OverloadResolution) {
+    InPlaceFunction<int()> func = Overloaded{};
+    EXPECT_EQ(func(), 2);
+    EXPECT_EQ(std::move(func()), 2);
+}
+
+template <class T, class U, class = void>
+struct can_assign : std::false_type {};
+
+template <class T, class U>
+struct can_assign<T, U, typename std::void_t<decltype(T().operator=(U()))>> : std::true_type {};
+
+template <class From, class To, bool Expected>
+static constexpr bool Convertible =
+        (can_assign<To, From>::value ==
+         std::is_constructible_v<To, From>)&&(std::is_constructible_v<To, From> == Expected);
+
+struct TooBig {
+    std::array<uint64_t, 5> big = {1, 2, 3, 4, 5};
+    size_t operator()() { return static_cast<size_t>(big[0] + big[1] + big[2] + big[3] + big[4]); }
+};
+static_assert(sizeof(TooBig) == 40);
+struct NotCallable {};
+struct WrongArg {
+    void operator()(NotCallable) {}
+};
+struct WrongRet {
+    NotCallable operator()(size_t) { return NotCallable{}; }
+};
+
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 32>, true>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(size_t), 32>, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<InPlaceFunction<void(), 32>, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<TooBig, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<TooBig, InPlaceFunction<size_t(), 40>, true>);
+static_assert(Convertible<NotCallable, InPlaceFunction<size_t(), 40>, false>);
+static_assert(Convertible<WrongArg, InPlaceFunction<void(size_t), 40>, false>);
+static_assert(Convertible<WrongRet, InPlaceFunction<size_t(size_t), 40>, false>);
+// Void returning functions are modelled by any return type
+static_assert(Convertible<WrongRet, InPlaceFunction<void(size_t), 40>, true>);
+
+// Check constructibility/assignability from smaller function types
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 24>, false>);
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 40>, true>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(size_t), 40>, false>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<NotCallable(), 40>, false>);
+
+struct BadLambd {
+    int operator()(int&& x) { return std::move(x); }
+};
+
+static_assert(Convertible<BadLambd, InPlaceFunction<int(int&&), 32>, true>);
+static_assert(Convertible<BadLambd, InPlaceFunction<int&(int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<const int&(int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<int && (int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<const int && (int&&), 32>, false>);
+
+struct Base {};
+struct Derived : Base {};
+struct Converted {
+    Converted(const Derived&) {}
+};
+
+struct ConvertCallable {
+    Derived operator()() { return Derived{}; }
+    Derived& operator()(Derived& x) { return x; }
+    Derived&& operator()(Derived&& x) { return std::move(x); }
+    const Derived& operator()(const Derived& x) { return x; }
+    const Derived&& operator()(const Derived&& x) { return std::move(x); }
+};
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted && ()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Converted&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Converted && ()>, false>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived&(Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base&(Derived&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived && (Derived &&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base && (Derived &&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(const Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(const Derived&)>, true>);
+
+static_assert(
+        Convertible<ConvertCallable, InPlaceFunction<const Derived && (const Derived&&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base && (const Derived&&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(Derived&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived && (Derived &&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base && (Derived &&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(Derived&&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(Derived&&)>, true>);
diff --git a/media/utils/tests/library_tests.cpp b/media/utils/tests/library_tests.cpp
index c5c500c..f15f7f9 100644
--- a/media/utils/tests/library_tests.cpp
+++ b/media/utils/tests/library_tests.cpp
@@ -26,8 +26,9 @@
 
 namespace {
 
-static int32_t here = 0;  // accessed on same thread.
+[[maybe_unused]] static int32_t here = 0;  // accessed on same thread.
 
+#if defined(__ANDROID__)
 TEST(library_tests, basic) {
     std::string path = android::base::GetExecutableDirectory() + "/libsharedtest.so";
     // The flags to loadLibrary should not include  RTLD_GLOBAL or RTLD_NODELETE
@@ -64,6 +65,7 @@
     // will prevent unloading libraries.
     ASSERT_EQ(1, here);
 }
+#endif
 
 TEST(library_tests, sad_library) {
     std::string path = android::base::GetExecutableDirectory()
diff --git a/media/utils/tests/media_process_tests.cpp b/media/utils/tests/media_process_tests.cpp
index 2ae3f70..391c6a7 100644
--- a/media/utils/tests/media_process_tests.cpp
+++ b/media/utils/tests/media_process_tests.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <mediautils/Process.h>
+#include <mediautils/TidWrapper.h>
 
 #define LOG_TAG "media_process_tests"
 
@@ -24,8 +25,16 @@
 using namespace android;
 using namespace android::mediautils;
 
+// Disables false-positives from base::Split()
+//
+// See mismatched sanitized libraries here:
+// https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
+extern "C" const char* __asan_default_options() {
+  return "detect_container_overflow=0";
+}
+
 TEST(media_process_tests, basic) {
-  const std::string schedString = getThreadSchedAsString(gettid());
+  const std::string schedString = getThreadSchedAsString(getThreadIdWrapper());
 
   (void)schedString;
   // We don't test schedString, only that we haven't crashed.
diff --git a/media/utils/tests/media_threadsnapshot_tests.cpp b/media/utils/tests/media_threadsnapshot_tests.cpp
index c7a45e2..57cf698 100644
--- a/media/utils/tests/media_threadsnapshot_tests.cpp
+++ b/media/utils/tests/media_threadsnapshot_tests.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <mediautils/ThreadSnapshot.h>
+#include <mediautils/TidWrapper.h>
 
 #define LOG_TAG "media_threadsnapshot_tests"
 
@@ -27,10 +28,18 @@
 using namespace android;
 using namespace android::mediautils;
 
+// Disables false-positives from base::Split()
+//
+// See mismatched sanitized libraries here:
+// https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
+extern "C" const char* __asan_default_options() {
+  return "detect_container_overflow=0";
+}
+
 TEST(media_threadsnapshot_tests, basic) {
   using namespace std::chrono_literals;
 
-  ThreadSnapshot threadSnapshot(gettid());
+  ThreadSnapshot threadSnapshot(getThreadIdWrapper());
 
   threadSnapshot.onBegin();
 
diff --git a/media/utils/memory-test.cpp b/media/utils/tests/memory-test.cpp
similarity index 100%
rename from media/utils/memory-test.cpp
rename to media/utils/tests/memory-test.cpp
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
new file mode 100644
index 0000000..11bc72a
--- /dev/null
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "shared_memory_allocator_tests"
+
+#include <gtest/gtest.h>
+#include <mediautils/SharedMemoryAllocator.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+namespace {
+void validate_block(const AllocationType& block) {
+    ASSERT_TRUE(block != nullptr);
+    memset(block->unsecurePointer(), 10, 4096);
+    EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
+}
+
+template <size_t N = 0, bool FatalOwn = true>
+struct ValidateForwarding {
+    static constexpr size_t alignment() { return 1337; }
+
+    bool owns(const AllocationType& allocation) const {
+        if (allocation == owned) return true;
+        if constexpr (FatalOwn) {
+            LOG_ALWAYS_FATAL_IF(allocation != not_owned, "Invalid allocation passed to allocator");
+        }
+        return false;
+    }
+
+    void deallocate_all() { deallocate_all_count++; }
+    std::string dump() const { return dump_string; }
+
+    static inline size_t deallocate_all_count = 0;
+    static inline const AllocationType owned =
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+    static inline const AllocationType not_owned =
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+    static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
+};
+
+};  // namespace
+static_assert(shared_allocator_impl::has_owns<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_dump<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_deallocate_all<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_owns<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(shared_allocator_impl::has_dump<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(
+        shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_owns<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_dump<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_deallocate_all<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(shared_allocator_impl::has_owns<
+                      FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                        SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
+
+TEST(shared_memory_allocator_tests, roundup) {
+    using namespace shared_allocator_impl;
+    EXPECT_EQ(roundup(1023, 1024), 1024ul);
+    EXPECT_EQ(roundup(1024, 1024), 1024ul);
+    EXPECT_EQ(roundup(1025, 1024), 2048ul);
+    EXPECT_DEATH(roundup(1023, 1023), "");
+    EXPECT_DEATH(roundup(1023, 0), "");
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator) {
+    MemoryHeapBaseAllocator allocator;
+    const auto memory = allocator.allocate(BasicAllocRequest{500});
+    ASSERT_TRUE(memory != nullptr);
+    const auto fd = dup(memory->getMemory()->getHeapID());
+    EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+    EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
+    validate_block(memory);
+    allocator.deallocate(memory);
+    // Ensures we have closed the fd
+    EXPECT_EQ(memory->unsecurePointer(), nullptr);
+    EXPECT_EQ(memory->getMemory()->getBase(), nullptr);
+    struct stat st;
+    const auto err = fstat(fd, &st);
+    EXPECT_EQ(err, 0);
+    // Ensure we reclaim pages (overly-zealous)
+    EXPECT_EQ(st.st_size, 0);
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
+    static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+    MemoryHeapBaseAllocator allocator;
+    const auto first_memory = allocator.allocate(BasicAllocRequest{500});
+    const auto second_memory = allocator.allocate(BasicAllocRequest{500});
+    ASSERT_TRUE(first_memory != nullptr && second_memory != nullptr);
+    EXPECT_NE(first_memory->getMemory()->getHeapID(), second_memory->getMemory()->getHeapID());
+    allocator.deallocate(first_memory);
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator) {
+    static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+
+    SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
+    const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
+    auto second_memory = first_memory;
+    {
+        const auto tmp = allocator.allocate(NamedAllocRequest{{5000}, "allocate_2"});
+        // Test copying handle around
+        second_memory = tmp;
+    }
+    ASSERT_TRUE(first_memory && second_memory);
+    EXPECT_TRUE(allocator.owns(first_memory) && allocator.owns(second_memory));
+    const auto first_allocations = allocator.getAllocations();
+    EXPECT_EQ(first_allocations.size(), 2ull);
+    for (const auto& [key, val] : allocator.getAllocations()) {
+        if (val.allocation_number == 0) {
+            EXPECT_EQ(val.name, "allocate_1");
+            EXPECT_TRUE(first_memory == key);
+        }
+        if (val.allocation_number == 1) {
+            EXPECT_EQ(val.name, "allocate_2");
+            EXPECT_TRUE(second_memory == key);
+        }
+    }
+    // TODO test dump and deallocate forwarding
+    // EXPECT_EQ(allocator.dump(), std::string{});
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+    EXPECT_EQ(second_memory->unsecurePointer(), nullptr);
+    EXPECT_FALSE(allocator.owns(second_memory));
+    EXPECT_TRUE(allocator.owns(first_memory));
+    const auto second_allocations = allocator.getAllocations();
+    EXPECT_EQ(second_allocations.size(), 1ul);
+    for (const auto& [key, val] : second_allocations) {
+        EXPECT_EQ(val.name, "allocate_1");
+        EXPECT_TRUE(first_memory == key);
+    }
+    // EXPECT_EQ(allocator.dump(), std::string{});
+    // TODO test deallocate_all O(1)
+}
+
+// TODO generic policy test
+TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
+    PolicyAllocator allocator{MemoryHeapBaseAllocator{},
+                              SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+    // Violate max size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+    // Violate min alloc size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
+    const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+    validate_block(first_memory);
+    // Violate pool size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
+    const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+    // Check pool size update after deallocation
+    const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+    validate_block(new_second_memory);
+}
+
+TEST(shared_memory_allocator_tests, indirect_allocator) {
+    static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+    const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
+    IndirectAllocator allocator{allocator_handle};
+    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    EXPECT_TRUE(allocator_handle->owns(memory));
+    EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
+    allocator.deallocate(memory);
+    EXPECT_FALSE(allocator_handle->owns(memory));
+    EXPECT_TRUE(allocator_handle->getAllocations().size() == 0);
+}
+
+TEST(shared_memory_allocator_tests, policy_allocator_forwarding) {
+    // Test appropriate forwarding of allocate, deallocate
+    const auto primary_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
+    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    EXPECT_TRUE(primary_allocator->owns(memory));
+    const auto& allocations = primary_allocator->getAllocations();
+    EXPECT_TRUE(allocations.size() == 1);
+    allocator.deallocate(memory);
+    EXPECT_TRUE(allocations.size() == 0);
+    const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+    EXPECT_TRUE(allocations.size() == 1);
+    EXPECT_TRUE(primary_allocator->owns(memory2));
+    allocator.deallocate(memory2);
+    EXPECT_FALSE(primary_allocator->owns(memory2));
+    EXPECT_TRUE(allocations.size() == 0);
+    // Test appropriate forwarding of owns, dump, alignment, deallocate_all
+    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+    EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
+    EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
+    EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
+    static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+    size_t prev = ValidateForwarding<0>::deallocate_all_count;
+    allocator2.deallocate_all();
+    EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
+    SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    validate_block(memory);
+    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+    const auto& allocations = allocator.getAllocations();
+    EXPECT_EQ(allocations.size(), 1ul);
+    for (const auto& [key, val] : allocations) {
+        EXPECT_EQ(val.name, "allocation_1");
+        EXPECT_EQ(val.allocation_number, 0ul);
+        EXPECT_TRUE(key == memory);
+    }
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator) {
+    // Construct Fallback Allocator
+    const auto primary_allocator = std::make_shared<
+            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+    const auto secondary_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
+
+    FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
+                                         SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
+    static_assert(decltype(fallback_allocator)::alignment() == 4096);
+    // Basic Allocation Test
+    const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    validate_block(memory);
+    // Correct allocator selected
+    EXPECT_TRUE(fallback_allocator.owns(memory));
+    EXPECT_TRUE(primary_allocator->owns(memory));
+    EXPECT_FALSE(secondary_allocator->owns(memory));
+    // Test fallback allocation
+    const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+    validate_block(memory2);
+    // Correct allocator selected
+    EXPECT_TRUE(fallback_allocator.owns(memory2));
+    EXPECT_FALSE(primary_allocator->owns(memory2));
+    EXPECT_TRUE(secondary_allocator->owns(memory2));
+    // Allocations ended up in the correct allocators
+    const auto& primary_allocations = primary_allocator->getAllocations();
+    EXPECT_TRUE(primary_allocations.size() == 1ul);
+    ASSERT_TRUE(primary_allocations.find(memory) != primary_allocations.end());
+    EXPECT_EQ(primary_allocations.find(memory)->second.name, std::string{"allocation_1"});
+    const auto& secondary_allocations = secondary_allocator->getAllocations();
+    EXPECT_TRUE(secondary_allocations.size() == 1ul);
+    ASSERT_TRUE(secondary_allocations.find(memory2) != secondary_allocations.end());
+    EXPECT_EQ(secondary_allocations.find(memory2)->second.name, std::string{"allocation_2"});
+    // Test deallocate appropriate forwarding
+    fallback_allocator.deallocate(memory);
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    // Appropriate fallback after deallocation
+    const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+    EXPECT_TRUE(fallback_allocator.owns(memory3));
+    EXPECT_TRUE(primary_allocator->owns(memory3));
+    EXPECT_FALSE(secondary_allocator->owns(memory3));
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+    // Test deallocate appropriate forwarding
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    fallback_allocator.deallocate(memory2);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
+    const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+    EXPECT_TRUE(fallback_allocator.owns(memory4));
+    EXPECT_FALSE(primary_allocator->owns(memory4));
+    EXPECT_TRUE(secondary_allocator->owns(memory4));
+    // Allocations ended up in the correct allocators
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    ASSERT_TRUE(primary_allocations.find(memory3) != primary_allocations.end());
+    EXPECT_EQ(primary_allocations.find(memory3)->second.name, std::string{"allocation_3"});
+    ASSERT_TRUE(secondary_allocations.find(memory4) != secondary_allocations.end());
+    EXPECT_EQ(secondary_allocations.find(memory4)->second.name, std::string{"allocation_4"});
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator_forwarding) {
+    // Test forwarding
+    using Alloc1 = ValidateForwarding<0, false>;
+    using Alloc2 = ValidateForwarding<1, false>;
+    FallbackAllocator forward_test{Alloc1{}, Alloc2{}};
+    EXPECT_TRUE(forward_test.dump().find(Alloc1::dump_string) != std::string::npos);
+    EXPECT_TRUE(forward_test.dump().find(Alloc2::dump_string) != std::string::npos);
+    // Test owned forwarding
+    EXPECT_TRUE(forward_test.owns(Alloc1::owned));
+    EXPECT_TRUE(forward_test.owns(Alloc2::owned));
+    EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
+    EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
+    // Test alignment forwarding
+    static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+    // Test deallocate_all forwarding
+    size_t prev1 = Alloc1::deallocate_all_count;
+    size_t prev2 = Alloc2::deallocate_all_count;
+    forward_test.deallocate_all();
+    EXPECT_EQ(prev1 + 1, Alloc1::deallocate_all_count);
+    EXPECT_EQ(prev2 + 1, Alloc2::deallocate_all_count);
+}
+
+TEST(shared_memory_allocator_tests, scoped_allocator) {
+    const auto underlying_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("Allocator");
+    ScopedAllocator allocator{underlying_allocator};
+    const auto& allocations = underlying_allocator->getAllocations();
+    {
+        decltype(allocator.allocate(NamedAllocRequest{})) copy;
+        {
+            EXPECT_EQ(allocations.size(), 0ul);
+            const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+            copy = memory;
+            EXPECT_EQ(allocations.size(), 1ul);
+            EXPECT_TRUE(allocator.owns(copy));
+            EXPECT_TRUE(allocator.owns(memory));
+        }
+        EXPECT_TRUE(allocator.owns(copy));
+        EXPECT_EQ(allocations.size(), 1ul);
+        for (const auto& [key, value] : allocations) {
+            EXPECT_EQ(value.name, std::string{"allocation_1"});
+        }
+    }
+    EXPECT_EQ(allocations.size(), 0ul);
+    // Test forwarding
+    static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+    ScopedAllocator<ValidateForwarding<0>> forwarding{};
+    EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
+}
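The tests above exercise the composable wrappers introduced in mediautils/SharedMemoryAllocator.h (MemoryHeapBaseAllocator, PolicyAllocator, SnoopingAllocator, IndirectAllocator, FallbackAllocator, ScopedAllocator). As a reading aid, here is a minimal sketch of how the pieces compose outside the test harness, using only constructors and methods exercised by these tests; the pool sizes and the "example"/"client_buffer" names are illustrative and not taken from the change:

    #include <cstring>

    #include <mediautils/SharedMemoryAllocator.h>

    using namespace android::mediautils;

    void allocatorSketch() {
        // 64 KiB pool with allocations capped at 16 KiB (illustrative sizes),
        // wrapped so that every live allocation is tracked by name.
        SnoopingAllocator allocator{
                PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<64 * 1024, 0, 16 * 1024>{}},
                "example"};
        const auto block = allocator.allocate(NamedAllocRequest{{4096}, "client_buffer"});
        if (block != nullptr) {
            // IMemory-style access, as checked by validate_block() above.
            memset(block->unsecurePointer(), 0, 4096);
            allocator.deallocate(block);  // the snooped entry disappears here
        }
    }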
diff --git a/media/utils/tests/static_string_view_tests.cpp b/media/utils/tests/static_string_view_tests.cpp
new file mode 100644
index 0000000..c00de68
--- /dev/null
+++ b/media/utils/tests/static_string_view_tests.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "StaticStringViewTests"
+
+#include <mediautils/StaticStringView.h>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android::mediautils;
+
+template <auto& T, class = void>
+struct CanCreate : std::false_type {};
+
+template <auto& T>
+struct CanCreate<T, typename std::void_t<decltype(StaticStringView::create<T>)>> : std::true_type {
+};
+
+static constexpr std::array<char, 2> global = {'a', 'b'};
+
+TEST(StaticStringViewTests, CreateTicket) {
+    // This will always fail due to template param binding rules
+    // const std::array<char,2> nonstatic = {'a', 'b'};
+    // static_assert(CanCreate<nonstatic>::value == false);
+    static std::array<char, 2> nonconst = {'a', 'b'};
+    static const std::array<char, 2> nonconstexpr = {'a', 'b'};
+    static constexpr std::array<int, 2> nonchar = {1, 2};
+    static constexpr size_t nonarray = 2;
+
+    static_assert(CanCreate<nonconst>::value == false);
+    static_assert(CanCreate<nonarray>::value == false);
+    static_assert(CanCreate<nonchar>::value == false);
+    static_assert(CanCreate<nonconstexpr>::value == false);
+
+    static constexpr std::array<char, 2> scoped = {'a', 'b'};
+    constexpr StaticStringView Ticket1 = StaticStringView::create<global>();
+    constexpr StaticStringView Ticket2 = StaticStringView::create<scoped>();
+    const StaticStringView Ticket3 = StaticStringView::create<scoped>();
+    EXPECT_EQ(Ticket3, Ticket2);
+    EXPECT_EQ(Ticket1.getStringView(), Ticket2.getStringView());
+    EXPECT_EQ(std::string_view{"ab"}, Ticket1.getStringView());
+}
+TEST(StaticStringViewTests, CompileTimeConvert) {
+    static constexpr std::array<char, 4> converted = StaticStringView::toStdArray("test");
+    constexpr StaticStringView ticket = StaticStringView::create<converted>();
+    EXPECT_EQ(ticket, std::string_view{"test"});
+    // Unchecked constexpr construction
+    static const std::array<char, 5> converted2 = StaticStringView::toStdArray("test2");
+    constexpr auto ticket2 = StaticStringView::create<converted2, false>();
+    EXPECT_EQ(ticket2, std::string_view{"test2"});
+    constexpr char stack_array[4] = {'a', 'b', 'c', '\0'};
+    static constexpr auto converted3 = StaticStringView::toStdArray(stack_array);
+    constexpr auto ticket3 = StaticStringView::create<converted3>();
+    EXPECT_EQ(ticket3, std::string_view{"abc"});
+}
+
+TEST(StaticStringViewTests, CompileTimeConcat) {
+    // temporaries should not be static to prevent odr use
+    constexpr std::array<char, 3> arr1 = {'a', 'b', 'c'};
+    constexpr std::array<char, 4> arr2 = {'d', 'e', 'f', 'g'};
+    static constexpr std::array<char, 7> res = StaticStringView::concatArray(arr1, arr2);
+    static constexpr std::array<char, 7> expected = {'a', 'b', 'c', 'd', 'e', 'f', 'g'};
+    EXPECT_EQ(res, expected);
+}
+
+TEST(StaticStringViewTests, StringViewForwarding) {
+    static constexpr auto converted = StaticStringView::toStdArray("test");
+    constexpr auto ticket = StaticStringView::create<converted>();
+    EXPECT_EQ(ticket.length(), ticket.getStringView().length());
+    EXPECT_TRUE(ticket == ticket.getStringView());
+    EXPECT_TRUE(ticket == ticket);
+    EXPECT_TRUE(ticket.getStringView() == ticket);
+    EXPECT_TRUE(ticket > "abc");
+    EXPECT_TRUE("abc" < ticket);
+}
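The pattern these tests encode, stated as a sketch: build the character data at compile time with toStdArray()/concatArray(), keep only the final array in static storage, and mint a ticket from it with create<>(). The function name and the "audio.flinger" string below are illustrative:

    #include <string_view>

    #include <mediautils/StaticStringView.h>

    using android::mediautils::StaticStringView;

    std::string_view moduleName() {
        // Intermediate pieces stay non-static locals (see the ODR note above); only the
        // concatenated array backing the ticket needs static storage duration.
        constexpr auto prefix = StaticStringView::toStdArray("audio.");
        constexpr auto suffix = StaticStringView::toStdArray("flinger");
        static constexpr auto full = StaticStringView::concatArray(prefix, suffix);
        constexpr StaticStringView ticket = StaticStringView::create<full>();
        return ticket.getStringView();  // "audio.flinger"
    }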
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
index 8236174..bd91efa 100644
--- a/media/utils/tests/timecheck_tests.cpp
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -26,7 +26,6 @@
 using namespace std::chrono_literals;
 
 namespace {
-
 TEST(timecheck_tests, success) {
     bool timeoutRegistered = false;
     float elapsedMsRegistered = 0.f;
@@ -69,4 +68,33 @@
 // Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
 // EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
 
+// Note: the following test manually verifies that the correct thread is aborted.
+// Due to difficulties with gtest and EXPECT_EXIT, this is hard to verify
+// automatically. TODO(b/246446561) Attempt to use EXPECT_EXIT
+
+#if 0
+void threadFunction() {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    std::atomic_bool event = false;  // seq-cst implies acquire-release
+    {
+        TimeCheck timeCheck("timeout",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true; // store-release, must be last.
+        }, 1ms /* timeoutDuration */, {} /* secondChanceDuration */, true /* crash */);
+        std::this_thread::sleep_for(100ms);
+        ADD_FAILURE();
+    }
+}
+
+TEST(timecheck_tests, death) {
+  std::thread mthread{threadFunction};
+  mthread.join();
+}
+#endif
+
 } // namespace
+
diff --git a/services/audioflinger/AllocatorFactory.h b/services/audioflinger/AllocatorFactory.h
new file mode 100644
index 0000000..7534607
--- /dev/null
+++ b/services/audioflinger/AllocatorFactory.h
@@ -0,0 +1,95 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include <mediautils/SharedMemoryAllocator.h>
+
+#pragma once
+
+// TODO how do we appropriately restrict visibility of this header?
+// It should only be included in AudioFlinger.h
+// We will make everything internal linkage for now.
+namespace android {
+namespace AllocatorFactory {
+namespace {
+// TODO make sure these are appropriate
+constexpr inline size_t MAX_MEMORY_SIZE = 1024 * 1024 * 100;                  // 100 MiB
+constexpr inline size_t DED_SIZE = (MAX_MEMORY_SIZE * 4) / 10;                // 40 MiB
+constexpr inline size_t SHARED_SIZE = MAX_MEMORY_SIZE - DED_SIZE;             // 60 MiB
+constexpr inline size_t SHARED_SIZE_LARGE = (SHARED_SIZE * 4) / 6;            // 40 MiB
+constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE;  // 20 MiB
+constexpr inline size_t SMALL_THRESHOLD = 1024 * 40;                          // 40 KiB
+
+inline auto getDedicated() {
+    using namespace mediautils;
+    static const auto allocator =
+            std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+    return allocator;
+}
+
+inline auto getSharedLarge() {
+    using namespace mediautils;
+    static const auto allocator = std::make_shared<
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+    return allocator;
+}
+
+inline auto getSharedSmall() {
+    using namespace mediautils;
+    static const auto allocator =
+            std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
+                                             SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+    return allocator;
+}
+
+template <typename Policy, typename Allocator>
+inline auto wrapWithPolicySnooping(Allocator allocator, std::string_view name) {
+    using namespace mediautils;
+    return SnoopingAllocator{PolicyAllocator{IndirectAllocator{allocator}, Policy{}}, name};
+}
+
+// A reasonable upper bound on how many clients we expect, and how many pieces to slice
+// the dedicated pool into.
+constexpr inline size_t CLIENT_BOUND = 32;
+// Maximum fraction of each shared pool a single client can take (50%).
+constexpr inline size_t ADV_THRESHOLD_INV = 2;
+
+inline auto getClientAllocator() {
+    using namespace mediautils;
+    const auto makeDedPool = []() {
+        return wrapWithPolicySnooping<SizePolicy<DED_SIZE / CLIENT_BOUND>>(getDedicated(),
+                                                                           "Dedicated Pool");
+    };
+    const auto makeLargeShared = []() {
+        return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_LARGE / ADV_THRESHOLD_INV>>(
+                getSharedLarge(), "Large Shared");
+    };
+    const auto makeSmallShared = []() {
+        return wrapWithPolicySnooping<
+                SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+                getSharedSmall(), "Small Shared");
+    };
+
+    return ScopedAllocator{std::make_shared<
+            FallbackAllocator<decltype(makeDedPool()),
+                              decltype(FallbackAllocator(makeLargeShared(), makeSmallShared()))>>(
+            makeDedPool(), FallbackAllocator{makeLargeShared(), makeSmallShared()})};
+}
+
+using ClientAllocator = decltype(getClientAllocator());
+}  // namespace
+}  // namespace AllocatorFactory
+}  // namespace android
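For orientation: the chain returned by getClientAllocator() gives each client a dedicated slice of DED_SIZE / CLIENT_BOUND = 1.25 MiB, then falls back to at most half of the large shared pool (20 MiB per client) and half of the small shared pool (10 MiB per client; the underlying small pool only accepts requests up to SMALL_THRESHOLD). A hedged sketch of a call site follows; the real consumers are AudioFlinger::Client::allocator() and the effect shared-memory allocation in Effects.cpp later in this change, and the helper name and "client_buffer" tag are hypothetical:

    #include <cstddef>

    #include "AllocatorFactory.h"

    namespace android {

    // Hypothetical helper, for illustration only.
    auto allocateClientBuffer(size_t size) {
        static auto allocator = AllocatorFactory::getClientAllocator();
        // ScopedAllocator releases the block once the returned handle (and any copies of it)
        // go out of scope, so no explicit deallocate() is needed.
        return allocator.allocate(mediautils::NamedAllocRequest{{size}, "client_buffer"});
    }

    }  // namespace android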
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 6d4c3a3..41d4e16 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -43,7 +43,9 @@
         "FastThread.cpp",
         "FastThreadDumpState.cpp",
         "FastThreadState.cpp",
+        "MelReporter.cpp",
         "NBAIO_Tee.cpp",
+        "PatchCommandThread.cpp",
         "PatchPanel.cpp",
         "PropertyUtils.cpp",
         "SpdifStreamOut.cpp",
@@ -64,6 +66,7 @@
         "av-types-aidl-cpp",
         "effect-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libaudioflinger_timing",
         "libaudiofoundation",
         "libaudiohal",
         "libaudioprocessing",
@@ -74,6 +77,7 @@
         "liblog",
         "libbinder",
         "libaudioclient",
+        "libaudiomanager",
         "libmedialogservice",
         "libmediametrics",
         "libmediautils",
@@ -81,10 +85,10 @@
         "libnblog",
         "libpermission",
         "libpowermanager",
-        "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
         "libshmemcompat",
+        "libsounddose",
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
@@ -99,7 +103,9 @@
         "libaaudio_headers",
         "libaudioclient_headers",
         "libaudiohal_headers",
+        "libaudioutils_headers",
         "libmedia_headers",
+        "libsounddose_headers",
     ],
 
     export_shared_lib_headers: [
@@ -117,3 +123,8 @@
     },
 
 }
+
+cc_library_headers {
+    name: "libaudioflinger_headers",
+    export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 8d3cf84..33ccf32 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -55,7 +55,7 @@
 #include <cutils/properties.h>
 
 #include <system/audio.h>
-#include <audiomanager/AudioManager.h>
+#include <audiomanager/IAudioManager.h>
 
 #include "AudioFlinger.h"
 #include "NBAIO_Tee.h"
@@ -117,11 +117,12 @@
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
         AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
 
-static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
-static const char kHardwareLockedString[] = "Hardware lock is taken\n";
-static const char kClientLockedString[] = "Client lock is taken\n";
-static const char kNoEffectsFactory[] = "Effects Factory is absent\n";
+static constexpr char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
+static constexpr char kHardwareLockedString[] = "Hardware lock is taken\n";
+static constexpr char kClientLockedString[] = "Client lock is taken\n";
+static constexpr char kNoEffectsFactory[] = "Effects Factory is absent\n";
 
+static constexpr char kAudioServiceName[] = "audio";
 
 nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
 
@@ -193,7 +194,6 @@
 BINDER_METHOD_ENTRY(restoreOutput) \
 BINDER_METHOD_ENTRY(openInput) \
 BINDER_METHOD_ENTRY(closeInput) \
-BINDER_METHOD_ENTRY(invalidateStream) \
 BINDER_METHOD_ENTRY(setVoiceVolume) \
 BINDER_METHOD_ENTRY(getRenderPosition) \
 BINDER_METHOD_ENTRY(getInputFramesLost) \
@@ -231,7 +231,9 @@
 BINDER_METHOD_ENTRY(setDeviceConnectedState) \
 BINDER_METHOD_ENTRY(setRequestedLatencyMode) \
 BINDER_METHOD_ENTRY(getSupportedLatencyModes) \
-
+BINDER_METHOD_ENTRY(setBluetoothLatencyModesEnabled) \
+BINDER_METHOD_ENTRY(supportsBluetoothLatencyModes) \
+BINDER_METHOD_ENTRY(getSoundDoseInterface) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -324,8 +326,11 @@
       mClientSharedHeapSize(kMinimumClientSharedHeapSizeBytes),
       mGlobalEffectEnableTime(0),
       mPatchPanel(this),
-      mDeviceEffectManager(this),
-      mSystemReady(false)
+      mPatchCommandThread(sp<PatchCommandThread>::make()),
+      mDeviceEffectManager(sp<DeviceEffectManager>::make(*this)),
+      mMelReporter(sp<MelReporter>::make(*this)),
+      mSystemReady(false),
+      mBluetoothLatencyModesEnabled(true)
 {
     // Move the audio session unique ID generator start base as time passes to limit risk of
     // generating the same ID again after an audioserver restart.
@@ -618,13 +623,20 @@
         fullConfig.format = config->format;
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
+        bool isBitPerfect;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, adjAttributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
-                                            deviceId, &portId, &secondaryOutputs, &isSpatialized);
+                                            deviceId, &portId, &secondaryOutputs, &isSpatialized,
+                                            &isBitPerfect);
+        if (ret != NO_ERROR) {
+            config->sample_rate = fullConfig.sample_rate;
+            config->channel_mask = fullConfig.channel_mask;
+            config->format = fullConfig.format;
+        }
         ALOGW_IF(!secondaryOutputs.empty(),
                  "%s does not support secondary outputs, ignoring them", __func__);
     } else {
@@ -665,27 +677,29 @@
 }
 
 /* static */
-int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+os::HapticScale AudioFlinger::onExternalVibrationStart(
+        const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
         int32_t ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
             ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
-            return ret;
+            return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
         }
     }
     ALOGD("%s, start external vibration with intensity as MUTE due to %s",
             __func__,
             evs == nullptr ? "external vibration service not found"
                            : "error when querying intensity");
-    return static_cast<int>(os::HapticScale::MUTE);
+    return os::HapticScale::MUTE;
 }
 
 /* static */
 void AudioFlinger::onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != 0) {
+        ALOGD("%s, stopping external vibration", __func__);
         evs->onExternalVibrationStop(*externalVibration);
     }
 }
@@ -753,15 +767,14 @@
 {
     String8 result;
 
-    result.append("Clients:\n");
-    result.append("   pid    heap_size\n");
+    result.append("Client Allocators:\n");
     for (size_t i = 0; i < mClients.size(); ++i) {
         sp<Client> client = mClients.valueAt(i).promote();
         if (client != 0) {
-            result.appendFormat("%6d %12zu\n", client->pid(),
-                    client->heap()->getMemoryHeap()->getSize());
+          result.appendFormat("Client: %d\n", client->pid());
+          result.append(client->allocator().dump().c_str());
         }
-    }
+    }
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
@@ -797,6 +810,11 @@
                             (uint32_t)(mStandbyTimeInNsecs / 1000000));
     result.append(buffer);
     write(fd, result.string(), result.size());
+
+    dprintf(fd, "Vibrator infos(size=%zu):\n", mAudioVibratorInfos.size());
+    for (const auto& vibratorInfo : mAudioVibratorInfos) {
+        dprintf(fd, "  - %s\n", vibratorInfo.toString().c_str());
+    }
 }
 
 void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
@@ -890,7 +908,10 @@
 
         mPatchPanel.dump(fd);
 
-        mDeviceEffectManager.dump(fd);
+        mDeviceEffectManager->dump(fd);
+
+        std::string melOutput = mMelReporter->dump();
+        write(fd, melOutput.c_str(), melOutput.size());
 
         // dump external setParameters
         auto dumpLogger = [fd](SimpleLog& logger, const char* name) {
@@ -1066,7 +1087,8 @@
     audio_stream_type_t streamType;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     std::vector<audio_io_handle_t> secondaryOutputs;
-    bool isSpatialized = false;;
+    bool isSpatialized = false;
+    bool isBitPerfect = false;
 
     // TODO b/182392553: refactor or make clearer
     pid_t clientPid =
@@ -1113,7 +1135,7 @@
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs,
-                                            &isSpatialized);
+                                            &isSpatialized, &isBitPerfect);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1179,12 +1201,16 @@
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
-                                      &lStatus, portId, input.audioTrackCallback, isSpatialized);
+                                      &lStatus, portId, input.audioTrackCallback, isSpatialized,
+                                      isBitPerfect);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
         output.afFrameCount = thread->frameCount();
         output.afSampleRate = thread->sampleRate();
+        output.afChannelMask = static_cast<audio_channel_mask_t>(thread->channelMask() |
+                                                                 thread->hapticChannelMask());
+        output.afFormat = thread->format();
         output.afLatencyMs = thread->latency();
         output.portId = portId;
 
@@ -1643,6 +1669,46 @@
     return thread->getSupportedLatencyModes(modes);
 }
 
+status_t AudioFlinger::setBluetoothLatencyModesEnabled(bool enabled) {
+    Mutex::Autolock _l(mLock);
+    status_t status = INVALID_OPERATION;
+    for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+        // Success if at least one PlaybackThread supports Bluetooth latency modes
+        if (mPlaybackThreads.valueAt(i)->setBluetoothLatencyModesEnabled(enabled) == NO_ERROR) {
+            status = NO_ERROR;
+        }
+    }
+    if (status == NO_ERROR) {
+        mBluetoothLatencyModesEnabled.store(enabled);
+    }
+    return status;
+}
+
+status_t AudioFlinger::supportsBluetoothLatencyModes(bool* support) {
+    if (support == nullptr) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    *support = false;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        if (mAudioHwDevs.valueAt(i)->supportsBluetoothLatencyModes()) {
+             *support = true;
+             break;
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t AudioFlinger::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                             sp<media::ISoundDose>* soundDose) {
+    if (soundDose == nullptr) {
+        return BAD_VALUE;
+    }
+
+    *soundDose = mMelReporter->getSoundDoseInterface(callback);
+    return NO_ERROR;
+}
+
 status_t AudioFlinger::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     // check calling permissions
@@ -2205,12 +2271,8 @@
 AudioFlinger::Client::Client(const sp<AudioFlinger>& audioFlinger, pid_t pid)
     :   RefBase(),
         mAudioFlinger(audioFlinger),
-        mPid(pid)
-{
-    mMemoryDealer = new MemoryDealer(
-            audioFlinger->getClientSharedHeapSize(),
-            (std::string("AudioFlinger::Client(") + std::to_string(pid) + ")").c_str());
-}
+        mPid(pid),
+        mClientAllocator(AllocatorFactory::getClientAllocator()) {}
 
 // Client destructor must be called with AudioFlinger::mClientLock held
 AudioFlinger::Client::~Client()
@@ -2218,9 +2280,9 @@
     mAudioFlinger->removeClient_l(mPid);
 }
 
-sp<MemoryDealer> AudioFlinger::Client::heap() const
+AllocatorFactory::ClientAllocator& AudioFlinger::Client::allocator()
 {
-    return mMemoryDealer;
+    return mClientAllocator;
 }
 
 // ----------------------------------------------------------------------------
@@ -2423,6 +2485,12 @@
             };
         }
 
+        output.halConfig = {
+                thread->sampleRate(),
+                thread->channelMask(),
+                thread->format()
+        };
+
         // Check if one effect chain was awaiting for an AudioRecord to be created on this
         // session and move it to this thread.
         sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
@@ -2544,6 +2612,13 @@
         flags = static_cast<AudioHwDevice::Flags>(flags | AudioHwDevice::AHWD_IS_INSERT);
     }
 
+
+    if (bool supports = false;
+            dev->supportsBluetoothLatencyModes(&supports) == NO_ERROR && supports) {
+        flags = static_cast<AudioHwDevice::Flags>(flags |
+                AudioHwDevice::AHWD_SUPPORTS_BT_LATENCY_MODES);
+    }
+
     audio_module_handle_t handle = (audio_module_handle_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_MODULE);
     AudioHwDevice *audioDevice = new AudioHwDevice(handle, name, dev, flags);
     if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
@@ -2748,9 +2823,28 @@
         ThreadBase *thread = (ThreadBase *)mMmapThreads.valueAt(i).get();
         thread->systemReady();
     }
+
+    // Java services are ready, so we can create a reference to AudioService
+    getOrCreateAudioManager();
+
     return NO_ERROR;
 }
 
+sp<IAudioManager> AudioFlinger::getOrCreateAudioManager()
+{
+    if (mAudioManager.load() == nullptr) {
+        // use checkService() to avoid blocking
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16(kAudioServiceName));
+        if (binder != nullptr) {
+            mAudioManager = interface_cast<IAudioManager>(binder);
+        } else {
+            ALOGE("%s(): binding to audio service failed.", __func__);
+        }
+    }
+    return mAudioManager.load();
+}
+
 status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
 {
     AutoMutex lock(mHardwareLock);
@@ -2866,7 +2960,11 @@
             return thread;
         } else {
             sp<PlaybackThread> thread;
-            if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+            if (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+                thread = sp<BitPerfectThread>::make(this, outputStream, *output, mSystemReady);
+                ALOGV("%s() created bit-perfect output: ID %d thread %p",
+                      __func__, *output, thread.get());
+            } else if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 thread = new SpatializerThread(this, outputStream, *output,
                                                     mSystemReady, mixerConfig);
                 ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
@@ -2894,6 +2992,7 @@
             if (thread->isMsdDevice()) {
                 thread->setDownStreamPatch(&patch);
             }
+            thread->setBluetoothLatencyModesEnabled(mBluetoothLatencyModesEnabled.load());
             return thread;
         }
     }
@@ -3339,17 +3438,23 @@
     closeInputFinish(thread);
 }
 
-status_t AudioFlinger::invalidateStream(audio_stream_type_t stream)
-{
+status_t AudioFlinger::invalidateTracks(const std::vector<audio_port_handle_t> &portIds) {
     Mutex::Autolock _l(mLock);
-    ALOGV("invalidateStream() stream %d", stream);
+    ALOGV("%s", __func__);
 
+    std::set<audio_port_handle_t> portIdSet(portIds.begin(), portIds.end());
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
-        thread->invalidateTracks(stream);
+        thread->invalidateTracks(portIdSet);
+        if (portIdSet.empty()) {
+            return NO_ERROR;
+        }
     }
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
-        mMmapThreads[i]->invalidateTracks(stream);
+        mMmapThreads[i]->invalidateTracks(portIdSet);
+        if (portIdSet.empty()) {
+            return NO_ERROR;
+        }
     }
     return NO_ERROR;
 }
@@ -4075,7 +4180,7 @@
         if (sessionId == AUDIO_SESSION_DEVICE) {
             sp<Client> client = registerPid(currentPid);
             ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
-            handle = mDeviceEffectManager.createEffect_l(
+            handle = mDeviceEffectManager->createEffect_l(
                     &descOut, device, client, effectClient, mPatchPanel.patches_l(),
                     &enabledOut, &lStatus, probe, request.notifyFramesProcessed);
             if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
@@ -4548,7 +4653,6 @@
         case TransactionCode::RESTORE_OUTPUT:
         case TransactionCode::OPEN_INPUT:
         case TransactionCode::CLOSE_INPUT:
-        case TransactionCode::INVALIDATE_STREAM:
         case TransactionCode::SET_VOICE_VOLUME:
         case TransactionCode::MOVE_EFFECTS:
         case TransactionCode::SET_EFFECT_SUSPENDED:
@@ -4563,6 +4667,7 @@
         case TransactionCode::SET_DEVICE_CONNECTED_STATE:
         case TransactionCode::SET_REQUESTED_LATENCY_MODE:
         case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
+        case TransactionCode::INVALIDATE_TRACKS:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -4584,18 +4689,21 @@
         case TransactionCode::SET_MASTER_VOLUME:
         case TransactionCode::SET_MASTER_MUTE:
         case TransactionCode::MASTER_MUTE:
+        case TransactionCode::GET_SOUND_DOSE_INTERFACE:
         case TransactionCode::SET_MODE:
         case TransactionCode::SET_MIC_MUTE:
         case TransactionCode::SET_LOW_RAM_DEVICE:
         case TransactionCode::SYSTEM_READY:
         case TransactionCode::SET_AUDIO_HAL_PIDS:
         case TransactionCode::SET_VIBRATOR_INFOS:
-        case TransactionCode::UPDATE_SECONDARY_OUTPUTS: {
+        case TransactionCode::UPDATE_SECONDARY_OUTPUTS:
+        case TransactionCode::SET_BLUETOOTH_LATENCY_MODES_ENABLED:
+        case TransactionCode::SUPPORTS_BLUETOOTH_LATENCY_MODES: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
                       IPCThreadState::self()->getCallingUid());
-                // return status only for non void methods
+                // return status only for non-void methods
                 switch (code) {
                     case TransactionCode::SYSTEM_READY:
                         break;
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index fc4c807..16ad964 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -75,15 +75,21 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/VolumeShaper.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/SharedMemoryAllocator.h>
 #include <mediautils/Synchronization.h>
 #include <mediautils/ThreadSnapshot.h>
 
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
 #include <audio_utils/LinearMap.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
 #include <audio_utils/SimpleLog.h>
 #include <audio_utils/TimestampVerifier.h>
 
+#include <sounddose/SoundDoseManager.h>
+#include <timing/MonotonicFrameCounter.h>
+
 #include "FastCapture.h"
 #include "FastMixer.h"
 #include <media/nbaio/NBAIO.h>
@@ -94,7 +100,7 @@
 #include "NBAIO_Tee.h"
 #include "ThreadMetrics.h"
 #include "TrackMetrics.h"
-
+#include "AllocatorFactory.h"
 #include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
@@ -117,6 +123,8 @@
 class DevicesFactoryHalInterface;
 class EffectsFactoryHalInterface;
 class FastMixer;
+class IAudioManager;
+class ISoundDoseCallback;
 class PassthruBufferProvider;
 class RecordBufferConverter;
 class ServerProxy;
@@ -202,8 +210,6 @@
 
     virtual status_t closeInput(audio_io_handle_t input);
 
-    virtual status_t invalidateStream(audio_stream_type_t stream);
-
     virtual status_t setVoiceVolume(float volume);
 
     virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
@@ -299,6 +305,15 @@
     virtual status_t getSupportedLatencyModes(audio_io_handle_t output,
             std::vector<audio_latency_mode_t>* modes);
 
+    virtual status_t setBluetoothLatencyModesEnabled(bool enabled);
+
+    virtual status_t supportsBluetoothLatencyModes(bool* support);
+
+    virtual status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                           sp<media::ISoundDose>* soundDose);
+
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
 
@@ -318,7 +333,8 @@
                             sp<MmapStreamInterface>& interface,
                             audio_port_handle_t *handle);
 
-    static int onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration);
+    static os::HapticScale onExternalVibrationStart(
+        const sp<os::ExternalVibration>& externalVibration);
     static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
 
     status_t addEffectToHal(audio_port_handle_t deviceId,
@@ -498,19 +514,19 @@
 
     // --- Client ---
     class Client : public RefBase {
-    public:
-                            Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
+      public:
+        Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
         virtual             ~Client();
-        sp<MemoryDealer>    heap() const;
+        AllocatorFactory::ClientAllocator& allocator();
         pid_t               pid() const { return mPid; }
         sp<AudioFlinger>    audioFlinger() const { return mAudioFlinger; }
 
     private:
         DISALLOW_COPY_AND_ASSIGN(Client);
 
-        const sp<AudioFlinger> mAudioFlinger;
-              sp<MemoryDealer> mMemoryDealer;
+        const sp<AudioFlinger>    mAudioFlinger;
         const pid_t         mPid;
+        AllocatorFactory::ClientAllocator mClientAllocator;
     };
 
     // --- Notification Client ---
@@ -580,6 +596,7 @@
     class OffloadThread;
     class DuplicatingThread;
     class AsyncCallbackThread;
+    class BitPerfectThread;
     class Track;
     class RecordTrack;
     class EffectBase;
@@ -629,10 +646,14 @@
 
 #include "PatchPanel.h"
 
+#include "PatchCommandThread.h"
+
 #include "Effects.h"
 
 #include "DeviceEffectManager.h"
 
+#include "MelReporter.h"
+
     // Find io handle by session id.
     // Preference is given to an io handle with a matching effect chain to session id.
     // If none found, AUDIO_IO_HANDLE_NONE is returned.
@@ -988,6 +1009,8 @@
                                       size_t rejectedKVPSize, const String8& rejectedKVPs,
                                       uid_t callingUid);
 
+    sp<IAudioManager> getOrCreateAudioManager();
+
 public:
     // These methods read variables atomically without mLock,
     // though the variables are updated with mLock.
@@ -1007,7 +1030,9 @@
     PatchPanel mPatchPanel;
     sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
 
-    DeviceEffectManager mDeviceEffectManager;
+    const sp<PatchCommandThread> mPatchCommandThread;
+    sp<DeviceEffectManager> mDeviceEffectManager;
+    sp<MelReporter> mMelReporter;
 
     bool       mSystemReady;
     std::atomic_bool mAudioPolicyReady{};
@@ -1029,6 +1054,12 @@
              std::vector<media::audio::common::AudioMMapPolicyInfo>> mPolicyInfos;
     int32_t mAAudioBurstsPerBuffer = 0;
     int32_t mAAudioHwBurstMinMicros = 0;
+
+    /** Interface for interacting with the AudioService. */
+    mediautils::atomic_sp<IAudioManager>       mAudioManager;
+
+    // Whether Bluetooth variable latency control logic is enabled
+    std::atomic_bool mBluetoothLatencyModesEnabled;
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index 8c5d239..0e840a6 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -40,6 +40,8 @@
         // Means that this isn't a terminal module, and software patches
         // are used to transport audio data further.
         AHWD_IS_INSERT              = 0x4,
+        // This Module supports BT Latency mode control
+        AHWD_SUPPORTS_BT_LATENCY_MODES = 0x8,
     };
 
     AudioHwDevice(audio_module_handle_t handle,
@@ -64,6 +66,10 @@
         return (0 != (mFlags & AHWD_IS_INSERT));
     }
 
+    bool supportsBluetoothLatencyModes() const {
+        return (0 != (mFlags & AHWD_SUPPORTS_BT_LATENCY_MODES));
+    }
+
     audio_module_handle_t handle() const { return mHandle; }
     const char *moduleName() const { return mModuleName; }
     sp<DeviceHalInterface> hwDevice() const { return mHwDevice; }
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 3a8c1bc..9105500 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -33,16 +33,6 @@
 using detail::AudioHalVersionInfo;
 using media::IEffectClient;
 
-void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
-        const PatchPanel::Patch& patch) {
-    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
-            __func__, handle, patch.mHalHandle,
-            patch.mAudioPatch.num_sinks,
-            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
-
-    mCommandThread->createAudioPatchCommand(handle, patch);
-}
-
 void AudioFlinger::DeviceEffectManager::onCreateAudioPatch(audio_patch_handle_t handle,
         const PatchPanel::Patch& patch) {
     ALOGV("%s handle %d mHalHandle %d device sink %08x",
@@ -56,11 +46,6 @@
     }
 }
 
-void AudioFlinger::DeviceEffectManager::releaseAudioPatch(audio_patch_handle_t handle) {
-    ALOGV("%s", __func__);
-    mCommandThread->releaseAudioPatchCommand(handle);
-}
-
 void AudioFlinger::DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
     Mutex::Autolock _l(mLock);
@@ -117,7 +102,7 @@
             }
         }
     }
-    if (enabled != NULL) {
+    if (enabled != nullptr) {
         *enabled = (int)effect->isEnabled();
     }
     *status = lStatus;
@@ -212,91 +197,4 @@
     return true;
 }
 
-// -----------  DeviceEffectManager::CommandThread implementation ----------
-
-
-AudioFlinger::DeviceEffectManager::CommandThread::~CommandThread()
-{
-    Mutex::Autolock _l(mLock);
-    mCommands.clear();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::onFirstRef()
-{
-    run("DeviceEffectManage_CommandThread", ANDROID_PRIORITY_AUDIO);
-}
-
-bool AudioFlinger::DeviceEffectManager::CommandThread::threadLoop()
-{
-    mLock.lock();
-    while (!exitPending())
-    {
-        while (!mCommands.empty() && !exitPending()) {
-            sp<Command> command = mCommands.front();
-            mCommands.pop_front();
-            mLock.unlock();
-
-            switch (command->mCommand) {
-            case CREATE_AUDIO_PATCH: {
-                CreateAudioPatchData *data = (CreateAudioPatchData *)command->mData.get();
-                ALOGV("CommandThread() processing create audio patch handle %d", data->mHandle);
-                mManager.onCreateAudioPatch(data->mHandle, data->mPatch);
-                } break;
-            case RELEASE_AUDIO_PATCH: {
-                ReleaseAudioPatchData *data = (ReleaseAudioPatchData *)command->mData.get();
-                ALOGV("CommandThread() processing release audio patch handle %d", data->mHandle);
-                mManager.onReleaseAudioPatch(data->mHandle);
-                } break;
-            default:
-                ALOGW("CommandThread() unknown command %d", command->mCommand);
-            }
-            mLock.lock();
-        }
-
-        // At this stage we have either an empty command queue or the first command in the queue
-        // has a finite delay. So unless we are exiting it is safe to wait.
-        if (!exitPending()) {
-            ALOGV("CommandThread() going to sleep");
-            mWaitWorkCV.wait(mLock);
-        }
-    }
-    mLock.unlock();
-    return false;
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::sendCommand(sp<Command> command) {
-    Mutex::Autolock _l(mLock);
-    mCommands.push_back(command);
-    mWaitWorkCV.signal();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::createAudioPatchCommand(
-        audio_patch_handle_t handle, const PatchPanel::Patch& patch)
-{
-    sp<Command> command = new Command(CREATE_AUDIO_PATCH, new CreateAudioPatchData(handle, patch));
-    ALOGV("CommandThread() adding create patch handle %d mHalHandle %d.", handle, patch.mHalHandle);
-    sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::releaseAudioPatchCommand(
-        audio_patch_handle_t handle)
-{
-    sp<Command> command = new Command(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
-    ALOGV("CommandThread() adding release patch");
-    sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::exit()
-{
-    ALOGV("CommandThread::exit");
-    {
-        AutoMutex _l(mLock);
-        requestExit();
-        mWaitWorkCV.signal();
-    }
-    // Note that we can call it from the thread loop if all other references have been released
-    // but it will safely return WOULD_BLOCK in this case
-    requestExitAndWait();
-}
-
 } // namespace android
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index d2faa70..7602f12 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -20,15 +20,15 @@
 #endif
 
 // DeviceEffectManager is concealed within AudioFlinger, their lifetimes are the same.
-class DeviceEffectManager {
+class DeviceEffectManager : public PatchCommandThread::PatchCommandListener {
 public:
-    explicit DeviceEffectManager(AudioFlinger* audioFlinger)
-        : mCommandThread(new CommandThread(*this)), mAudioFlinger(*audioFlinger),
-        mMyCallback(new DeviceEffectManagerCallback(this)) {}
+    explicit DeviceEffectManager(AudioFlinger& audioFlinger)
+        : mAudioFlinger(audioFlinger),
+          mMyCallback(new DeviceEffectManagerCallback(*this)) {}
 
-            ~DeviceEffectManager() {
-                mCommandThread->exit();
-            }
+    void onFirstRef() override {
+        mAudioFlinger.mPatchCommandThread->addListener(this);
+    }
 
     sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
                 const AudioDeviceTypeAddr& device,
@@ -39,8 +39,6 @@
                 status_t *status,
                 bool probe,
                 bool notifyFramesProcessed);
-    void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
-    void releaseAudioPatch(audio_patch_handle_t handle);
 
     size_t removeEffect(const sp<DeviceEffectProxy>& effect);
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -59,94 +57,25 @@
 
     void dump(int fd);
 
+    // PatchCommandThread::PatchCommandListener implementation
+
+    void onCreateAudioPatch(audio_patch_handle_t handle,
+                            const PatchPanel::Patch& patch) override;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
 private:
-
-    // Thread to execute create and release patch commands asynchronously. This is needed because
-    // PatchPanel::createAudioPatch and releaseAudioPatch are executed from audio policy service
-    // with mutex locked and effect management requires to call back into audio policy service
-    class Command;
-    class CommandThread : public Thread {
-    public:
-
-        enum {
-            CREATE_AUDIO_PATCH,
-            RELEASE_AUDIO_PATCH,
-        };
-
-        CommandThread(DeviceEffectManager& manager)
-            : Thread(false), mManager(manager) {}
-        ~CommandThread() override;
-
-        // Thread virtuals
-        void onFirstRef() override;
-        bool threadLoop() override;
-
-                void exit();
-
-                void createAudioPatchCommand(audio_patch_handle_t handle,
-                        const PatchPanel::Patch& patch);
-                void releaseAudioPatchCommand(audio_patch_handle_t handle);
-
-    private:
-        class CommandData;
-
-        // descriptor for requested tone playback event
-        class Command: public RefBase {
-        public:
-            Command() = default;
-            Command(int command, sp<CommandData> data)
-                : mCommand(command), mData(data) {}
-
-            int mCommand = -1;
-            sp<CommandData> mData;
-        };
-
-        class CommandData: public RefBase {
-        public:
-            virtual ~CommandData() = default;
-        };
-
-        class CreateAudioPatchData : public CommandData {
-        public:
-            CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
-                :   mHandle(handle), mPatch(patch) {}
-
-            audio_patch_handle_t mHandle;
-            const PatchPanel::Patch mPatch;
-        };
-
-        class ReleaseAudioPatchData : public CommandData {
-        public:
-            ReleaseAudioPatchData(audio_patch_handle_t handle)
-                :   mHandle(handle) {}
-
-            audio_patch_handle_t mHandle;
-        };
-
-        void sendCommand(sp<Command> command);
-
-        Mutex   mLock;
-        Condition mWaitWorkCV;
-        std::deque <sp<Command>> mCommands; // list of pending commands
-        DeviceEffectManager& mManager;
-    };
-
-    void onCreateAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
-    void onReleaseAudioPatch(audio_patch_handle_t handle);
-
     status_t checkEffectCompatibility(const effect_descriptor_t *desc);
 
     Mutex mLock;
-    sp<CommandThread> mCommandThread;
     AudioFlinger &mAudioFlinger;
     const sp<DeviceEffectManagerCallback> mMyCallback;
     std::map<AudioDeviceTypeAddr, sp<DeviceEffectProxy>> mDeviceEffects;
 };
 
-class DeviceEffectManagerCallback :  public EffectCallbackInterface {
+class DeviceEffectManagerCallback : public EffectCallbackInterface {
 public:
-            DeviceEffectManagerCallback(DeviceEffectManager *manager)
-                : mManager(*manager) {}
+    DeviceEffectManagerCallback(DeviceEffectManager& manager)
+        : mManager(manager) {}
 
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 98829d0..84b9c40 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -278,8 +278,8 @@
         if (!doRegister && !(registered && doEnable)) {
             return NO_ERROR;
         }
-        mPolicyLock.lock();
     }
+    mPolicyLock.lock();
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
         __func__, mDescriptor.name, mId, mSessionId, doRegister, registered, doEnable, enabled);
     if (doRegister) {
@@ -1597,7 +1597,7 @@
     return isHapticGenerator(&mDescriptor.type);
 }
 
-status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
 {
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1613,7 +1613,7 @@
     param->vsize = sizeof(int32_t) * 2;
     *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
     *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = intensity;
+    *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1762,7 +1762,14 @@
         return;
     }
     int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
-    mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset);
+    mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{
+            {static_cast<size_t>(EFFECT_PARAM_BUFFER_SIZE + bufOffset)},
+            std::string("Effect ID: ")
+                    .append(std::to_string(effect->id()))
+                    .append(" Session ID: ")
+                    .append(std::to_string(static_cast<int>(effect->sessionId())))
+                    .append(" \n")
+            });
     if (mCblkMemory == 0 ||
             (mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
         ALOGE("not enough memory for Effect size=%zu", EFFECT_PARAM_BUFFER_SIZE +
@@ -2670,7 +2677,7 @@
     return false;
 }
 
-void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
 {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2954,6 +2961,9 @@
     if ((*flags & AUDIO_OUTPUT_FLAG_FAST) != 0 && !isFastCompatible()) {
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
     }
+    if ((*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0 && !isBitPerfectCompatible()) {
+        *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+    }
 }
 
 void AudioFlinger::EffectChain::checkInputFlagCompatibility(audio_input_flags_t *flags) const
@@ -2991,6 +3001,18 @@
     return true;
 }
 
+bool AudioFlinger::EffectChain::isBitPerfectCompatible() const {
+    Mutex::Autolock _l(mLock);
+    for (const auto &effect : mEffects) {
+        if (effect->isProcessImplemented()
+                && effect->isImplementationSoftware()) {
+            return false;
+        }
+    }
+    // Allow effects that do no processing, as well as HW-accelerated effects.
+    return true;
+}
+
 // isCompatibleWithThread_l() must be called with thread->mLock held
 bool AudioFlinger::EffectChain::isCompatibleWithThread_l(const sp<ThreadBase>& thread) const
 {
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 78788df..7b71a85 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -280,7 +280,7 @@
     static bool      isHapticGenerator(const effect_uuid_t* type);
     bool             isHapticGenerator() const;
 
-    status_t         setHapticIntensity(int id, int intensity);
+    status_t         setHapticIntensity(int id, os::HapticScale intensity);
     status_t         setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo);
 
     status_t         getConfigs(audio_config_base_t* inputCfg,
@@ -545,12 +545,15 @@
     // Is this EffectChain compatible with the FAST audio flag.
     bool isFastCompatible() const;
 
+    // Is this EffectChain compatible with the bit-perfect audio flag.
+    bool isBitPerfectCompatible() const;
+
     // isCompatibleWithThread_l() must be called with thread->mLock held
     bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
 
     bool containsHapticGeneratingEffect_l();
 
-    void setHapticIntensity_l(int id, int intensity);
+    void setHapticIntensity_l(int id, os::HapticScale intensity);
 
     sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
     wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
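For context on the new EffectChain::isBitPerfectCompatible() declaration above, the Effects.cpp change earlier in this patch implements it by rejecting any software effect that actually processes audio. Below is a minimal standalone sketch of that rule; the Effect struct and free function are hypothetical stand-ins for illustration, not AudioFlinger types.

#include <memory>
#include <vector>

// Hypothetical stand-in for AudioFlinger::EffectModule.
struct Effect {
    bool processImplemented;   // does the effect touch the audio data?
    bool softwareImplemented;  // true for SW effects, false for HW-accelerated ones
};

// A chain is bit-perfect compatible only if no software effect processes the data.
bool isBitPerfectCompatible(const std::vector<std::shared_ptr<Effect>>& chain) {
    for (const auto& effect : chain) {
        if (effect->processImplemented && effect->softwareImplemented) {
            return false;  // a SW processing effect would modify the bit-exact stream
        }
    }
    // Effects that do no processing, and HW-accelerated effects, are allowed.
    return true;
}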
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
new file mode 100644
index 0000000..8cc7eab
--- /dev/null
+++ b/services/audioflinger/MelReporter.cpp
@@ -0,0 +1,124 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioFlinger::MelReporter"
+
+#include "AudioFlinger.h"
+
+#include <android/media/ISoundDoseCallback.h>
+#include <audio_utils/power.h>
+#include <utils/Log.h>
+
+namespace android {
+
+bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
+    switch (device) {
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_HEARING_AID:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+            return true;
+        default:
+            return false;
+    }
+}
+
+void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    ALOGV("%s: handle %d mHalHandle %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+    if (patch.mAudioPatch.num_sources == 0
+        || patch.mAudioPatch.sources[0].type != AUDIO_PORT_TYPE_MIX) {
+        ALOGW("%s: patch does not contain any mix sources", __func__);
+        return;
+    }
+
+    audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
+    ActiveMelPatch newPatch;
+    newPatch.streamHandle = streamHandle;
+    for (int i = 0; i < patch.mAudioPatch.num_sinks; ++ i) {
+        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
+            && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+            audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
+            newPatch.deviceHandles.push_back(deviceId);
+
+            // Start the MEL calculation in the PlaybackThread
+            std::lock_guard _lAf(mAudioFlinger.mLock);
+            auto thread = mAudioFlinger.checkPlaybackThread_l(streamHandle);
+            if (thread != nullptr) {
+                thread->startMelComputation(mSoundDoseManager.getOrCreateProcessorForDevice(
+                    deviceId,
+                    newPatch.streamHandle,
+                    thread->mSampleRate,
+                    thread->mChannelCount,
+                    thread->mFormat));
+            }
+        }
+    }
+
+    std::lock_guard _l(mLock);
+    mActiveMelPatches[patch.mAudioPatch.id] = newPatch;
+}
+
+void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
+    ALOGV("%s", __func__);
+
+    ActiveMelPatch melPatch;
+    {
+        std::lock_guard _l(mLock);
+
+        auto patchIt = mActiveMelPatches.find(handle);
+        if (patchIt == mActiveMelPatches.end()) {
+            ALOGW(
+                "%s patch does not contain any mix sources with active MEL calculation",
+                __func__);
+            return;
+        }
+
+        melPatch = patchIt->second;
+        mActiveMelPatches.erase(patchIt);
+    }
+
+    // Stop MEL calculation for the PlaybackThread
+    std::lock_guard _lAf(mAudioFlinger.mLock);
+    mSoundDoseManager.removeStreamProcessor(melPatch.streamHandle);
+    auto thread = mAudioFlinger.checkPlaybackThread_l(melPatch.streamHandle);
+    if (thread != nullptr) {
+        thread->stopMelComputation();
+    }
+}
+
+sp<media::ISoundDose> AudioFlinger::MelReporter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback) {
+    // no need to lock since getSoundDoseInterface is synchronized
+    return mSoundDoseManager.getSoundDoseInterface(callback);
+}
+
+std::string AudioFlinger::MelReporter::dump() {
+    std::lock_guard _l(mLock);
+    std::string output("\nSound Dose:\n");
+    output.append(mSoundDoseManager.dump());
+    return output;
+}
+
+}  // namespace android
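The patch bookkeeping in MelReporter::onCreateAudioPatch()/onReleaseAudioPatch() above reduces to a handle-keyed map guarded by its own mutex: remember the stream and eligible devices on create, look up and erase on release. A self-contained sketch of that pattern follows; all names and types here are hypothetical, and the real code additionally starts and stops a MEL processor on the playback thread.

#include <mutex>
#include <unordered_map>
#include <utility>
#include <vector>

using PatchHandle = int;
using DeviceHandle = int;

struct ActivePatch {
    int streamHandle = -1;
    std::vector<DeviceHandle> devices;  // sinks eligible for MEL computation
};

class PatchTracker {
public:
    void onCreate(PatchHandle handle, ActivePatch patch) {
        std::lock_guard _l(mLock);
        mActive[handle] = std::move(patch);
    }

    // Returns true and fills 'out' if the patch was being tracked.
    bool onRelease(PatchHandle handle, ActivePatch* out) {
        std::lock_guard _l(mLock);
        auto it = mActive.find(handle);
        if (it == mActive.end()) return false;  // nothing to stop
        *out = std::move(it->second);
        mActive.erase(it);
        return true;
    }

private:
    std::mutex mLock;
    std::unordered_map<PatchHandle, ActivePatch> mActive;
};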
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
new file mode 100644
index 0000000..f73b3d6
--- /dev/null
+++ b/services/audioflinger/MelReporter.h
@@ -0,0 +1,73 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+    #error This header file should only be included from AudioFlinger.h
+#endif
+
+#include <mutex>
+#include <sounddose/SoundDoseManager.h>
+#include <unordered_map>
+
+constexpr static int kMaxTimestampDeltaInSec = 120;
+
+/**
+ * Class for listening to new patches and starting the MEL computation. MelReporter is
+ * contained within AudioFlinger; their lifetimes are the same.
+ */
+class MelReporter : public PatchCommandThread::PatchCommandListener {
+public:
+    explicit MelReporter(AudioFlinger& audioFlinger)
+        : mAudioFlinger(audioFlinger) {}
+
+    void onFirstRef() override {
+        mAudioFlinger.mPatchCommandThread->addListener(this);
+    }
+
+    /** Returns true if we should compute MEL for the given device. */
+    static bool shouldComputeMelForDeviceType(audio_devices_t device);
+
+    // For now only support internal MelReporting
+    [[nodiscard]] bool isHalReportingEnabled() const { return false; }
+
+    sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+    std::string dump();
+
+    // PatchCommandListener methods
+    void onCreateAudioPatch(audio_patch_handle_t handle,
+                            const PatchPanel::Patch& patch) override;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
+private:
+    AudioFlinger& mAudioFlinger;  // does not own the object
+
+    SoundDoseManager mSoundDoseManager;
+
+    struct ActiveMelPatch {
+        audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
+        std::vector<audio_port_handle_t> deviceHandles;
+    };
+
+    /**
+     * Lock for protecting the active mel patches. Do not mix with the AudioFlinger lock.
+     * Locking order AudioFlinger::mLock -> PatchCommandThread::mLock -> MelReporter::mLock.
+     */
+    std::mutex mLock;
+    std::unordered_map<audio_patch_handle_t, ActiveMelPatch>
+        mActiveMelPatches GUARDED_BY(AudioFlinger::MelReporter::mLock);
+};
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index eb640bb..cb46c52 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -54,6 +54,14 @@
             bool        getAndSetSilencedNotified_l() { bool silencedNotified = mSilencedNotified;
                                                         mSilencedNotified = true;
                                                         return silencedNotified; }
+
+    /**
+     * Updates the mute state and notifies the audio service. Call this only when holding player
+     * thread lock.
+     */
+    void processMuteEvent_l(const sp<IAudioManager>& audioManager,
+                            mute_state_t muteState)
+                            REQUIRES(AudioFlinger::MmapPlaybackThread::mLock);
 private:
     friend class MmapThread;
 
@@ -71,5 +79,12 @@
     pid_t mPid;
     bool  mSilenced;            // protected by MMapThread::mLock
     bool  mSilencedNotified;    // protected by MMapThread::mLock
+
+    // TODO: replace PersistableBundle with own struct
+    // access these two variables only when holding player thread lock.
+    std::unique_ptr<os::PersistableBundle> mMuteEventExtras
+            GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
+    mute_state_t mMuteState
+            GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
 };  // end of Track
 
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
new file mode 100644
index 0000000..c3cb7e7
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -0,0 +1,156 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioFlinger::PatchCommandThread"
+//#define LOG_NDEBUG 0
+
+#include "AudioFlinger.h"
+
+namespace android {
+
+constexpr char kPatchCommandThreadName[] = "AudioFlinger_PatchCommandThread";
+
+AudioFlinger::PatchCommandThread::~PatchCommandThread() {
+    exit();
+
+    std::lock_guard _l(mLock);
+    mCommands.clear();
+}
+
+void AudioFlinger::PatchCommandThread::onFirstRef() {
+    run(kPatchCommandThreadName, ANDROID_PRIORITY_AUDIO);
+}
+
+void AudioFlinger::PatchCommandThread::addListener(const sp<PatchCommandListener>& listener) {
+    ALOGV("%s add listener %p", __func__, static_cast<void*>(listener.get()));
+    std::lock_guard _l(mListenerLock);
+    mListeners.emplace_back(listener);
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+    createAudioPatchCommand(handle, patch);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatch(audio_patch_handle_t handle) {
+    ALOGV("%s", __func__);
+    releaseAudioPatchCommand(handle);
+}
+
+bool AudioFlinger::PatchCommandThread::threadLoop() {
+    std::unique_lock _l(mLock);
+
+    while (!exitPending()) {
+        while (!mCommands.empty() && !exitPending()) {
+            const sp<Command> command = mCommands.front();
+            mCommands.pop_front();
+            _l.unlock();
+
+            std::vector<wp<PatchCommandListener>> listenersCopy;
+            {
+                std::lock_guard _ll(mListenerLock);
+                listenersCopy = mListeners;
+            }
+
+            switch (command->mCommand) {
+                case CREATE_AUDIO_PATCH: {
+                    const auto data = (CreateAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing create audio patch handle %d",
+                          __func__,
+                          data->mHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onCreateAudioPatch(data->mHandle, data->mPatch);
+                        }
+                    }
+                }
+                    break;
+                case RELEASE_AUDIO_PATCH: {
+                    const auto data = (ReleaseAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing release audio patch handle %d",
+                          __func__,
+                          data->mHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onReleaseAudioPatch(data->mHandle);
+                        }
+                    }
+                }
+                    break;
+                default:
+                    ALOGW("%s unknown command %d", __func__, command->mCommand);
+                    break;
+            }
+            _l.lock();
+        }
+
+        // At this stage the command queue is empty or exit is pending.
+        // So unless we are exiting it is safe to wait.
+        if (!exitPending()) {
+            ALOGV("%s going to sleep", __func__);
+            mWaitWorkCV.wait(_l);
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::PatchCommandThread::sendCommand(const sp<Command>& command) {
+    std::lock_guard _l(mLock);
+    mCommands.emplace_back(command);
+    mWaitWorkCV.notify_one();
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatchCommand(
+        audio_patch_handle_t handle, const PatchPanel::Patch& patch) {
+    auto command = sp<Command>::make(CREATE_AUDIO_PATCH,
+                                     new CreateAudioPatchData(handle, patch));
+    ALOGV("%s adding create patch handle %d mHalHandle %d.",
+          __func__,
+          handle,
+          patch.mHalHandle);
+    sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatchCommand(audio_patch_handle_t handle) {
+    sp<Command> command =
+        sp<Command>::make(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
+    ALOGV("%s adding release patch", __func__);
+    sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::exit() {
+    ALOGV("%s", __func__);
+    {
+        std::lock_guard _l(mLock);
+        requestExit();
+        mWaitWorkCV.notify_one();
+    }
+    // Note that requestExitAndWait() can be called from the thread loop itself if all other
+    // references have been released, but it will safely return WOULD_BLOCK in this case.
+    requestExitAndWait();
+}
+
+}  // namespace android
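Stripped of the AudioFlinger specifics, PatchCommandThread::threadLoop() above is a single-consumer command queue: commands are appended under a mutex, the worker pops them, releases the lock while dispatching, and sleeps on a condition variable when the queue is empty. A self-contained sketch of that pattern, using std::thread and std::function in place of android::Thread and the Command classes (names hypothetical):

#include <condition_variable>
#include <deque>
#include <functional>
#include <mutex>
#include <thread>

class CommandQueue {
public:
    CommandQueue() : mWorker([this] { loop(); }) {}

    ~CommandQueue() {
        {
            std::lock_guard _l(mLock);
            mExit = true;
            mWaitWorkCV.notify_one();
        }
        mWorker.join();
    }

    void send(std::function<void()> command) {
        std::lock_guard _l(mLock);
        mCommands.push_back(std::move(command));
        mWaitWorkCV.notify_one();
    }

private:
    void loop() {
        std::unique_lock _l(mLock);
        while (!mExit) {
            while (!mCommands.empty() && !mExit) {
                auto command = std::move(mCommands.front());
                mCommands.pop_front();
                _l.unlock();        // never dispatch with the queue lock held
                command();
                _l.lock();
            }
            if (!mExit) {
                mWaitWorkCV.wait(_l);  // sleep until send() or shutdown wakes us
            }
        }
    }

    std::mutex mLock;
    std::condition_variable mWaitWorkCV;
    std::deque<std::function<void()>> mCommands;
    bool mExit = false;
    std::thread mWorker;  // last member, so everything above exists before loop() runs
};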
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
new file mode 100644
index 0000000..b7853f0
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.h
@@ -0,0 +1,102 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+    #error This header file should only be included from AudioFlinger.h
+#endif
+
+class Command;
+
+// Thread to execute create and release patch commands asynchronously. This is needed because
+// PatchPanel::createAudioPatch and releaseAudioPatch are executed from the audio policy service
+// with its mutex held, and effect management requires calling back into the audio policy service.
+class PatchCommandThread : public Thread {
+public:
+
+    enum {
+        CREATE_AUDIO_PATCH,
+        RELEASE_AUDIO_PATCH,
+    };
+
+    class PatchCommandListener : public virtual RefBase {
+    public:
+        virtual void onCreateAudioPatch(audio_patch_handle_t handle,
+                                        const PatchPanel::Patch& patch) = 0;
+        virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+    };
+
+    PatchCommandThread() : Thread(false /* canCallJava */) {}
+    ~PatchCommandThread() override;
+
+    void addListener(const sp<PatchCommandListener>& listener);
+
+    void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
+    void releaseAudioPatch(audio_patch_handle_t handle);
+
+    // Thread virtuals
+    void onFirstRef() override;
+    bool threadLoop() override;
+
+    void exit();
+
+    void createAudioPatchCommand(audio_patch_handle_t handle,
+            const PatchPanel::Patch& patch);
+    void releaseAudioPatchCommand(audio_patch_handle_t handle);
+
+private:
+    class CommandData;
+
+    // Command type received from the PatchPanel
+    class Command: public RefBase {
+    public:
+        Command() = default;
+        Command(int command, const sp<CommandData>& data)
+            : mCommand(command), mData(data) {}
+
+        const int mCommand = -1;
+        const sp<CommandData> mData;
+    };
+
+    class CommandData: public RefBase {};
+
+    class CreateAudioPatchData : public CommandData {
+    public:
+        CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
+            :   mHandle(handle), mPatch(patch) {}
+
+        const audio_patch_handle_t mHandle;
+        const PatchPanel::Patch mPatch;
+    };
+
+    class ReleaseAudioPatchData : public CommandData {
+    public:
+        ReleaseAudioPatchData(audio_patch_handle_t handle)
+            :   mHandle(handle) {}
+
+        audio_patch_handle_t mHandle;
+    };
+
+    void sendCommand(const sp<Command>& command);
+
+    std::string mThreadName;
+    std::mutex mLock;
+    std::condition_variable mWaitWorkCV;
+    std::deque<sp<Command>> mCommands GUARDED_BY(mLock); // list of pending commands
+
+    std::mutex mListenerLock;
+    std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(mListenerLock);
+};
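One detail of the listener handling above worth calling out: listeners are held as weak references, snapshotted under mListenerLock, and promoted right before each callback, so a destroyed listener is simply skipped and no lock is held while calling out. A minimal sketch of the same pattern, with std::weak_ptr standing in for wp<> (all names hypothetical):

#include <memory>
#include <mutex>
#include <vector>

struct Listener {
    virtual ~Listener() = default;
    virtual void onEvent(int value) = 0;
};

class Notifier {
public:
    void addListener(const std::shared_ptr<Listener>& l) {
        std::lock_guard _l(mListenerLock);
        mListeners.emplace_back(l);   // store weakly; do not extend the listener's lifetime
    }

    void notify(int value) {
        std::vector<std::weak_ptr<Listener>> copy;
        {
            std::lock_guard _l(mListenerLock);
            copy = mListeners;        // snapshot, then drop the lock before calling out
        }
        for (const auto& weak : copy) {
            if (auto sp = weak.lock()) {   // equivalent of wp<>::promote()
                sp->onEvent(value);        // dead listeners are silently skipped
            }
        }
    }

private:
    std::mutex mListenerLock;
    std::vector<std::weak_ptr<Listener>> mListeners;
};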
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index b54b41f..3b428bb 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -449,7 +449,7 @@
     if (status == NO_ERROR) {
         *handle = (audio_patch_handle_t) mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH);
         newPatch.mHalHandle = halHandle;
-        mAudioFlinger.mDeviceEffectManager.createAudioPatch(*handle, newPatch);
+        mAudioFlinger.mPatchCommandThread->createAudioPatch(*handle, newPatch);
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
             addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
         }
@@ -800,7 +800,7 @@
 void AudioFlinger::PatchPanel::erasePatch(audio_patch_handle_t handle) {
     mPatches.erase(handle);
     removeSoftwarePatchFromInsertedModules(handle);
-    mAudioFlinger.mDeviceEffectManager.releaseAudioPatch(handle);
+    mAudioFlinger.mPatchCommandThread->releaseAudioPatch(handle);
 }
 
 /* List connected audio ports and they attributes */
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 33983d7..9560609 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -83,7 +83,8 @@
                                   * ready as possible (aka. Buffer is full). */
                                 size_t frameCountToBeReady = SIZE_MAX,
                                 float speed = 1.0f,
-                                bool isSpatialized = false);
+                                bool isSpatialized = false,
+                                bool isBitPerfect = false);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -147,8 +148,12 @@
     sp<media::VolumeHandler>   getVolumeHandler() { return mVolumeHandler; }
     /** Set the computed normalized final volume of the track.
      * !masterMute * masterVolume * streamVolume * averageLRVolume */
-    void                setFinalVolume(float volume);
+    void                setFinalVolume(float volumeLeft, float volumeRight);
     float               getFinalVolume() const { return mFinalVolume; }
+    void                getFinalVolume(float* left, float* right) const {
+                            *left = mFinalVolumeLeft;
+                            *right = mFinalVolumeRight;
+    }
 
     using SourceMetadatas = std::vector<playback_track_metadata_v7_t>;
     using MetadataInserter = std::back_insert_iterator<SourceMetadatas>;
@@ -203,6 +208,13 @@
     audio_output_flags_t getOutputFlags() const { return mFlags; }
     float getSpeed() const { return mSpeed; }
     bool isSpatialized() const override { return mIsSpatialized; }
+    bool isBitPerfect() const override { return mIsBitPerfect; }
+
+    /**
+     * Updates the mute state and notifies the audio service. Call this only when holding player
+     * thread lock.
+     */
+    void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState);
 
 protected:
     // for numerous
@@ -347,6 +359,10 @@
                                         // 'volatile' means accessed without lock or
                                         // barrier, but is read/written atomically
     float               mFinalVolume; // combine master volume, stream type volume and track volume
+    float               mFinalVolumeLeft; // combine master volume, stream type volume and track
+                                          // volume
+    float               mFinalVolumeRight; // combine master volume, stream type volume and track
+                                           // volume
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
@@ -355,6 +371,12 @@
     TeePatches  mTeePatches;
     const float         mSpeed;
     const bool          mIsSpatialized;
+    const bool          mIsBitPerfect;
+
+    // TODO: replace PersistableBundle with own struct
+    // access these two variables only when holding player thread lock.
+    std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
+    mute_state_t        mMuteState;
 };  // end of Track
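The mute_state_t passed to the new Track::processMuteEvent_l() is assembled in Threads.cpp from several independent mute reasons (master mute, zero stream volume, stream mute, playback restriction, zero client volume, zero volume-shaper volume). The sketch below illustrates that composition; the struct layout and the change-only reporting are assumptions for illustration, since the patch itself only shows the call sites and the stored mMuteState member.

#include <cstdio>

// Hypothetical aggregate of the individual mute reasons checked in prepareTracks_l().
struct MuteState {
    bool masterMute = false;
    bool streamVolumeZero = false;
    bool streamMuted = false;
    bool playbackRestricted = false;
    bool clientVolumeZero = false;
    bool volumeShaperZero = false;

    bool operator==(const MuteState& o) const {
        return masterMute == o.masterMute && streamVolumeZero == o.streamVolumeZero
                && streamMuted == o.streamMuted && playbackRestricted == o.playbackRestricted
                && clientVolumeZero == o.clientVolumeZero && volumeShaperZero == o.volumeShaperZero;
    }
};

// Only report to the audio service when the combined state actually changes.
void processMuteEvent(MuteState& last, const MuteState& current) {
    if (current == last) return;
    last = current;
    std::printf("mute state changed: muted=%d\n",
            current.masterMute || current.streamVolumeZero || current.streamMuted
                    || current.playbackRestricted || current.clientVolumeZero
                    || current.volumeShaperZero);
}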
 
 
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/services/audioflinger/TEST_MAPPING
+++ b/services/audioflinger/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index cf2aaed..e6d38cc 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -17,7 +17,7 @@
 
 
 #define LOG_TAG "AudioFlinger"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
 #include "Configuration.h"
@@ -31,6 +31,7 @@
 #include <sys/syscall.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
+#include <binder/PersistableBundle.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioParameter.h>
@@ -43,6 +44,7 @@
 #include <private/media/AudioTrackShared.h>
 #include <private/android_filesystem_config.h>
 #include <audio_utils/Balance.h>
+#include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
 #include <audio_utils/channels.h>
 #include <audio_utils/mono_blend.h>
@@ -534,6 +536,8 @@
         return "MMAP_CAPTURE";
     case SPATIALIZER:
         return "SPATIALIZER";
+    case BIT_PERFECT:
+        return "BIT_PERFECT";
     default:
         return "unknown";
     }
@@ -807,6 +811,7 @@
                                             (CreateAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
             const DeviceTypeSet newDevices = getDeviceTypes();
+            configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -817,6 +822,7 @@
                                             (ReleaseAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = releaseAudioPatch_l(data->mHandle);
             const DeviceTypeSet newDevices = getDeviceTypes();
+            configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -1515,6 +1521,26 @@
             }
         }
         break;
+    case BIT_PERFECT:
+        if ((desc->flags & EFFECT_FLAG_HW_ACC_TUNNEL) != 0) {
+            // Allow HW accelerated effects of tunnel type
+            break;
+        }
+        // Bit-perfect tracks are not allowed to apply effects that touch the audio data, so
+        // effects are rejected for 1) global sessions (AUDIO_SESSION_OUTPUT_MIX),
+        // 2) post-processing sessions (AUDIO_SESSION_OUTPUT_STAGE or AUDIO_SESSION_DEVICE), and
+        // 3) any session that already has a bit-perfect track.
+        if (sessionId == AUDIO_SESSION_OUTPUT_MIX || sessionId == AUDIO_SESSION_OUTPUT_STAGE ||
+            sessionId == AUDIO_SESSION_DEVICE) {
+            ALOGW("%s: effect %s not supported on bit-perfect thread %s",
+                  __func__, desc->name, mThreadName);
+            return BAD_VALUE;
+        } else if ((hasAudioSession_l(sessionId) & ThreadBase::BIT_PERFECT_SESSION) != 0) {
+            ALOGW("%s: effect %s not supported as there is a bit-perfect track with session as %d",
+                  __func__, desc->name, sessionId);
+            return BAD_VALUE;
+        }
+        break;
     default:
         LOG_ALWAYS_FATAL("checkEffectCompatibility_l(): wrong thread type %d", mType);
     }
@@ -2063,7 +2089,8 @@
         mHwSupportsPause(false), mHwPaused(false), mFlushPending(false),
         mLeftVolFloat(-1.0), mRightVolFloat(-1.0),
         mDownStreamPatch{},
-        mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs)
+        mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs),
+        mBluetoothLatencyModesEnabled(true)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
@@ -2279,7 +2306,8 @@
         status_t *status,
         audio_port_handle_t portId,
         const sp<media::IAudioTrackCallback>& callback,
-        bool isSpatialized)
+        bool isSpatialized,
+        bool isBitPerfect)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2311,6 +2339,25 @@
         *flags = (audio_output_flags_t)(*flags & outputFlags);
     }
 
+    if (isBitPerfect) {
+        sp<EffectChain> chain = getEffectChain_l(sessionId);
+        if (chain.get() != nullptr) {
+            // Bit-perfect is required according to the configuration and preferred mixer
+            // attributes, but it is not in the output flags from the client's request.
+            // Explicitly add the bit-perfect flag to check compatibility.
+            audio_output_flags_t flagsToCheck =
+                    (audio_output_flags_t)(*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+            chain->checkOutputFlagCompatibility(&flagsToCheck);
+            if ((flagsToCheck & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE) {
+                ALOGE("%s cannot create track as there is data-processing effect attached to "
+                      "given session id(%d)", __func__, sessionId);
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
+            *flags = flagsToCheck;
+        }
+    }
+
     // client expresses a preference for FAST, but we get the final say
     if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
       if (
@@ -2491,6 +2538,18 @@
     *pNotificationFrameCount = notificationFrameCount;
 
     switch (mType) {
+    case BIT_PERFECT:
+        if (isBitPerfect) {
+            if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
+                ALOGE("%s, bad parameter when request streaming bit-perfect, sampleRate=%u, "
+                      "format=%#x, channelMask=%#x, mSampleRate=%u, mFormat=%#x, mChannelMask=%#x",
+                      __func__, sampleRate, format, channelMask, mSampleRate, mFormat,
+                      mChannelMask);
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
+        }
+        break;
 
     case DIRECT:
         if (audio_is_linear_pcm(format)) { // TODO maybe use audio_has_proportional_frames()?
@@ -2572,7 +2631,7 @@
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
                           TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
-                          speed, isSpatialized);
+                          speed, isSpatialized, isBitPerfect);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2728,7 +2787,11 @@
             }
             // abort if start is rejected by audio policy manager
             if (status != NO_ERROR) {
-                return PERMISSION_DENIED;
+                // Do not replace the error if it is DEAD_OBJECT. When this happens, it indicates
+                // that the current playback thread has been reopened, which may happen when a
+                // client sets a preferred mixer configuration. Returning DEAD_OBJECT makes the
+                // client restore the track immediately.
+                return status == DEAD_OBJECT ? status : PERMISSION_DENIED;
             }
 #ifdef ADD_BATTERY_DATA
             // to track the speaker usage
@@ -2758,7 +2821,7 @@
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mLock.unlock();
-            const int intensity = AudioFlinger::onExternalVibrationStart(
+            const os::HapticScale intensity = AudioFlinger::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
@@ -2768,7 +2831,7 @@
                 vibratorInfo = std::move(mAudioFlinger->getDefaultVibratorInfo_l());
             }
             mLock.lock();
-            track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
+            track->setHapticIntensity(intensity);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
             }
@@ -3349,8 +3412,16 @@
         }
         ssize_t framesWritten = mNormalSink->write((char *)mSinkBuffer + offset, count);
         ATRACE_END();
+
         if (framesWritten > 0) {
             bytesWritten = framesWritten * mFrameSize;
+
+            // Send to MelProcessor for sound dose measurement.
+            auto processor = mMelProcessor.load();
+            if (processor) {
+                processor->process((char *)mSinkBuffer + offset, bytesWritten);
+            }
+
 #ifdef TEE_SINK
             mTee.write((char *)mSinkBuffer + offset, framesWritten);
 #endif
@@ -3393,6 +3464,18 @@
     return bytesWritten;
 }
 
+void AudioFlinger::PlaybackThread::startMelComputation(
+        const sp<audio_utils::MelProcessor>& processor)
+{
+    ALOGV("%s: starting mel processor for thread %d", __func__, id());
+    mMelProcessor = processor;
+}
+
+void AudioFlinger::PlaybackThread::stopMelComputation() {
+    ALOGV("%s: stopping mel processor for thread %d", __func__, id());
+    mMelProcessor = nullptr;
+}
+
 void AudioFlinger::PlaybackThread::threadLoop_drain()
 {
     bool supportsDrain = false;
@@ -3451,9 +3534,15 @@
     mActiveSleepTimeUs = activeSleepTimeUs();
     mIdleSleepTimeUs = idleSleepTimeUs();
 
+    mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
+    // Shorten standby delay on VOIP RX output to avoid delayed routing updates
+    // after a call due to call end tone.
+    if (mOutput != nullptr && (mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
+        const nsecs_t NS_PER_MS = 1000000;
+        mStandbyDelayNs = std::min(mStandbyDelayNs, latency_l() * NS_PER_MS);
+    }
     // make sure standby delay is not too short when connected to an A2DP sink to avoid
     // truncating audio when going to standby.
-    mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
     if (!Intersection(outDeviceTypes(),  getAudioDeviceOutAllA2dpSet()).empty()) {
         if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
             mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
@@ -3483,6 +3572,28 @@
     invalidateTracks_l(streamType);
 }
 
+void AudioFlinger::PlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+    Mutex::Autolock _l(mLock);
+    invalidateTracks_l(portIds);
+}
+
+bool AudioFlinger::PlaybackThread::invalidateTracks_l(std::set<audio_port_handle_t>& portIds) {
+    bool trackMatch = false;
+    const size_t size = mTracks.size();
+    for (size_t i = 0; i < size; i++) {
+        sp<Track> t = mTracks[i];
+        if (t->isExternalTrack() && portIds.find(t->portId()) != portIds.end()) {
+            t->invalidate();
+            portIds.erase(t->portId());
+            trackMatch = true;
+        }
+        if (portIds.empty()) {
+            break;
+        }
+    }
+    return trackMatch;
+}
+
 // getTrackById_l must be called with holding thread lock
 AudioFlinger::PlaybackThread::Track* AudioFlinger::PlaybackThread::getTrackById_l(
         audio_port_handle_t trackPortId) {
@@ -3986,7 +4097,8 @@
             // Either threadLoop_mix() or threadLoop_sleepTime() should have set
             // mMixerBuffer with data if mMixerBufferValid is true and mSleepTimeUs == 0.
             // Merge mMixerBuffer data into mEffectBuffer (if any effects are valid)
-            // or mSinkBuffer (if there are no effects).
+            // or mSinkBuffer (if there are no effects and there is no data already copied to
+            // mSinkBuffer).
             //
             // This is done pre-effects computation; if effects change to
             // support higher precision, this needs to move.
@@ -3995,7 +4107,7 @@
             // TODO use mSleepTimeUs == 0 as an additional condition.
             uint32_t mixerChannelCount = mEffectBufferValid ?
                         audio_channel_count_from_out_mask(mMixerChannelMask) : mChannelCount;
-            if (mMixerBufferValid) {
+            if (mMixerBufferValid && (mEffectBufferValid || !mHasDataCopiedToSinkBuffer)) {
                 void *buffer = mEffectBufferValid ? mEffectBuffer : mSinkBuffer;
                 audio_format_t format = mEffectBufferValid ? mEffectBufferFormat : mFormat;
 
@@ -4086,7 +4198,7 @@
         // Effects buffer (buffer valid), we need to
         // copy into the sink buffer.
         // TODO use mSleepTimeUs == 0 as an additional condition.
-        if (mEffectBufferValid) {
+        if (mEffectBufferValid && !mHasDataCopiedToSinkBuffer) {
             //ALOGV("writing effect buffer to sink buffer format %#x", mFormat);
             void *effectBuffer = (mType == SPATIALIZER) ? mPostSpatializerBuffer : mEffectBuffer;
             if (requireMonoBlend()) {
@@ -4513,7 +4625,7 @@
             // When the track is stop, set the haptic intensity as MUTE
             // for the HapticGenerator effect.
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+                chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
             }
         }
     }
@@ -4662,6 +4774,9 @@
     if (configChanged) {
         sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -4692,6 +4807,9 @@
     } else {
         status = mOutput->stream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -4761,7 +4879,7 @@
 
     // initialize fast mixer depending on configuration
     bool initFastMixer;
-    if (mType == SPATIALIZER) {
+    if (mType == SPATIALIZER || mType == BIT_PERFECT) {
         initFastMixer = false;
     } else {
         switch (kUseFastMixer) {
@@ -5404,10 +5522,21 @@
                 volume *= vh;
                 track->mCachedVolume = volume;
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
-                float vlf = volume * float_from_gain(gain_minifloat_unpack_left(vlr));
-                float vrf = volume * float_from_gain(gain_minifloat_unpack_right(vlr));
+                float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
+                float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
 
-                track->setFinalVolume((vlf + vrf) / 2.f);
+                track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                    /*muteState=*/{masterVolume == 0.f,
+                                   mStreamTypes[track->streamType()].volume == 0.f,
+                                   mStreamTypes[track->streamType()].mute,
+                                   track->isPlaybackRestricted(),
+                                   vlf == 0.f && vrf == 0.f,
+                                   vh == 0.f});
+
+                vlf *= volume;
+                vrf *= volume;
+
+                track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
                 // was it previously active?
@@ -5578,6 +5707,15 @@
                     ALOGV("Track right volume out of range: %.3g", vrf);
                     vrf = GAIN_FLOAT_UNITY;
                 }
+
+                track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                    /*muteState=*/{masterVolume == 0.f,
+                                   mStreamTypes[track->streamType()].volume == 0.f,
+                                   mStreamTypes[track->streamType()].mute,
+                                   track->isPlaybackRestricted(),
+                                   vlf == 0.f && vrf == 0.f,
+                                   vh == 0.f});
+
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
                 vrf *= v * vh;
@@ -5597,7 +5735,7 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume((vrf + vlf) / 2.f);
+            track->setFinalVolume(vrf, vlf);
 
             // Delegate volume control to effect in track effect chain if needed
             if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -6168,31 +6306,53 @@
 
     // Ensure volumeshaper state always advances even when muted.
     const sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
-    const auto [shaperVolume, shaperActive] = track->getVolumeHandler()->getVolume(
-            proxy->framesReleased());
+
+    const size_t framesReleased = proxy->framesReleased();
+    const int64_t frames = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+    const int64_t time = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+    ALOGV("%s: Direct/Offload bufferConsumed:%zu  timestamp frames:%lld  time:%lld",
+            __func__, framesReleased, (long long)frames, (long long)time);
+
+    const int64_t volumeShaperFrames =
+            mMonotonicFrameCounter.updateAndGetMonotonicFrameCount(frames, time);
+    const auto [shaperVolume, shaperActive] =
+            track->getVolumeHandler()->getVolume(volumeShaperFrames);
     mVolumeShaperActive = shaperActive;
 
+    gain_minifloat_packed_t vlr = proxy->getVolumeLR();
+    left = float_from_gain(gain_minifloat_unpack_left(vlr));
+    right = float_from_gain(gain_minifloat_unpack_right(vlr));
+
+    const bool clientVolumeMute = (left == 0.f && right == 0.f);
+
     if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
         left = right = 0;
     } else {
         float typeVolume = mStreamTypes[track->streamType()].volume;
         const float v = mMasterVolume * typeVolume * shaperVolume;
 
-        gain_minifloat_packed_t vlr = proxy->getVolumeLR();
-        left = float_from_gain(gain_minifloat_unpack_left(vlr));
         if (left > GAIN_FLOAT_UNITY) {
             left = GAIN_FLOAT_UNITY;
         }
-        left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-        right = float_from_gain(gain_minifloat_unpack_right(vlr));
         if (right > GAIN_FLOAT_UNITY) {
             right = GAIN_FLOAT_UNITY;
         }
+
+        left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
         right *= v * mMasterBalanceRight;
     }
 
+    track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+        /*muteState=*/{mMasterMute,
+                       mStreamTypes[track->streamType()].volume == 0.f,
+                       mStreamTypes[track->streamType()].mute,
+                       track->isPlaybackRestricted(),
+                       clientVolumeMute,
+                       shaperVolume == 0.f});
+
     if (lastTrack) {
-        track->setFinalVolume((left + right) / 2.f);
+        track->setFinalVolume(left, right);
         if (left != mLeftVolFloat || right != mRightVolFloat) {
             mLeftVolFloat = left;
             mRightVolFloat = right;
@@ -6639,6 +6799,7 @@
     mFlushPending = false;
     mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
+    mMonotonicFrameCounter.onFlush();
 }
 
 int64_t AudioFlinger::DirectOutputThread::computeWaitTimeNs_l() const {
@@ -7080,6 +7241,13 @@
     }
 }
 
+void AudioFlinger::OffloadThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+    Mutex::Autolock _l(mLock);
+    if (PlaybackThread::invalidateTracks_l(portIds)) {
+        mFlushPending = true;
+    }
+}
+
 // ----------------------------------------------------------------------------
 
 AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
@@ -7434,6 +7602,15 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::PlaybackThread::setBluetoothLatencyModesEnabled(bool enabled) {
+    if (mOutput == nullptr || mOutput->audioHwDev == nullptr
+            || !mOutput->audioHwDev->supportsBluetoothLatencyModes()) {
+        return INVALID_OPERATION;
+    }
+    mBluetoothLatencyModesEnabled.store(enabled);
+    return NO_ERROR;
+}
+
 void AudioFlinger::SpatializerThread::checkOutputStageEffects()
 {
     bool hasVirtualizer = false;
@@ -9331,6 +9508,9 @@
         track->logEndInterval();
         track->logBeginInterval(pathSourcesAsString);
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -9347,6 +9527,9 @@
     } else {
         status = mInput->stream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -9643,8 +9826,10 @@
     return mHalStream->getMmapPosition(position);
 }
 
-status_t AudioFlinger::MmapThread::exitStandby()
+status_t AudioFlinger::MmapThread::exitStandby_l()
 {
+    // The HAL must receive track metadata before starting the stream
+    updateMetadata_l();
     status_t ret = mHalStream->start();
     if (ret != NO_ERROR) {
         ALOGE("%s: error mHalStream->start() = %d for first track", __FUNCTION__, ret);
@@ -9670,13 +9855,10 @@
 
     status_t ret;
 
+    // For the first track, reuse portId and session allocated when the stream was opened.
     if (*handle == mPortId) {
-        // For the first track, reuse portId and session allocated when the stream was opened.
-        ret = exitStandby();
-        if (ret == NO_ERROR) {
-            acquireWakeLock();
-        }
-        return ret;
+        acquireWakeLock();
+        return NO_ERROR;
     }
 
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
@@ -9693,6 +9875,7 @@
         audio_port_handle_t deviceId = mDeviceId;
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
+        bool isBitPerfect;
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
@@ -9702,7 +9885,8 @@
                                             &deviceId,
                                             &portId,
                                             &secondaryOutputs,
-                                            &isSpatialized);
+                                            &isSpatialized,
+                                            &isBitPerfect);
         ALOGD_IF(!secondaryOutputs.empty(),
                  "MmapThread::start does not support secondary outputs, ignoring them");
     } else {
@@ -9778,7 +9962,6 @@
         }
     }
 
-
     mActiveTracks.add(track);
     sp<EffectChain> chain = getEffectChain_l(mSessionId);
     if (chain != 0) {
@@ -9789,11 +9972,16 @@
 
     track->logBeginInterval(patchSinksToString(&mPatch)); // log to MediaMetrics
     *handle = portId;
+
+    if (mActiveTracks.size() == 1) {
+        ret = exitStandby_l();
+    }
+
     broadcast_l();
 
-    ALOGV("%s DONE handle %d stream %p", __FUNCTION__, *handle, mHalStream.get());
+    ALOGV("%s DONE status %d handle %d stream %p", __FUNCTION__, ret, *handle, mHalStream.get());
 
-    return NO_ERROR;
+    return ret;
 }
 
 status_t AudioFlinger::MmapThread::stop(audio_port_handle_t handle)
@@ -9805,7 +9993,6 @@
     }
 
     if (handle == mPortId) {
-        mHalStream->stop();
         releaseWakeLock();
         return NO_ERROR;
     }
@@ -9842,6 +10029,10 @@
         chain->decTrackCnt();
     }
 
+    if (mActiveTracks.isEmpty()) {
+        mHalStream->stop();
+    }
+
     broadcast_l();
 
     return NO_ERROR;
@@ -10112,6 +10303,9 @@
         mPatch = *patch;
         mDeviceId = deviceId;
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -10131,6 +10325,9 @@
     } else {
         status = mHalStream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -10404,6 +10601,25 @@
     }
 }
 
+void AudioFlinger::MmapPlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds)
+{
+    Mutex::Autolock _l(mLock);
+    bool trackMatch = false;
+    for (const sp<MmapTrack> &track : mActiveTracks) {
+        if (portIds.find(track->portId()) != portIds.end()) {
+            track->invalidate();
+            trackMatch = true;
+            portIds.erase(track->portId());
+        }
+        if (portIds.empty()) {
+            break;
+        }
+    }
+    if (trackMatch) {
+        broadcast_l();
+    }
+}
+
 void AudioFlinger::MmapPlaybackThread::processVolume_l()
 {
     float volume;
@@ -10433,20 +10649,10 @@
         } else {
             sp<MmapStreamCallback> callback = mCallback.promote();
             if (callback != 0) {
-                int channelCount;
-                if (isOutput()) {
-                    channelCount = audio_channel_count_from_out_mask(mChannelMask);
-                } else {
-                    channelCount = audio_channel_count_from_in_mask(mChannelMask);
-                }
-                Vector<float> values;
-                for (int i = 0; i < channelCount; i++) {
-                    values.add(volume);
-                }
                 mHalVolFloat = volume; // SW volume control worked, so update value.
                 mNoCallbackWarningCount = 0;
                 mLock.unlock();
-                callback->onVolumeChanged(mChannelMask, values);
+                callback->onVolumeChanged(volume);
                 mLock.lock();
             } else {
                 if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
@@ -10457,6 +10663,14 @@
         }
         for (const sp<MmapTrack> &track : mActiveTracks) {
             track->setMetadataHasChanged();
+            track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               mStreamVolume == 0.f,
+                               mStreamMute,
+                               // TODO(b/241533526): adjust logic to include mute from AppOps
+                               false /*muteFromPlaybackRestricted*/,
+                               false /*muteFromClientVolume*/,
+                               false /*muteFromVolumeShaper*/});
         }
     }
 }
@@ -10541,16 +10755,15 @@
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
 }
 
-status_t AudioFlinger::MmapCaptureThread::exitStandby()
+status_t AudioFlinger::MmapCaptureThread::exitStandby_l()
 {
     {
         // mInput might have been cleared by clearInput()
-        Mutex::Autolock _l(mLock);
         if (mInput != nullptr && mInput->stream != nullptr) {
             mInput->stream->setGain(1.0f);
         }
     }
-    return MmapThread::exitStandby();
+    return MmapThread::exitStandby_l();
 }
 
 AudioFlinger::AudioStreamIn* AudioFlinger::MmapCaptureThread::clearInput()
@@ -10639,4 +10852,48 @@
     return mInput->getCapturePosition((int64_t*)position, timeNanos);
 }
 
+// ----------------------------------------------------------------------------
+
+AudioFlinger::BitPerfectThread::BitPerfectThread(const sp<AudioFlinger> &audioflinger,
+        AudioStreamOut *output, audio_io_handle_t id, bool systemReady)
+        : MixerThread(audioflinger, output, id, systemReady, BIT_PERFECT) {}
+
+AudioFlinger::PlaybackThread::mixer_state AudioFlinger::BitPerfectThread::prepareTracks_l(
+        Vector<sp<Track>> *tracksToRemove) {
+    mixer_state result = MixerThread::prepareTracks_l(tracksToRemove);
+    // If there is only one active track and it is bit-perfect, enable tee buffer.
+    float volumeLeft = 1.0f;
+    float volumeRight = 1.0f;
+    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+        const int trackId = mActiveTracks[0]->id();
+        mAudioMixer->setParameter(
+                    trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
+        mAudioMixer->setParameter(
+                    trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
+                    (void *)(uintptr_t)mNormalFrameCount);
+        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        mIsBitPerfect = true;
+    } else {
+        mIsBitPerfect = false;
+        // With multiple active tracks, there is no need to copy bit-perfect data directly to
+        // the sink buffer.
+        for (const auto& track : mActiveTracks) {
+            const int trackId = track->id();
+            mAudioMixer->setParameter(
+                        trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, nullptr);
+        }
+    }
+    if (mVolumeLeft != volumeLeft || mVolumeRight != volumeRight) {
+        mVolumeLeft = volumeLeft;
+        mVolumeRight = volumeRight;
+        setVolumeForOutput_l(volumeLeft, volumeRight);
+    }
+    return result;
+}
+
+void AudioFlinger::BitPerfectThread::threadLoop_mix() {
+    MixerThread::threadLoop_mix();
+    mHasDataCopiedToSinkBuffer = mIsBitPerfect;
+}
+
 } // namespace android
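BitPerfectThread::prepareTracks_l() above comes down to a small decision: when exactly one active track is bit-perfect, route its data straight into the sink buffer through the mixer's tee buffer, apply its final volume at the output instead of in the mix, and mark the sink as already filled (mHasDataCopiedToSinkBuffer) so the later mixer/effect copies are skipped. A standalone sketch of just that decision, with hypothetical types:

#include <vector>

struct Track {
    bool bitPerfect = false;
    float volumeLeft = 1.0f;
    float volumeRight = 1.0f;
};

struct BitPerfectDecision {
    bool copyDirectlyToSink = false;  // mirrors mHasDataCopiedToSinkBuffer after mixing
    float volumeLeft = 1.0f;          // output volume to apply instead of mixing gain
    float volumeRight = 1.0f;
};

// Only a single active bit-perfect track may bypass the mixer output stage.
BitPerfectDecision decide(const std::vector<Track>& activeTracks) {
    BitPerfectDecision decision;
    if (activeTracks.size() == 1 && activeTracks[0].bitPerfect) {
        decision.copyDirectlyToSink = true;
        decision.volumeLeft = activeTracks[0].volumeLeft;
        decision.volumeRight = activeTracks[0].volumeRight;
    }
    return decision;
}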
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c616de9..247c609 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -33,6 +33,7 @@
         MMAP_PLAYBACK,      // Thread class for MMAP playback stream
         MMAP_CAPTURE,       // Thread class for MMAP capture stream
         SPATIALIZER,  //
+        BIT_PERFECT,        // Thread class for BitPerfectThread
         // If you add any values here, also update ThreadBase::threadTypeToString()
     };
 
@@ -430,8 +431,10 @@
                                             // track
                     FAST_SESSION = 0x4,     // the audio session corresponds to at least one
                                             // fast track
-                    SPATIALIZED_SESSION = 0x8 // the audio session corresponds to at least one
-                                              // spatialized track
+                    SPATIALIZED_SESSION = 0x8, // the audio session corresponds to at least one
+                                               // spatialized track
+                    BIT_PERFECT_SESSION = 0x10 // the audio session corresponds to at least one
+                                               // bit-perfect track
                 };
 
                 // get effect chain corresponding to session Id.
@@ -497,6 +500,9 @@
                             if (track->isSpatialized()) {
                                 result |= SPATIALIZED_SESSION;  // caution, only first track.
                             }
+                            if (track->isBitPerfect()) {
+                                result |= BIT_PERFECT_SESSION;
+                            }
                             break;
                         }
                     }
@@ -624,7 +630,7 @@
     virtual     void        dumpTracks_l(int fd __unused, const Vector<String16>& args __unused) { }
 
 
-    friend class AudioFlinger;      // for mEffectChains
+    friend class AudioFlinger;      // for mEffectChains and mAudioManager
 
                 const type_t            mType;
 
@@ -797,6 +803,11 @@
                      */
                     bool            readAndClearHasChanged();
 
+                    /** Forces the track metadata to be sent to the audio HAL stream the
+                     * next time readAndClearHasChanged() is called.
+                     */
+                    void            setHasChanged() { mHasChanged = true; }
+
                 private:
                     void            logTrack(const char *funcName, const sp<T> &track) const;
 
@@ -977,7 +988,8 @@
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
                                 const sp<media::IAudioTrackCallback>& callback,
-                                bool isSpatialized);
+                                bool isSpatialized,
+                                bool isBitPerfect);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -1024,7 +1036,11 @@
 
                 // called with AudioFlinger lock held
                         bool     invalidateTracks_l(audio_stream_type_t streamType);
+                        bool     invalidateTracks_l(std::set<audio_port_handle_t>& portIds);
                 virtual void     invalidateTracks(audio_stream_type_t streamType);
+                // Invalidate tracks by a set of port ids. The port id will be removed from
+                // the given set if the corresponding track is found and invalidated.
+                virtual void     invalidateTracks(std::set<audio_port_handle_t>& portIds);
 
     virtual     size_t      frameCount() const { return mNormalFrameCount; }
 
@@ -1086,6 +1102,11 @@
                     return INVALID_OPERATION;
                 }
 
+    virtual     status_t setBluetoothLatencyModesEnabled(bool enabled);
+
+                void startMelComputation(const sp<audio_utils::MelProcessor>& processor);
+                void stopMelComputation();
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1155,6 +1176,9 @@
     // for any processing (including output processing).
     bool                            mEffectBufferValid;
 
+    // Set to true when the data has already been copied to the sink buffer.
+    bool                            mHasDataCopiedToSinkBuffer = false;
+
     // Frame size aligned buffer used as input and output to all post processing effects
     // except the Spatializer in a SPATIALIZER thread. Non spatialized tracks are mixed into
     // this buffer so that post processing effects can be applied.
@@ -1185,6 +1209,8 @@
     audio_channel_mask_t            mMixerChannelMask = AUDIO_CHANNEL_NONE;
 
 private:
+    mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
+
     // mMasterMute is in both PlaybackThread and in AudioFlinger.  When a
     // PlaybackThread needs to find out if master-muted, it checks it's local
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
@@ -1436,6 +1462,9 @@
     virtual     void flushHw_l() {
                     mIsTimestampAdvancing.clear();
                 }
+
+        // Whether Bluetooth variable latency control logic is enabled for this thread
+        std::atomic_bool mBluetoothLatencyModesEnabled;
 };
 
 class MixerThread : public PlaybackThread {
@@ -1576,6 +1605,8 @@
     virtual     void        onAddNewTrack_l();
 
     const       audio_offload_info_t mOffloadInfo;
+
+    audioflinger::MonotonicFrameCounter mMonotonicFrameCounter;  // for VolumeShaper
     bool mVolumeShaperActive = false;
 
     DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
@@ -1633,6 +1664,7 @@
     virtual     bool        waitingAsyncCallback();
     virtual     bool        waitingAsyncCallback_l();
     virtual     void        invalidateTracks(audio_stream_type_t streamType);
+                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
 
     virtual     bool        keepWakeLock() const { return (mKeepWakeLock || (mDrainSequence & 1)); }
 
@@ -2093,7 +2125,7 @@
     virtual     void        threadLoop_exit();
     virtual     void        threadLoop_standby();
     virtual     bool        shouldStandby_l() { return false; }
-    virtual     status_t    exitStandby();
+    virtual     status_t    exitStandby_l() REQUIRES(mLock);
 
     virtual     status_t    initCheck() const { return (mHalStream == 0) ? NO_INIT : NO_ERROR; }
     virtual     size_t      frameCount() const { return mFrameCount; }
@@ -2129,6 +2161,7 @@
     virtual     audio_stream_type_t streamType() { return AUDIO_STREAM_DEFAULT; }
 
     virtual     void        invalidateTracks(audio_stream_type_t streamType __unused) {}
+    virtual     void        invalidateTracks(std::set<audio_port_handle_t>& portIds __unused) {}
 
                 // Sets the UID records silence
     virtual     void        setRecordSilenced(audio_port_handle_t portId __unused,
@@ -2208,6 +2241,7 @@
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
 
     virtual     void        invalidateTracks(audio_stream_type_t streamType);
+                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
 
     virtual     audio_stream_type_t streamType() { return mStreamType; }
     virtual     void        checkSilentMode_l();
@@ -2244,7 +2278,7 @@
 
                 AudioStreamIn* clearInput();
 
-                status_t       exitStandby() override;
+                status_t       exitStandby_l() REQUIRES(mLock) override;
 
                 void           updateMetadata_l() override;
                 void           processVolume_l() override;
@@ -2263,3 +2297,18 @@
 
                 AudioStreamIn*  mInput;
 };
+
+class BitPerfectThread : public MixerThread {
+public:
+    BitPerfectThread(const sp<AudioFlinger>& audioflinger, AudioStreamOut *output,
+                     audio_io_handle_t id, bool systemReady);
+
+protected:
+    mixer_state prepareTracks_l(Vector<sp<Track>> *tracksToRemove) override;
+    void threadLoop_mix() override;
+
+private:
+    bool mIsBitPerfect;
+    float mVolumeLeft = 0.f;
+    float mVolumeRight = 0.f;
+};
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 20bfbb0..f305aa8 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -109,6 +109,8 @@
 
     virtual bool        isSpatialized() const { return false; }
 
+    virtual bool        isBitPerfect() const { return false; }
+
 #ifdef TEE_SINK
            void         dumpTee(int fd, const std::string &reason) const {
                                 mTee.dump(fd, reason);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 2d5fd78..ac863b5 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -162,11 +162,12 @@
     }
 
     if (client != 0) {
-        mCblkMemory = client->heap()->allocate(size);
+        mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{{size},
+                std::string("Track ID: ").append(std::to_string(mId))});
         if (mCblkMemory == 0 ||
                 (mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
             ALOGE("%s(%d): not enough memory for AudioTrack size=%zu", __func__, mId, size);
-            client->heap()->dump("AudioTrack");
+            ALOGE("%s", client->allocator().dump().c_str());
             mCblkMemory.clear();
             return;
         }
@@ -632,7 +633,8 @@
             audio_port_handle_t portId,
             size_t frameCountToBeReady,
             float speed,
-            bool isSpatialized)
+            bool isSpatialized,
+            bool isBitPerfect)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -667,7 +669,8 @@
     mFlushHwPending(false),
     mFlags(flags),
     mSpeed(speed),
-    mIsSpatialized(isSpatialized)
+    mIsSpatialized(isSpatialized),
+    mIsBitPerfect(isBitPerfect)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -1115,10 +1118,10 @@
             mObservedUnderruns = playbackThread->getFastTrackUnderruns(mFastIndex);
         }
         status = playbackThread->addTrack_l(this);
-        if (status == INVALID_OPERATION || status == PERMISSION_DENIED) {
+        if (status == INVALID_OPERATION || status == PERMISSION_DENIED || status == DEAD_OBJECT) {
             triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
             //  restore previous state if start was rejected by policy manager
-            if (status == PERMISSION_DENIED) {
+            if (status == PERMISSION_DENIED || status == DEAD_OBJECT) {
                 mState = state;
             }
         }
@@ -1359,25 +1362,7 @@
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
 {
-    sp<VolumeShaper::Configuration> newConfiguration;
-
-    if (isOffloadedOrDirect()) {
-        const VolumeShaper::Configuration::OptionFlag optionFlag
-            = configuration->getOptionFlags();
-        if ((optionFlag & VolumeShaper::Configuration::OPTION_FLAG_CLOCK_TIME) == 0) {
-            ALOGW("%s(%d): %s tracks do not support frame counted VolumeShaper,"
-                    " using clock time instead",
-                    __func__, mId,
-                    isOffloaded() ? "Offload" : "Direct");
-            newConfiguration = new VolumeShaper::Configuration(*configuration);
-            newConfiguration->setOptionFlags(
-                VolumeShaper::Configuration::OptionFlag(optionFlag
-                        | VolumeShaper::Configuration::OPTION_FLAG_CLOCK_TIME));
-        }
-    }
-
-    VolumeShaper::Status status = mVolumeHandler->applyVolumeShaper(
-            (newConfiguration.get() != nullptr ? newConfiguration : configuration), operation);
+    VolumeShaper::Status status = mVolumeHandler->applyVolumeShaper(configuration, operation);
 
     if (isOffloadedOrDirect()) {
         // Signal thread to fetch new volume.
@@ -1398,8 +1383,11 @@
     return mVolumeHandler->getVolumeShaperState(id);
 }
 
-void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volume)
+void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volumeLeft, float volumeRight)
 {
+    mFinalVolumeLeft = volumeLeft;
+    mFinalVolumeRight = volumeRight;
+    const float volume = (volumeLeft + volumeRight) * 0.5f;
     if (mFinalVolume != volume) { // Compare to an epsilon if too many meaningless updates
         mFinalVolume = volume;
         setMetadataHasChanged();
@@ -1492,6 +1480,39 @@
     }
 }
 
+// must be called with player thread lock held
+void AudioFlinger::PlaybackThread::Track::processMuteEvent_l(const sp<
+    IAudioManager>& audioManager, mute_state_t muteState)
+{
+    if (mMuteState == muteState) {
+        // mute state did not change, do nothing
+        return;
+    }
+
+    status_t result = UNKNOWN_ERROR;
+    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+        if (mMuteEventExtras == nullptr) {
+            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+        }
+        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+                                 static_cast<int>(muteState));
+
+        result = audioManager->portEvent(mPortId,
+                                         PLAYER_UPDATE_MUTED,
+                                         mMuteEventExtras);
+    }
+
+    if (result == OK) {
+        mMuteState = muteState;
+    } else {
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+              __func__,
+              id(),
+              mPortId,
+              result);
+    }
+}
+
 status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp)
 {
     if (!isOffloaded() && !isDirect()) {
@@ -1924,6 +1945,8 @@
         PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
         if ((mTrack->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
                 && playbackThread->mHapticChannelCount > 0) {
+            ALOGD("%s, haptic playback was %s for track %d",
+                    __func__, muted ? "muted" : "unmuted", mTrack->id());
             mTrack->setHapticPlaybackEnabled(!muted);
             return true;
         }
@@ -3116,6 +3139,38 @@
 {
 }
 
+void AudioFlinger::MmapThread::MmapTrack::processMuteEvent_l(const sp<
+    IAudioManager>& audioManager, mute_state_t muteState)
+{
+    if (mMuteState == muteState) {
+        // mute state did not change, do nothing
+        return;
+    }
+
+    status_t result = UNKNOWN_ERROR;
+    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+        if (mMuteEventExtras == nullptr) {
+            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+        }
+        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+                                 static_cast<int>(muteState));
+
+        result = audioManager->portEvent(mPortId,
+                                         PLAYER_UPDATE_MUTED,
+                                         mMuteEventExtras);
+    }
+
+    if (result == OK) {
+        mMuteState = muteState;
+    } else {
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+              __func__,
+              id(),
+              mPortId,
+              result);
+    }
+}
+
 void AudioFlinger::MmapThread::MmapTrack::appendDumpHeader(String8& result)
 {
     result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s\n",
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
new file mode 100644
index 0000000..0e409d3
--- /dev/null
+++ b/services/audioflinger/sounddose/Android.bp
@@ -0,0 +1,43 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_library {
+    name: "libsounddose",
+
+    double_loadable: true,
+
+    srcs: [
+        "SoundDoseManager.cpp",
+    ],
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "libaudioutils",
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+
+    header_libs: [
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_library_headers {
+    name: "libsounddose_headers",
+    host_supported: true,
+    device_supported: true,
+    export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
new file mode 100644
index 0000000..46f310c
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -0,0 +1,236 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoundDoseManager"
+
+#include "SoundDoseManager.h"
+
+#include <android-base/stringprintf.h>
+#include <time.h>
+#include <utils/Log.h>
+#include <cinttypes>
+#include "android/media/SoundDoseRecord.h"
+
+namespace android {
+
+namespace {
+
+int64_t getMonotonicSecond() {
+    struct timespec now_ts;
+    if (clock_gettime(CLOCK_MONOTONIC, &now_ts) != 0) {
+        ALOGE("%s: cannot get timestamp", __func__);
+        return -1;
+    }
+    return now_ts.tv_sec;
+}
+
+}  // namespace
+
+sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
+        audio_port_handle_t deviceId, audio_io_handle_t streamHandle, uint32_t sampleRate,
+        size_t channelCount, audio_format_t format) {
+    std::lock_guard _l(mLock);
+
+    auto streamProcessor = mActiveProcessors.find(streamHandle);
+    sp<audio_utils::MelProcessor> processor;
+    if (streamProcessor != mActiveProcessors.end() &&
+            (processor = streamProcessor->second.promote())) {
+        ALOGV("%s: found callback for stream %d", __func__, streamHandle);
+        processor->setDeviceId(deviceId);
+        processor->setOutputRs2(mRs2Value);
+        return processor;
+    } else {
+        ALOGV("%s: creating new callback for stream %d", __func__, streamHandle);
+        sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
+                sampleRate, channelCount, format, *this, deviceId, mRs2Value);
+        mActiveProcessors[streamHandle] = melProcessor;
+        return melProcessor;
+    }
+}
+
+void SoundDoseManager::setOutputRs2(float rs2Value) {
+    ALOGV("%s", __func__);
+    std::lock_guard _l(mLock);
+
+    mRs2Value = rs2Value;
+
+    for (auto& streamProcessor : mActiveProcessors) {
+        sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
+        if (processor != nullptr) {
+            status_t result = processor->setOutputRs2(mRs2Value);
+            if (result != NO_ERROR) {
+                ALOGW("%s: could not set RS2 value %f for stream %d", __func__, mRs2Value,
+                      streamProcessor.first);
+            }
+        }
+    }
+}
+
+void SoundDoseManager::removeStreamProcessor(audio_io_handle_t streamHandle) {
+    std::lock_guard _l(mLock);
+    auto callbackToRemove = mActiveProcessors.find(streamHandle);
+    if (callbackToRemove != mActiveProcessors.end()) {
+        mActiveProcessors.erase(callbackToRemove);
+    }
+}
+
+void SoundDoseManager::SoundDose::binderDied(__unused const wp<IBinder>& who) {
+    ALOGV("%s", __func__);
+
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->resetSoundDose();
+    }
+}
+
+binder::Status SoundDoseManager::SoundDose::setOutputRs2(float value) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setOutputRs2(value);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::resetCsd(
+        float currentCsd, const std::vector<media::SoundDoseRecord>& records) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->resetCsd(currentCsd, records);
+    }
+    return binder::Status::ok();
+}
+
+void SoundDoseManager::resetSoundDose() {
+    std::lock_guard lock(mLock);
+    mSoundDose = nullptr;
+}
+
+void SoundDoseManager::resetCsd(float currentCsd,
+                                const std::vector<media::SoundDoseRecord>& records) {
+    std::lock_guard lock(mLock);
+    std::vector<audio_utils::CsdRecord> resetRecords;
+    for (const auto& record : records) {
+        resetRecords.emplace_back(record.timestamp, record.duration, record.value,
+                                  record.averageMel);
+    }
+
+    mMelAggregator->reset(currentCsd, resetRecords);
+}
+
+void SoundDoseManager::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+                                      audio_port_handle_t deviceId) const {
+    ALOGV("%s", __func__);
+
+    sp<media::ISoundDoseCallback> soundDoseCallback;
+    std::vector<audio_utils::CsdRecord> records;
+    float currentCsd;
+    {
+        std::lock_guard _l(mLock);
+
+        int64_t timestampSec = getMonotonicSecond();
+
+        // only for internal callbacks
+        records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
+                deviceId, std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
+                timestampSec - length));
+
+        currentCsd = mMelAggregator->getCsd();
+    }
+
+    soundDoseCallback = getSoundDoseCallback();
+
+    if (records.size() > 0 && soundDoseCallback != nullptr) {
+        std::vector<media::SoundDoseRecord> newRecordsToReport;
+        for (const auto& record : records) {
+            newRecordsToReport.emplace_back(csdRecordToSoundDoseRecord(record));
+        }
+
+        soundDoseCallback->onNewCsdValue(currentCsd, newRecordsToReport);
+    }
+}
+
+sp<media::ISoundDoseCallback> SoundDoseManager::getSoundDoseCallback() const {
+    std::lock_guard _l(mLock);
+    if (mSoundDose == nullptr) {
+        return nullptr;
+    }
+
+    return mSoundDose->mSoundDoseCallback;
+}
+
+void SoundDoseManager::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const {
+    ALOGV("%s: Momentary exposure for device %d triggered: %f MEL", __func__, deviceId, currentMel);
+
+    auto soundDoseCallback = getSoundDoseCallback();
+    if (soundDoseCallback != nullptr) {
+        soundDoseCallback->onMomentaryExposure(currentMel, deviceId);
+    }
+}
+
+sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback) {
+    ALOGV("%s: Register ISoundDoseCallback", __func__);
+
+    std::lock_guard _l(mLock);
+    if (mSoundDose == nullptr) {
+        mSoundDose = sp<SoundDose>::make(this, callback);
+    }
+    return mSoundDose;
+}
+
+std::string SoundDoseManager::dump() const {
+    std::string output;
+    mMelAggregator->foreachCsd([&output](audio_utils::CsdRecord csdRecord) {
+        base::StringAppendF(&output,
+                            "CSD %f with average MEL %f in interval [%" PRId64 ", %" PRId64 "]",
+                            csdRecord.value, csdRecord.averageMel, csdRecord.timestamp,
+                            csdRecord.timestamp + csdRecord.duration);
+        base::StringAppendF(&output, "\n");
+    });
+
+    base::StringAppendF(&output, "\nCached Mel Records:\n");
+    mMelAggregator->foreachCachedMel([&output](const audio_utils::MelRecord& melRecord) {
+        base::StringAppendF(&output, "Continuous MELs for portId=%d, ", melRecord.portId);
+        base::StringAppendF(&output, "starting at timestamp %" PRId64 ": ", melRecord.timestamp);
+
+        for (const auto& mel : melRecord.mels) {
+            base::StringAppendF(&output, "%.2f ", mel);
+        }
+        base::StringAppendF(&output, "\n");
+    });
+
+    return output;
+}
+
+size_t SoundDoseManager::getCachedMelRecordsSize() const {
+    return mMelAggregator->getCachedMelRecordsSize();
+}
+
+media::SoundDoseRecord SoundDoseManager::csdRecordToSoundDoseRecord(
+        const audio_utils::CsdRecord& legacy) {
+    media::SoundDoseRecord soundDoseRecord{};
+    soundDoseRecord.timestamp = legacy.timestamp;
+    soundDoseRecord.duration = legacy.duration;
+    soundDoseRecord.value = legacy.value;
+    soundDoseRecord.averageMel = legacy.averageMel;
+    return soundDoseRecord;
+}
+
+}  // namespace android
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
new file mode 100644
index 0000000..b0aa5d6
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -0,0 +1,134 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <android/media/BnSoundDose.h>
+#include <android/media/ISoundDoseCallback.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
+#include <binder/Status.h>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
+  public:
+    /** CSD is computed with a rolling window of 7 days. */
+    static constexpr int64_t kCsdWindowSeconds = 604800;  // 60s * 60m * 24h * 7d
+    /** Default RS2 value in dBA as defined in IEC 62368-1 3rd edition. */
+    static constexpr float kDefaultRs2Value = 100.f;
+
+    SoundDoseManager()
+        : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+          mRs2Value(kDefaultRs2Value) {}
+
+    /**
+     * \brief Creates or gets the MelProcessor assigned to the streamHandle
+     *
+     * \param deviceId          id of the device where the stream is active.
+     * \param streamHandle      handle to the stream
+     * \param sampleRate        sample rate for the processor
+     * \param channelCount      number of channels to be processed.
+     * \param format            format of the input samples.
+     *
+     * \return MelProcessor assigned to the stream and device id.
+     */
+    sp<audio_utils::MelProcessor> getOrCreateProcessorForDevice(audio_port_handle_t deviceId,
+                                                                audio_io_handle_t streamHandle,
+                                                                uint32_t sampleRate,
+                                                                size_t channelCount,
+                                                                audio_format_t format);
+
+    /**
+     * \brief Removes stream processor when MEL computation is not needed anymore
+     *
+     * \param streamHandle      handle to the stream
+     */
+    void removeStreamProcessor(audio_io_handle_t streamHandle);
+
+    /**
+     * Sets the output RS2 value for momentary exposure warnings. Must not be
+     * higher than 100dBA and not lower than 80dBA.
+     *
+     * \param rs2Value value to use for momentary exposure
+     */
+    void setOutputRs2(float rs2Value);
+
+    /**
+     * \brief Registers the interface for passing callbacks to the AudioService and gets
+     * the ISoundDose interface.
+     *
+     * \returns the sound dose binder to send commands to the SoundDoseManager
+     **/
+    sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+    std::string dump() const;
+
+    // used for testing
+    size_t getCachedMelRecordsSize() const;
+
+    /** Method for converting from audio_utils::CsdRecord to media::SoundDoseRecord. */
+    static media::SoundDoseRecord csdRecordToSoundDoseRecord(const audio_utils::CsdRecord& legacy);
+
+    // ------ Override audio_utils::MelProcessor::MelCallback ------
+    void onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+                        audio_port_handle_t deviceId) const override;
+
+    void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
+
+private:
+    class SoundDose : public media::BnSoundDose,
+                      public IBinder::DeathRecipient {
+    public:
+        SoundDose(SoundDoseManager* manager, const sp<media::ISoundDoseCallback>& callback)
+            : mSoundDoseManager(manager),
+              mSoundDoseCallback(callback) {};
+
+        /** IBinder::DeathRecipient. Listen to the death of ISoundDoseCallback. */
+        virtual void binderDied(const wp<IBinder>& who);
+
+        /** BnSoundDose override */
+        binder::Status setOutputRs2(float value) override;
+        binder::Status resetCsd(float currentCsd,
+                                const std::vector<media::SoundDoseRecord>& records) override;
+
+        wp<SoundDoseManager> mSoundDoseManager;
+        const sp<media::ISoundDoseCallback> mSoundDoseCallback;
+    };
+
+    void resetSoundDose();
+
+    void resetCsd(float currentCsd, const std::vector<media::SoundDoseRecord>& records);
+
+    sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
+
+    mutable std::mutex mLock;
+
+    // no need for lock since MelAggregator is thread-safe
+    const sp<audio_utils::MelAggregator> mMelAggregator;
+
+    std::unordered_map<audio_io_handle_t, wp<audio_utils::MelProcessor>> mActiveProcessors
+            GUARDED_BY(mLock);
+
+    float mRs2Value GUARDED_BY(mLock);
+
+    sp<SoundDose> mSoundDose GUARDED_BY(mLock);
+};
+
+}  // namespace android
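A minimal wiring sketch for this interface, assuming the same <SoundDoseManager.h> include path and stack ownership that the unit tests below use; the handles, audio parameters, and the function name are illustrative placeholders, not values from this change.

#include <SoundDoseManager.h>

namespace android {

void exampleSoundDoseWiring() {
    SoundDoseManager soundDoseManager;

    // An output stream (handle 42) starts playing to device port 11: fetch or reuse its
    // MEL processor.
    auto processor = soundDoseManager.getOrCreateProcessorForDevice(
            /*deviceId=*/11, /*streamHandle=*/42, /*sampleRate=*/48000,
            /*channelCount=*/2, /*format=*/AUDIO_FORMAT_PCM_FLOAT);
    // The owning playback thread would feed PCM frames to `processor`; computed MEL values
    // come back through onNewMelValues(), which aggregates CSD and forwards new records to
    // the registered ISoundDoseCallback, if any.
    (void)processor;

    // The stream is closed: MEL computation for it is no longer needed.
    soundDoseManager.removeStreamProcessor(/*streamHandle=*/42);
}

}  // namespace android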
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
new file mode 100644
index 0000000..a886663
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -0,0 +1,40 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_test {
+    name: "sounddosemanager_tests",
+
+    srcs: [
+        "sounddosemanager_tests.cpp"
+    ],
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "libaudioutils",
+        "libbase",
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libgmock",
+        "libsounddose",
+    ],
+
+    header_libs: [
+        "libaudioutils_headers",
+        "libsounddose_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
\ No newline at end of file
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
new file mode 100644
index 0000000..0aa5a20
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "sounddosemanager_tests"
+
+#include <SoundDoseManager.h>
+
+#include <gtest/gtest.h>
+
+namespace android {
+namespace {
+
+TEST(SoundDoseManagerTest, GetProcessorForExistingStream) {
+    SoundDoseManager soundDoseManager;
+    sp<audio_utils::MelProcessor> processor1 =
+        soundDoseManager.getOrCreateProcessorForDevice(/*deviceId=*/1,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+    sp<audio_utils::MelProcessor> processor2 =
+        soundDoseManager.getOrCreateProcessorForDevice(/*deviceId=*/2,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    EXPECT_EQ(processor1, processor2);
+}
+
+TEST(SoundDoseManagerTest, RemoveExistingStream) {
+    SoundDoseManager soundDoseManager;
+    sp<audio_utils::MelProcessor> processor1 =
+        soundDoseManager.getOrCreateProcessorForDevice(/*deviceId=*/1,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    soundDoseManager.removeStreamProcessor(1);
+    sp<audio_utils::MelProcessor> processor2 =
+        soundDoseManager.getOrCreateProcessorForDevice(/*deviceId=*/2,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    EXPECT_NE(processor1, processor2);
+}
+
+TEST(SoundDoseManagerTest, NewMelValuesCacheNewRecord) {
+    SoundDoseManager soundDoseManager;
+    std::vector<float>mels{1, 1};
+
+    soundDoseManager.onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1);
+
+    EXPECT_EQ(soundDoseManager.getCachedMelRecordsSize(), size_t{1});
+}
+
+}  // namespace
+}  // namespace android
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
new file mode 100644
index 0000000..17ce8bd
--- /dev/null
+++ b/services/audioflinger/timing/Android.bp
@@ -0,0 +1,28 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_library {
+    name: "libaudioflinger_timing",
+
+    host_supported: true,
+
+    srcs: [
+        "MonotonicFrameCounter.cpp",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.cpp b/services/audioflinger/timing/MonotonicFrameCounter.cpp
new file mode 100644
index 0000000..286f549
--- /dev/null
+++ b/services/audioflinger/timing/MonotonicFrameCounter.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MonotonicFrameCounter"
+
+#include <utils/Log.h>
+#include "MonotonicFrameCounter.h"
+
+namespace android::audioflinger {
+
+int64_t MonotonicFrameCounter::updateAndGetMonotonicFrameCount(
+        int64_t newFrameCount, int64_t newTime) {
+    if (newFrameCount < 0 || newTime < 0) {
+        const auto result = getLastReportedFrameCount();
+        ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newTime:%lld,"
+                " using %lld as frameCount",
+                __func__, (long long) newFrameCount, (long long)newTime,
+                (long long)result);
+        return result;
+    }
+    if (newFrameCount < mLastReceivedFrameCount) {
+        const auto result = getLastReportedFrameCount();
+        ALOGW("%s: retrograde newFrameCount:%lld < mLastReceivedFrameCount:%lld,"
+                " ignoring, returning %lld as frameCount",
+                __func__, (long long) newFrameCount, (long long)mLastReceivedFrameCount,
+                (long long)result);
+        return result;
+    }
+    // Input looks fine.
+    // For better granularity, we could consider extrapolation on newTime.
+    mLastReceivedFrameCount = newFrameCount;
+    return getLastReportedFrameCount();
+}
+
+int64_t MonotonicFrameCounter::onFlush() {
+    ALOGV("%s: Updating mOffsetFrameCount:%lld with mLastReceivedFrameCount:%lld",
+            __func__, (long long)mOffsetFrameCount, (long long)mLastReceivedFrameCount);
+    mOffsetFrameCount += mLastReceivedFrameCount;
+    mLastReceivedFrameCount = 0;
+    return mOffsetFrameCount;
+}
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.h b/services/audioflinger/timing/MonotonicFrameCounter.h
new file mode 100644
index 0000000..0ea9510
--- /dev/null
+++ b/services/audioflinger/timing/MonotonicFrameCounter.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+namespace android::audioflinger {
+
+/**
+ * MonotonicFrameCounter
+ *
+ * Advances a monotonic frame count based on input timestamp pairs (frames, time).
+ * It takes into account a possible flush, which will "reset" the frames to 0.
+ *
+ * This class is used to drive VolumeShaper volume automation.
+ *
+ * The timestamps provided in updateAndGetMonotonicFrameCount should
+ * be of sufficient granularity for the purpose at hand.  Currently no temporal
+ * extrapolation is done.
+ *
+ * This class is not thread safe.
+ */
+class MonotonicFrameCounter {
+public:
+    /**
+     * Receives a new timestamp pair (frames, time) and returns a monotonic frameCount.
+     *
+     * \param newFrameCount the frameCount currently played.
+     * \param newTime       the time corresponding to the frameCount.
+     * \return              a monotonic frame count usable for automation timing.
+     */
+    int64_t updateAndGetMonotonicFrameCount(int64_t newFrameCount, int64_t newTime);
+
+    /**
+     * Notifies when a flush occurs, whereupon the received frameCount sequence restarts at 0.
+     *
+     * \return the last reported frameCount.
+     */
+    int64_t onFlush();
+
+    /**
+     * Returns the received (input) frameCount to reported (output) frameCount offset.
+     *
+     * This offset is sufficient to ensure monotonicity after flush is called;
+     * suitability for any other purpose is *not* guaranteed.
+     */
+    int64_t getOffsetFrameCount() const { return mOffsetFrameCount; }
+
+    /**
+     * Returns the last received frameCount.
+     */
+    int64_t getLastReceivedFrameCount() const {
+        return mLastReceivedFrameCount;
+    }
+
+    /**
+     * Returns the last reported frameCount from updateAndGetMonotonicFrameCount().
+     */
+    int64_t getLastReportedFrameCount() const {
+        // This is consistent after onFlush().
+        return mOffsetFrameCount + mLastReceivedFrameCount;
+    }
+
+private:
+    int64_t mOffsetFrameCount = 0;
+    int64_t mLastReceivedFrameCount = 0;
+};
+
+} // namespace android::audioflinger
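The contract above can be made concrete with a short worked example (the values are arbitrary and the wrapper function is illustrative): the reported count keeps increasing across onFlush() because the offset absorbs the frames received so far.

#include "MonotonicFrameCounter.h"

void exampleMonotonicFrameCounter() {
    android::audioflinger::MonotonicFrameCounter counter;

    counter.updateAndGetMonotonicFrameCount(100, 10);  // returns 100
    counter.updateAndGetMonotonicFrameCount(300, 30);  // returns 300
    counter.onFlush();                                 // offset becomes 300, received count restarts at 0
    counter.updateAndGetMonotonicFrameCount(50, 5);    // returns 350 = offset(300) + received(50)
    counter.getLastReportedFrameCount();               // 350, monotonic across the flush
}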
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
new file mode 100644
index 0000000..29267a6
--- /dev/null
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -0,0 +1,29 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_test {
+    name: "monotonicframecounter_tests",
+
+    host_supported: true,
+
+    srcs: [
+        "monotonicframecounter_tests.cpp"
+    ],
+
+    static_libs: [
+        "libaudioflinger_timing",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
\ No newline at end of file
diff --git a/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp b/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp
new file mode 100644
index 0000000..7aaa4fa
--- /dev/null
+++ b/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "monotonicframecounter_tests"
+
+#include "../MonotonicFrameCounter.h"
+
+#include <gtest/gtest.h>
+
+using namespace android::audioflinger;
+
+namespace {
+
+TEST(MonotonicFrameCounterTest, SimpleProgression) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    const std::vector<std::pair<int64_t, int64_t>> frametimes{
+        {0, 0}, {100, 100}, {200, 200},
+    };
+
+    int64_t maxReceivedFrameCount = 0;
+    for (const auto& p : frametimes) {
+        maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+        ASSERT_EQ(p.first,
+                monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second));
+    }
+    ASSERT_EQ(maxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+TEST(MonotonicFrameCounterTest, InvalidData) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    const std::vector<std::pair<int64_t, int64_t>> frametimes{
+        {-1, -1}, {100, 100}, {-1, -1}, {90, 90}, {200, 200},
+    };
+
+    int64_t prevFrameCount = 0;
+    int64_t maxReceivedFrameCount = 0;
+    for (const auto& p : frametimes) {
+        maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+        const int64_t frameCount =
+                monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second);
+        // we must be monotonic
+        ASSERT_GE(frameCount, prevFrameCount);
+        prevFrameCount = frameCount;
+    }
+    ASSERT_EQ(maxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+TEST(MonotonicFrameCounterTest, Flush) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    // Different playback sequences are separated by a flush.
+    const std::vector<std::vector<std::pair<int64_t, int64_t>>> frameset{
+        {{-1, -1}, {100, 10}, {200, 20}, {300, 30},},
+        {{-1, -1}, {100, 10}, {200, 20}, {300, 30},},
+        {{-1, -1}, {100, 100}, {-1, -1}, {90, 90}, {200, 200},},
+    };
+
+    int64_t prevFrameCount = 0;
+    int64_t maxReceivedFrameCount = 0;
+    int64_t sumMaxReceivedFrameCount = 0;
+    for (const auto& v : frameset) {
+        for (const auto& p : v) {
+            maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+            const int64_t frameCount =
+                    monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second);
+            // we must be monotonic
+            ASSERT_GE(frameCount, prevFrameCount);
+            prevFrameCount = frameCount;
+        }
+        monotonicFrameCounter.onFlush();
+        sumMaxReceivedFrameCount += maxReceivedFrameCount;
+        maxReceivedFrameCount = 0;
+    }
+
+    // On flush we keep a monotonic reported framecount
+    // even though the received framecount resets to 0.
+    // The requirement of equality here is implementation dependent.
+    ASSERT_EQ(sumMaxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+}  // namespace
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 496591a..520bad2 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -134,17 +134,18 @@
     // request an output appropriate for playback of the supplied stream type and parameters
     virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
     virtual status_t getOutputForAttr(const audio_attributes_t *attr,
-                                        audio_io_handle_t *output,
-                                        audio_session_t session,
-                                        audio_stream_type_t *stream,
-                                        const AttributionSourceState& attributionSouce,
-                                        const audio_config_t *config,
-                                        audio_output_flags_t *flags,
-                                        audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId,
-                                        std::vector<audio_io_handle_t> *secondaryOutputs,
-                                        output_type_t *outputType,
-                                        bool *isSpatialized) = 0;
+                                      audio_io_handle_t *output,
+                                      audio_session_t session,
+                                      audio_stream_type_t *stream,
+                                      const AttributionSourceState& attributionSource,
+                                      audio_config_t *config,
+                                      audio_output_flags_t *flags,
+                                      audio_port_handle_t *selectedDeviceId,
+                                      audio_port_handle_t *portId,
+                                      std::vector<audio_io_handle_t> *secondaryOutputs,
+                                      output_type_t *outputType,
+                                      bool *isSpatialized,
+                                      bool *isBitPerfect) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding
     // stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -159,8 +160,8 @@
                                      audio_io_handle_t *input,
                                      audio_unique_id_t riid,
                                      audio_session_t session,
-                                     const AttributionSourceState& attributionSouce,
-                                     const audio_config_base_t *config,
+                                     const AttributionSourceState& attributionSource,
+                                     audio_config_base_t *config,
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      input_type_t *inputType,
@@ -307,13 +308,13 @@
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
 
     virtual status_t getProductStrategyFromAudioAttributes(
-            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            const audio_attributes_t &aa, product_strategy_t &productStrategy,
             bool fallbackOnDefault) = 0;
 
     virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) = 0;
 
     virtual status_t getVolumeGroupFromAudioAttributes(
-            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
+            const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
 
     virtual bool     isCallScreenModeSupported() = 0;
 
@@ -407,6 +408,20 @@
     // retrieves the list of available direct audio profiles for the given audio attributes
     virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                     AudioProfileVector& audioProfiles) = 0;
+
+    virtual status_t getSupportedMixerAttributes(
+            audio_port_handle_t portId, std::vector<audio_mixer_attributes_t>& mixerAttrs) = 0;
+    virtual status_t setPreferredMixerAttributes(
+            const audio_attributes_t* attr,
+            audio_port_handle_t portId,
+            uid_t uid,
+            const audio_mixer_attributes_t* mixerAttributes) = 0;
+    virtual status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                 audio_port_handle_t portId,
+                                                 audio_mixer_attributes_t* mixerAttributes) = 0;
+    virtual status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                   audio_port_handle_t portId,
+                                                   uid_t uid) = 0;
 };
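A hypothetical call sequence against the four pure-virtual methods added above, to show how they are intended to compose; only the signatures declared here are relied on, and `policy`, `attr`, `portId`, and `uid` are assumed to come from the caller's context.

// Sketch only (assumes AudioPolicyInterface.h, <vector>, and utils/Errors.h are available,
// as they are for this file's consumers).
static bool pickPreferredMixer(android::AudioPolicyInterface* policy,
                               const audio_attributes_t& attr,
                               audio_port_handle_t portId, uid_t uid) {
    std::vector<audio_mixer_attributes_t> supported;
    if (policy->getSupportedMixerAttributes(portId, supported) != android::NO_ERROR ||
            supported.empty()) {
        return false;
    }
    // Register the first supported configuration for this attributes/uid pair.
    policy->setPreferredMixerAttributes(&attr, portId, uid, &supported[0]);

    audio_mixer_attributes_t current;
    policy->getPreferredMixerAttributes(&attr, portId, &current);  // read the preference back
    policy->clearPreferredMixerAttributes(&attr, portId, uid);     // and release it again
    return true;
}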
 
 // Audio Policy client Interface
@@ -477,9 +492,6 @@
     virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
                                      audio_io_handle_t output, int delayMs = 0) = 0;
 
-    // invalidate a stream type, causing a reroute to an unspecified new output
-    virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
-
     // function enabling to send proprietary informations directly from audio policy manager to
     // audio hardware interface.
     virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs,
@@ -549,6 +561,8 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
 
     virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+
+    virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index f130f7c..4d43eb0 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -11,12 +11,23 @@
           "include-filter": "com.google.android.gts.audio.AudioHostTest#testTwoChannelCapturing"
         }
       ]
-    },
+    }
+  ],
+  "postsubmit": [
     {
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 3d3e0cf..de8e77f 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -246,3 +246,16 @@
         }
     }
 }
+
+/**
+ * Indicates whether two sets of audio output flags match, which means that
+ * 1) `supersetFlags` and `subsetFlags` agree on every flag in `mustMatchFlags`, i.e. each
+ *    must-match flag is either set in both or in neither, and
+ * 2) `supersetFlags` contains all flags from `subsetFlags`.
+ */
+static inline bool audio_output_flags_is_subset(audio_output_flags_t supersetFlags,
+                                                audio_output_flags_t subsetFlags,
+                                                uint32_t mustMatchFlags)
+{
+    return ((supersetFlags ^ subsetFlags) & mustMatchFlags) == AUDIO_OUTPUT_FLAG_NONE
+            && (supersetFlags & subsetFlags) == subsetFlags;
+}
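A worked example of the helper above, using AUDIO_OUTPUT_FLAG_BIT_PERFECT (referenced elsewhere in this change) as the must-match flag; the wrapper function exists only to host the calls and is not part of the source.

static inline void audio_output_flags_is_subset_example()
{
    const audio_output_flags_t superset =
            (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_BIT_PERFECT);

    // true: BIT_PERFECT is set on both sides and every subset flag is present in the superset;
    // the extra DIRECT flag is tolerated because it is not a must-match flag.
    (void)audio_output_flags_is_subset(superset, AUDIO_OUTPUT_FLAG_BIT_PERFECT,
                                       AUDIO_OUTPUT_FLAG_BIT_PERFECT);

    // false: BIT_PERFECT is set on the superset only, so the must-match check fails.
    (void)audio_output_flags_is_subset(superset, AUDIO_OUTPUT_FLAG_DIRECT,
                                       AUDIO_OUTPUT_FLAG_BIT_PERFECT);
}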
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 1f23ae3..92a5628 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -24,6 +24,7 @@
         "src/HwModule.cpp",
         "src/IOProfile.cpp",
         "src/PolicyAudioPort.cpp",
+        "src/PreferredMixerAttributesInfo.cpp",
         "src/Serializer.cpp",
         "src/SoundTriggerSession.cpp",
         "src/TypeConverter.cpp",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 75fa595..52a000f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -440,6 +440,10 @@
 
     void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
 
+    bool isConfigurationMatched(const audio_config_base_t& config, audio_output_flags_t flags);
+
+    PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
@@ -450,6 +454,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
+    bool mUsePreferredMixerAttributes = false;
 };
 
 // Audio output driven by an input device directly.
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 3b19e52..54a143c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -72,13 +72,16 @@
      */
     status_t getOutputForAttr(const audio_attributes_t& attributes,
                               const audio_config_base_t& config,
-                              uid_t uid, audio_output_flags_t flags,
+                              uid_t uid,
+                              audio_session_t session,
+                              audio_output_flags_t flags,
                               sp<AudioPolicyMix> &primaryMix,
                               std::vector<sp<AudioPolicyMix>> *secondaryMixes);
 
     sp<DeviceDescriptor> getDeviceAndMixForInputSource(const audio_attributes_t& attributes,
                                                        const DeviceVector &availableDeviceTypes,
                                                        uid_t uid,
+                                                       audio_session_t session,
                                                        sp<AudioPolicyMix> *policyMix) const;
 
     /**
@@ -124,11 +127,13 @@
     void dump(String8 *dst) const;
 
 private:
-    enum class MixMatchStatus { MATCH, NO_MATCH };
-    MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
+    bool mixMatch(const AudioMix* mix, size_t mixIndex,
                             const audio_attributes_t& attributes,
                             const audio_config_base_t& config,
-                            uid_t uid);
+                            uid_t uid,
+                            audio_session_t session);
 };
 
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr);
+
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 4adc920..80e098b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -183,6 +183,10 @@
         return isSingleDeviceType(mDeviceTypes, deviceType);
     }
 
+    bool onlyContainsDevice(const sp<DeviceDescriptor>& item) const {
+        return this->size() == 1 && contains(item);
+    }
+
     bool contains(const sp<DeviceDescriptor>& item) const { return indexOf(item) >= 0; }
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 90b812d..c489eed 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -35,10 +35,7 @@
 class IOProfile : public AudioPort, public PolicyAudioPort
 {
 public:
-    IOProfile(const std::string &name, audio_port_role_t role)
-        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
-          curOpenCount(0),
-          curActiveCount(0) {}
+    IOProfile(const std::string &name, audio_port_role_t role);
 
     virtual ~IOProfile() = default;
 
@@ -66,6 +63,17 @@
         if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             maxActiveCount = 0;
         }
+        if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+            mMixerBehaviors.clear();
+            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+            if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+                mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
+            }
+        }
+    }
+
+    const MixerBehaviorSet& getMixerBehaviors() const {
+        return mMixerBehaviors;
     }
 
     /**
@@ -97,6 +105,25 @@
                              uint32_t flags,
                              bool exactMatchRequiredForInputFlags = false) const;
 
+    /**
+     * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
+     *
+     * @param devices vector of devices to be checked for compatibility
+     * @return true if all devices are supported, false otherwise.
+     */
+    bool areAllDevicesSupported(const DeviceVector &devices) const;
+
+    /**
+     * @brief isCompatibleProfileForFlags: Checks if the IO profile is compatible with the
+     * specified flags.
+     *
+     * @param flags to be checked for compatibility
+     * @param exactMatchRequiredForInputFlags true if exact match is required on flags
+     * @return true if the profile is compatible, false otherwise.
+     */
+    bool isCompatibleProfileForFlags(uint32_t flags,
+                                     bool exactMatchRequiredForInputFlags = false) const;
+
     void dump(String8 *dst, int spaces) const;
     void log();
 
@@ -193,6 +220,8 @@
         return false;
     }
 
+    void toSupportedMixerAttributes(std::vector<audio_mixer_attributes_t>* mixerAttributes) const;
+
     // Number of streams currently opened for this profile.
     uint32_t     curOpenCount;
     // Number of streams currently active for this profile. This is not the number of active clients
@@ -201,6 +230,8 @@
 
 private:
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
+
+    MixerBehaviorSet mMixerBehaviors;
 };
 
 class InputProfile : public IOProfile
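
The input-flag half of isCompatibleProfileForFlags() (implemented in IOProfile.cpp below) tolerates only a FAST mismatch unless an exact match is required. A minimal sketch of that rule, assuming stand-in values rather than the real AUDIO_INPUT_FLAG_* constants:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t IN_FLAG_NONE       = 0x0;  // stand-in values for illustration only
    constexpr uint32_t IN_FLAG_FAST       = 0x1;
    constexpr uint32_t IN_FLAG_HW_HOTWORD = 0x2;

    // The only input flag allowed to differ is FAST (unless an exact match is required):
    // a fast profile can serve a normal request, and a normal profile a fast request.
    static bool inputFlagsCompatible(uint32_t profileFlags, uint32_t requestedFlags,
                                     bool exactMatchRequired) {
        const uint32_t tolerated = exactMatchRequired ? IN_FLAG_NONE : IN_FLAG_FAST;
        return ((profileFlags ^ requestedFlags) & ~tolerated) == 0;
    }

    int main() {
        assert(inputFlagsCompatible(IN_FLAG_FAST, IN_FLAG_NONE, /*exactMatchRequired=*/false));
        assert(!inputFlagsCompatible(IN_FLAG_FAST, IN_FLAG_NONE, /*exactMatchRequired=*/true));
        assert(!inputFlagsCompatible(IN_FLAG_HW_HOTWORD, IN_FLAG_NONE, /*exactMatchRequired=*/false));
        return 0;
    }
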
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
new file mode 100644
index 0000000..9472481
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+
+#include <utils/RefBase.h>
+
+#include "AudioRoute.h"
+#include "HwModule.h"
+#include "IOProfile.h"
+
+namespace android {
+
+class PreferredMixerAttributesInfo : public RefBase {
+public:
+    PreferredMixerAttributesInfo(uid_t uid, audio_port_handle_t devicePortId,
+                                 const sp<IOProfile>& profile, audio_output_flags_t flags,
+                                 const audio_mixer_attributes_t& mixerAttributes)
+        : mDevicePortId(devicePortId), mUid(uid), mProfile(profile),
+          mOutputFlags(flags), mMixerAttributes(mixerAttributes) { }
+
+    audio_port_handle_t getDeviceId() const { return mDevicePortId; }
+    const audio_config_base_t& getConfigBase() const { return mMixerAttributes.config; }
+    uid_t getUid() const { return mUid; }
+    int getActiveClientCount() const { return mActiveClientsCount; }
+    const sp<IOProfile> getProfile() const { return mProfile; };
+    audio_output_flags_t getFlags() const { return mOutputFlags; }
+    const audio_mixer_attributes_t& getMixerAttributes() const { return mMixerAttributes; }
+
+    void increaseActiveClient() { mActiveClientsCount++; }
+    void decreaseActiveClient() { mActiveClientsCount--; }
+
+    void dump(String8 *dst);
+
+private:
+    const audio_port_handle_t mDevicePortId;
+    const uid_t mUid;
+    const sp<IOProfile> mProfile;
+    const audio_output_flags_t mOutputFlags;
+    const audio_mixer_attributes_t mMixerAttributes;
+    int mActiveClientsCount = 0;
+};
+
+} // namespace android
\ No newline at end of file
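
A rough sketch of how such a record can be keyed and its active-client count maintained. The map layout and helper names below are assumptions for illustration only, not the manager's actual bookkeeping:

    #include <map>
    #include <memory>
    #include <utility>

    // Simplified stand-in for the record above: one preferred-mixer entry per
    // (device port id, owner uid) pair, with a running count of active clients.
    struct PreferredMixerEntry {
        int devicePortId = 0;
        int ownerUid = 0;
        int activeClients = 0;
    };

    using PreferredMixerMap =
            std::map<std::pair<int, int>, std::shared_ptr<PreferredMixerEntry>>;

    // Hypothetical helpers mirroring increaseActiveClient()/decreaseActiveClient().
    void onClientStarted(PreferredMixerMap& prefs, int devicePortId, int uid) {
        auto it = prefs.find({devicePortId, uid});
        if (it != prefs.end()) {
            it->second->activeClients++;
        }
    }

    void onClientStopped(PreferredMixerMap& prefs, int devicePortId, int uid) {
        auto it = prefs.find({devicePortId, uid});
        if (it != prefs.end() && it->second->activeClients > 0) {
            it->second->activeClients--;
        }
    }

    int main() {
        PreferredMixerMap prefs;
        auto entry = std::make_shared<PreferredMixerEntry>();
        entry->devicePortId = 42;   // hypothetical device port id
        entry->ownerUid = 10000;    // hypothetical app uid
        prefs[{42, 10000}] = entry;
        onClientStarted(prefs, 42, 10000);
        onClientStopped(prefs, 42, 10000);
        return 0;
    }
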
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8eefe77..a46186b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -26,6 +26,7 @@
 #include "Volume.h"
 #include "HwModule.h"
 #include "TypeConverter.h"
+#include "policy.h"
 #include <media/AudioGain.h>
 #include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
@@ -301,7 +302,9 @@
     mDirectClientSession(AUDIO_SESSION_NONE)
 {
     if (profile != NULL) {
-        mFlags = (audio_output_flags_t)profile->getFlags();
+        // By default, open the output without the bit-perfect flag; it should only be
+        // applied when an app explicitly requests it.
+        mFlags = (audio_output_flags_t)(profile->getFlags() & (~AUDIO_OUTPUT_FLAG_BIT_PERFECT));
     }
 }
 
@@ -311,6 +314,9 @@
     if (extraInfo != nullptr) {
         allExtraInfo.appendFormat("%s; ", extraInfo);
     }
+    if (mProfile != nullptr) {
+        allExtraInfo.appendFormat("IOProfile name:%s; ", mProfile->getName().c_str());
+    }
     std::string flagsLiteral = toString(mFlags);
     allExtraInfo.appendFormat("Latency: %d; 0x%04x", mLatency, mFlags);
     if (!flagsLiteral.empty()) {
@@ -931,6 +937,27 @@
     return false;
 }
 
+bool SwAudioOutputDescriptor::isConfigurationMatched(const audio_config_base_t &config,
+                                                     audio_output_flags_t flags) {
+    const uint32_t mustMatchOutputFlags =
+            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    return audio_output_flags_is_subset(AudioOutputDescriptor::mFlags, flags, mustMatchOutputFlags)
+            && mSamplingRate == config.sample_rate
+            && mChannelMask == config.channel_mask
+            && mFormat == config.format;
+}
+
+PortHandleVector SwAudioOutputDescriptor::getClientsForStream(
+        audio_stream_type_t streamType) const {
+    PortHandleVector clientsForStream;
+    for (const auto& client : getClientIterable()) {
+        if (client->stream() == streamType) {
+            clientsForStream.push_back(client->portId());
+        }
+    }
+    return clientsForStream;
+}
+
 void SwAudioOutputCollection::dump(String8 *dst) const
 {
     dst->appendFormat("\n Outputs (%zu):\n", size());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 56c0603..003dcaf 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -18,6 +18,9 @@
 //#define LOG_NDEBUG 0
 
 #include <algorithm>
+#include <iterator>
+#include <optional>
+#include <regex>
 #include "AudioPolicyMix.h"
 #include "TypeConverter.h"
 #include "HwModule.h"
@@ -28,13 +31,19 @@
 namespace android {
 namespace {
 
+bool matchAddressToTags(const audio_attributes_t& attr, const String8& addr) {
+    std::optional<std::string> tagAddress = extractAddressFromAudioAttributes(attr);
+    return tagAddress.has_value() && tagAddress->compare(addr.c_str()) == 0;
+}
+
 // Returns true if the criterion matches.
 // The exclude criteria are handled in the same way as positive
 // ones - only the condition itself is evaluated (the function returns the
 // same result for both RULE_MATCH_X and RULE_EXCLUDE_X).
 bool isCriterionMatched(const AudioMixMatchCriterion& criterion,
                         const audio_attributes_t& attr,
-                        const uid_t uid) {
+                        const uid_t uid,
+                        const audio_session_t session) {
     uint32_t ruleWithoutExclusion = criterion.mRule & ~RULE_EXCLUSION_MASK;
     switch(ruleWithoutExclusion) {
         case RULE_MATCH_ATTRIBUTE_USAGE:
@@ -48,6 +57,8 @@
                 userid_t userId = multiuser_get_user_id(uid);
                 return criterion.mValue.mUserId == userId;
             }
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            return criterion.mValue.mAudioSessionId == session;
     }
     ALOGE("Encountered invalid mix rule 0x%x", criterion.mRule);
     return false;
@@ -60,10 +71,11 @@
 //   for the criteria to match.
 bool areMixCriteriaMatched(const std::vector<AudioMixMatchCriterion>& criteria,
                            const audio_attributes_t& attr,
-                           const uid_t uid) {
+                           const uid_t uid,
+                           const audio_session_t session) {
     // If any of the exclusion criteria are matched the mix doesn't match.
     auto isMatchingExcludeCriterion = [&](const AudioMixMatchCriterion& c) {
-        return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid);
+        return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid, session);
     };
     if (std::any_of(criteria.begin(), criteria.end(), isMatchingExcludeCriterion)) {
         return false;
@@ -76,7 +88,7 @@
             continue;
         }
         presentPositiveRules |= criterion.mRule;
-        if (isCriterionMatched(criterion, attr, uid)) {
+        if (isCriterionMatched(criterion, attr, uid, session)) {
             matchedPositiveRules |= criterion.mRule;
         }
     }
@@ -150,6 +162,9 @@
         case RULE_MATCH_USERID:
             ruleValue = std::to_string(criterion.mValue.mUserId);
             break;
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            ruleValue = std::to_string(criterion.mValue.mAudioSessionId);
+            break;
         default:
             unknownRule = true;
         }
@@ -241,7 +256,8 @@
 }
 
 status_t AudioPolicyMixCollection::getOutputForAttr(
-        const audio_attributes_t& attributes, const audio_config_base_t& config, uid_t uid,
+        const audio_attributes_t& attributes, const audio_config_base_t& config, const uid_t uid,
+        const audio_session_t session,
         audio_output_flags_t flags,
         sp<AudioPolicyMix> &primaryMix,
         std::vector<sp<AudioPolicyMix>> *secondaryMixes)
@@ -267,7 +283,7 @@
             continue; // Primary output already found
         }
 
-        if(mixMatch(policyMix.get(), i, attributes, config, uid) == MixMatchStatus::NO_MATCH) {
+        if(!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
             ALOGV("%s: Mix %zu: does not match", __func__, i);
             continue; // skip the mix
         }
@@ -285,9 +301,9 @@
     return NO_ERROR;
 }
 
-AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
-        const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes,
-        const audio_config_base_t& config, uid_t uid) {
+bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
+    const audio_attributes_t& attributes, const audio_config_base_t& config,
+    uid_t uid, audio_session_t session) {
 
     if (mix->mMixType == MIX_TYPE_PLAYERS) {
         // Loopback render mixes are created from a public API and thus restricted
@@ -297,20 +313,20 @@
                   attributes.usage == AUDIO_USAGE_MEDIA ||
                   attributes.usage == AUDIO_USAGE_GAME ||
                   attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
             auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
             if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
 
             if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION) {
                 if (!mix->mVoiceCommunicationCaptureAllowed) {
-                    return MixMatchStatus::NO_MATCH;
+                    return false;
                 }
             } else if (!mix->mAllowPrivilegedMediaPlaybackCapture &&
                 hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
         }
 
@@ -320,32 +336,22 @@
             !((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
               (config.format == mix->mFormat.format)) &&
               config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
-            return MixMatchStatus::NO_MATCH;
+            return false;
         }
 
         // if there is an address match, prioritize that match
-        if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
-                strncmp(attributes.tags + strlen("addr="),
-                        mix->mDeviceAddress.string(),
-                        AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-            ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
-            return MixMatchStatus::MATCH;
-        }
-
-        if (areMixCriteriaMatched(mix->mCriteria, attributes, uid)) {
-            ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
-            return MixMatchStatus::MATCH;
+        if (matchAddressToTags(attributes, mix->mDeviceAddress)
+            || areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+                ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
+                return true;
         }
     } else if (mix->mMixType == MIX_TYPE_RECORDERS) {
         if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
-                strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
-                strncmp(attributes.tags + strlen("addr="),
-                        mix->mDeviceAddress.string(),
-                        AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-            return MixMatchStatus::MATCH;
+            matchAddressToTags(attributes, mix->mDeviceAddress)) {
+            return true;
         }
     }
-    return MixMatchStatus::NO_MATCH;
+    return false;
 }
 
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
@@ -369,6 +375,7 @@
         const audio_attributes_t& attributes,
         const DeviceVector &availDevices,
         uid_t uid,
+        audio_session_t session,
         sp<AudioPolicyMix> *policyMix) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -376,7 +383,7 @@
         if (mix->mMixType != MIX_TYPE_RECORDERS) {
             continue;
         }
-        if (areMixCriteriaMatched(mix->mCriteria, attributes, uid)) {
+        if (areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
             // Assuming PolicyMix only for remote submix for input
             // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX.
             auto mixDevice = availDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
@@ -395,14 +402,14 @@
 status_t AudioPolicyMixCollection::getInputMixForAttr(
         audio_attributes_t attr, sp<AudioPolicyMix> *policyMix)
 {
-    if (strncmp(attr.tags, "addr=", strlen("addr=")) != 0) {
+    std::optional<std::string> address = extractAddressFromAudioAttributes(attr);
+    if (!address.has_value()) {
         return BAD_VALUE;
     }
-    String8 address(attr.tags + strlen("addr="));
 
 #ifdef LOG_NDEBUG
     ALOGV("getInputMixForAttr looking for address %s for source %d\n  mixes available:",
-            address.string(), attr.source);
+            address->c_str(), attr.source);
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix> audioPolicyMix = itemAt(i);
         ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.string());
@@ -412,20 +419,20 @@
     size_t index;
     for (index = 0; index < size(); index++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(index);
-        if (registeredMix->mDeviceAddress.compare(address) == 0) {
+        if (address->compare(registeredMix->mDeviceAddress.c_str()) == 0) {
             ALOGD("getInputMixForAttr found addr=%s dev=0x%x",
                     registeredMix->mDeviceAddress.string(), registeredMix->mDeviceType);
             break;
         }
     }
     if (index == size()) {
-        ALOGW("getInputMixForAttr() no policy for address %s", address.string());
+        ALOGW("getInputMixForAttr() no policy for address %s", address->c_str());
         return BAD_VALUE;
     }
     const sp<AudioPolicyMix> audioPolicyMix = itemAt(index);
 
     if (audioPolicyMix->mMixType != MIX_TYPE_PLAYERS) {
-        ALOGW("getInputMixForAttr() bad policy mix type for address %s", address.string());
+        ALOGW("getInputMixForAttr() bad policy mix type for address %s", address->c_str());
         return BAD_VALUE;
     }
     if (policyMix != nullptr) {
@@ -625,4 +632,14 @@
     }
 }
 
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr) {
+    static const std::regex addrTagRegex("addr=([^;]+)");
+
+    std::cmatch match;
+    if (std::regex_search(attr.tags, match, addrTagRegex)) {
+        return match[1].str();
+    }
+    return std::nullopt;
+}
+
 }; //namespace android
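
The new extractAddressFromAudioAttributes() helper replaces the hand-rolled "addr=" prefix parsing removed above. A minimal standalone demonstration of the same regex, using made-up tag strings:

    #include <iostream>
    #include <optional>
    #include <regex>
    #include <string>

    // Same pattern as in AudioPolicyMix.cpp: capture everything after "addr="
    // up to the next ';' separator (or the end of the tags string).
    static std::optional<std::string> extractAddress(const std::string& tags) {
        static const std::regex addrTagRegex("addr=([^;]+)");
        std::smatch match;
        if (std::regex_search(tags, match, addrTagRegex)) {
            return match[1].str();
        }
        return std::nullopt;
    }

    int main() {
        // Prints "bus0_media_out" (hypothetical example address).
        std::cout << extractAddress("addr=bus0_media_out;foo=bar").value_or("<none>") << "\n";
        // Prints "<none>" because no addr= tag is present.
        std::cout << extractAddress("foo=bar").value_or("<none>") << "\n";
        return 0;
    }
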
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 21f2018..98d7d59 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -24,6 +24,15 @@
 
 namespace android {
 
+IOProfile::IOProfile(const std::string &name, audio_port_role_t role)
+        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
+          curOpenCount(0),
+          curActiveCount(0) {
+    if (role == AUDIO_PORT_ROLE_SOURCE) {
+        mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+    }
+}
+
 bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
                                     uint32_t samplingRate,
                                     uint32_t *updatedSamplingRate,
@@ -40,11 +49,9 @@
     const bool isRecordThread =
             getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
-
-    if (!devices.isEmpty()) {
-        if (!mSupportedDevices.containsAllDevices(devices)) {
-            return false;
-        }
+    if (!areAllDevicesSupported(devices) ||
+            !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
+        return false;
     }
 
     if (!audio_is_valid_format(format) ||
@@ -78,21 +85,6 @@
         }
     }
 
-    const uint32_t mustMatchOutputFlags =
-            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
-    if (isPlaybackThread && (((getFlags() ^ flags) & mustMatchOutputFlags)
-                    || (getFlags() & flags) != flags)) {
-        return false;
-    }
-    // The only input flag that is allowed to be different is the fast flag.
-    // An existing fast stream is compatible with a normal track request.
-    // An existing normal stream is compatible with a fast track request,
-    // but the fast request will be denied by AudioFlinger and converted to normal track.
-    if (isRecordThread && ((getFlags() ^ flags) &
-            ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
-        return false;
-    }
-
     if (updatedSamplingRate != NULL) {
         *updatedSamplingRate = myUpdatedSamplingRate;
     }
@@ -105,6 +97,41 @@
     return true;
 }
 
+bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
+    if (devices.empty()) {
+        return true;
+    }
+    return mSupportedDevices.containsAllDevices(devices);
+}
+
+bool IOProfile::isCompatibleProfileForFlags(uint32_t flags,
+                                            bool exactMatchRequiredForInputFlags) const {
+    const bool isPlaybackThread =
+            getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
+    const bool isRecordThread =
+            getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
+    ALOG_ASSERT(isPlaybackThread != isRecordThread);
+
+    const uint32_t mustMatchOutputFlags =
+            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    if (isPlaybackThread &&
+        !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
+                                      (audio_output_flags_t)flags,
+                                      mustMatchOutputFlags)) {
+        return false;
+    }
+    // The only input flag that is allowed to be different is the fast flag.
+    // An existing fast stream is compatible with a normal track request.
+    // An existing normal stream is compatible with a fast track request,
+    // but the fast request will be denied by AudioFlinger and converted to normal track.
+    if (isRecordThread && ((getFlags() ^ flags) &
+            ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
+        return false;
+    }
+
+    return true;
+}
+
 bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
         const sp<DeviceDescriptor>& device) const {
     if (device == nullptr) {
@@ -116,6 +143,34 @@
                 return device == deviceDesc && deviceDesc->hasCurrentEncodedFormat(); }) == 1;
 }
 
+void IOProfile::toSupportedMixerAttributes(
+        std::vector<audio_mixer_attributes_t> *mixerAttributes) const {
+    if (!hasDynamicAudioProfile()) {
+        // Mixer attributes are only supported when there is a dynamic profile.
+        return;
+    }
+    for (const auto& profile : mProfiles) {
+        if (!profile->isValid()) {
+            continue;
+        }
+        for (const auto sampleRate : profile->getSampleRates()) {
+            for (const auto channelMask : profile->getChannels()) {
+                const audio_config_base_t config = {
+                        .format = profile->getFormat(),
+                        .sample_rate = sampleRate,
+                        .channel_mask = channelMask
+                };
+                for (const auto mixerBehavior : mMixerBehaviors) {
+                    mixerAttributes->push_back({
+                        .config = config,
+                        .mixer_behavior = mixerBehavior
+                    });
+                }
+            }
+        }
+    }
+}
+
 void IOProfile::dump(String8 *dst, int spaces) const
 {
     String8 extraInfo;
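
toSupportedMixerAttributes() above simply enumerates every (sample rate, channel mask, mixer behavior) combination of each valid dynamic profile. A simplified sketch of that enumeration, using plain integer stand-ins instead of the real audio_format_t, audio_channel_mask_t and audio_mixer_behavior_t types:

    #include <cstdint>
    #include <set>
    #include <vector>

    // Stand-in for audio_mixer_attributes_t, for illustration only.
    struct MixerAttr { int format; uint32_t sampleRate; uint32_t channelMask; int behavior; };

    std::vector<MixerAttr> enumerate(int format,
                                     const std::vector<uint32_t>& sampleRates,
                                     const std::vector<uint32_t>& channelMasks,
                                     const std::set<int>& mixerBehaviors) {
        std::vector<MixerAttr> out;
        // One entry per (sample rate, channel mask, mixer behavior) combination,
        // mirroring the nested loops in IOProfile::toSupportedMixerAttributes().
        for (uint32_t rate : sampleRates) {
            for (uint32_t mask : channelMasks) {
                for (int behavior : mixerBehaviors) {
                    out.push_back({format, rate, mask, behavior});
                }
            }
        }
        return out;
    }

    int main() {
        // Two sample rates, one channel mask, two behaviors -> four combinations.
        auto attrs = enumerate(/*format=*/1, {44100, 48000}, {0x3}, {0, 1});
        return attrs.size() == 4 ? 0 : 1;
    }
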
diff --git a/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
new file mode 100644
index 0000000..edb2c6d
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PreferredMixerAttributesInfo.h"
+
+namespace android {
+
+void PreferredMixerAttributesInfo::dump(String8 *dst) {
+    dst->appendFormat("device port ID: %d; owner uid: %d; profile name: %s; flags: %#x; "
+                      "sample rate: %u; channel mask: %#x; format: %#x; mixer behavior: %d; "
+                      "active clients count: %d\n",
+                      mDevicePortId, mUid, mProfile->getName().c_str(), mOutputFlags,
+                      mMixerAttributes.config.sample_rate, mMixerAttributes.config.channel_mask,
+                      mMixerAttributes.config.format, mMixerAttributes.mixer_behavior,
+                      mActiveClientsCount);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index c5b3546..8a44547 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -56,10 +56,12 @@
     MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
+    MAKE_STRING_FROM_ENUM(RULE_MATCH_AUDIO_SESSION_ID),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
+    MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_AUDIO_SESSION_ID),
     TERMINATOR
 };
 
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 2aa2f9a..e8251e3 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -24,7 +24,7 @@
 #include <vector>
 
 #include <HandleGenerator.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
@@ -43,20 +43,14 @@
 class ProductStrategy : public virtual RefBase, private HandleGenerator<uint32_t>
 {
 private:
-    struct AudioAttributes {
-        audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
-        volume_group_t mVolumeGroup = VOLUME_GROUP_NONE;
-        audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-    };
-
-    using AudioAttributesVector = std::vector<AudioAttributes>;
+    using VolumeGroupAttributesVector = std::vector<VolumeGroupAttributes>;
 
 public:
     ProductStrategy(const std::string &name);
 
-    void addAttributes(const AudioAttributes &audioAttributes);
+    void addAttributes(const VolumeGroupAttributes &volumeGroupAttributes);
 
-    std::vector<android::AudioAttributes> listAudioAttributes() const;
+    std::vector<android::VolumeGroupAttributes> listVolumeGroupAttributes() const;
 
     std::string getName() const { return mName; }
     AttributesVector getAudioAttributes() const;
@@ -105,7 +99,7 @@
 private:
     std::string mName;
 
-    AudioAttributesVector mAttributesVector;
+    VolumeGroupAttributesVector mAttributesVector;
 
     product_strategy_t mId;
 
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 99507ee..9b78758 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -145,7 +145,7 @@
     };
     auto addSupportedAttributesToGroup = [](auto &group, auto &volumeGroup, auto &strategy) {
         for (const auto &attr : group.attributesVect) {
-            strategy->addAttributes({group.stream, volumeGroup->getId(), attr});
+            strategy->addAttributes({volumeGroup->getId(), group.stream, attr});
             volumeGroup->addSupportedAttributes(attr);
         }
     };
@@ -284,7 +284,7 @@
     for (const auto &iter : mProductStrategies) {
         const auto &productStrategy = iter.second;
         strategies.push_back(
-        {productStrategy->getName(), productStrategy->listAudioAttributes(),
+        {productStrategy->getName(), productStrategy->listVolumeGroupAttributes(),
          productStrategy->getId()});
     }
     return NO_ERROR;
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index b036e12..548a20d 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <EngineConfig.h>
+
 #include <system/audio.h>
 
 namespace android {
@@ -25,11 +27,11 @@
 const engineConfig::ProductStrategies gOrderedStrategies = {
     {"STRATEGY_PHONE",
      {
-         {"phone", AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
+         {AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}},
          },
-         {"sco", AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
+         {AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_SCO,
             ""}},
          }
@@ -37,11 +39,11 @@
     },
     {"STRATEGY_SONIFICATION",
      {
-         {"ring", AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
+         {AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
-         {"alarm", AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
+         {AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}},
          }
@@ -49,7 +51,7 @@
     },
     {"STRATEGY_ENFORCED_AUDIBLE",
      {
-         {"", AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
+         {AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_AUDIBILITY_ENFORCED, ""}}
          }
@@ -57,7 +59,7 @@
     },
     {"STRATEGY_ACCESSIBILITY",
      {
-         {"", AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
+         {AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
@@ -65,7 +67,7 @@
     },
     {"STRATEGY_SONIFICATION_RESPECTFUL",
      {
-         {"", AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
+         {AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
@@ -77,11 +79,11 @@
     },
     {"STRATEGY_MEDIA",
      {
-         {"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
+         {AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
-         {"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
+         {AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
@@ -95,7 +97,7 @@
                AUDIO_FLAG_NONE, ""}
           },
          },
-         {"system", AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
+         {AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_SONIFICATION,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
@@ -103,7 +105,7 @@
     },
     {"STRATEGY_DTMF",
      {
-         {"", AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
+         {AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
@@ -113,7 +115,7 @@
     },
     {"STRATEGY_CALL_ASSISTANT",
      {
-         {"", AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
+         {AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}}
          }
@@ -121,7 +123,7 @@
     },
     {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER",
      {
-         {"", AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
+         {AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
                 AUDIO_FLAG_BEACON, ""},
@@ -138,19 +140,19 @@
  * For compatibility reasons with the APM volume config file, the volume group name is the
  * stream type.
  */
 const engineConfig::ProductStrategies gOrderedSystemStrategies = {
-    {"rerouting",
+    {"STRATEGY_REROUTING",
      {
-         {"", AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
+         {AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
-            AUDIO_FLAG_NONE, ""}}
+            AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
          }
      },
     },
-    {"patch",
+    {"STRATEGY_PATCH",
      {
-         {"", AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
+         {AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
-            AUDIO_FLAG_NONE, ""}}
+            AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
          }
      },
     }
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index fbfcf72..c104c97 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -36,16 +36,16 @@
 {
 }
 
-void ProductStrategy::addAttributes(const AudioAttributes &audioAttributes)
+void ProductStrategy::addAttributes(const VolumeGroupAttributes &volumeGroupAttributes)
 {
-    mAttributesVector.push_back(audioAttributes);
+    mAttributesVector.push_back(volumeGroupAttributes);
 }
 
-std::vector<android::AudioAttributes> ProductStrategy::listAudioAttributes() const
+std::vector<android::VolumeGroupAttributes> ProductStrategy::listVolumeGroupAttributes() const
 {
-    std::vector<android::AudioAttributes> androidAa;
+    std::vector<android::VolumeGroupAttributes> androidAa;
     for (const auto &attr : mAttributesVector) {
-        androidAa.push_back({attr.mVolumeGroup, attr.mStream, attr.mAttributes});
+        androidAa.push_back({attr.getGroupId(), attr.getStreamType(), attr.getAttributes()});
     }
     return androidAa;
 }
@@ -54,7 +54,7 @@
 {
     AttributesVector attrVector;
     for (const auto &attrGroup : mAttributesVector) {
-        attrVector.push_back(attrGroup.mAttributes);
+        attrVector.push_back(attrGroup.getAttributes());
     }
     if (not attrVector.empty()) {
         return attrVector;
@@ -66,7 +66,7 @@
 {
     return std::find_if(begin(mAttributesVector), end(mAttributesVector),
                         [&attr](const auto &supportedAttr) {
-        return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr);
+        return AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr);
     }) != end(mAttributesVector);
 }
 
@@ -75,11 +75,11 @@
 {
     const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
                                    [&attr](const auto &supportedAttr) {
-        return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr); });
+        return AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr); });
     if (iter == end(mAttributesVector)) {
         return AUDIO_STREAM_DEFAULT;
     }
-    audio_stream_type_t streamType = iter->mStream;
+    audio_stream_type_t streamType = iter->getStreamType();
     ALOGW_IF(streamType == AUDIO_STREAM_DEFAULT,
              "%s: Strategy %s supporting attributes %s has not stream type associated"
              "fallback on MUSIC. Do not use stream volume API", __func__, mName.c_str(),
@@ -91,23 +91,23 @@
 {
     const auto iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
                                    [&streamType](const auto &supportedAttr) {
-        return supportedAttr.mStream == streamType; });
-    return iter != end(mAttributesVector) ? iter->mAttributes : AUDIO_ATTRIBUTES_INITIALIZER;
+        return supportedAttr.getStreamType() == streamType; });
+    return iter != end(mAttributesVector) ? iter->getAttributes() : AUDIO_ATTRIBUTES_INITIALIZER;
 }
 
 bool ProductStrategy::isDefault() const
 {
     return std::find_if(begin(mAttributesVector), end(mAttributesVector), [](const auto &attr) {
-        return attr.mAttributes == defaultAttr; }) != end(mAttributesVector);
+        return attr.getAttributes() == defaultAttr; }) != end(mAttributesVector);
 }
 
 StreamTypeVector ProductStrategy::getSupportedStreams() const
 {
     StreamTypeVector streams;
     for (const auto &supportedAttr : mAttributesVector) {
-        if (std::find(begin(streams), end(streams), supportedAttr.mStream) == end(streams) &&
-                supportedAttr.mStream != AUDIO_STREAM_DEFAULT) {
-            streams.push_back(supportedAttr.mStream);
+        if (std::find(begin(streams), end(streams), supportedAttr.getStreamType())
+                == end(streams) && supportedAttr.getStreamType() != AUDIO_STREAM_DEFAULT) {
+            streams.push_back(supportedAttr.getStreamType());
         }
     }
     return streams;
@@ -117,14 +117,14 @@
 {
     return std::find_if(begin(mAttributesVector), end(mAttributesVector),
                         [&streamType](const auto &supportedAttr) {
-        return supportedAttr.mStream == streamType; }) != end(mAttributesVector);
+        return supportedAttr.getStreamType() == streamType; }) != end(mAttributesVector);
 }
 
 volume_group_t ProductStrategy::getVolumeGroupForAttributes(const audio_attributes_t &attr) const
 {
     for (const auto &supportedAttr : mAttributesVector) {
-        if (AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr)) {
-            return supportedAttr.mVolumeGroup;
+        if (AudioProductStrategy::attributesMatches(supportedAttr.getAttributes(), attr)) {
+            return supportedAttr.getGroupId();
         }
     }
     return VOLUME_GROUP_NONE;
@@ -133,8 +133,8 @@
 volume_group_t ProductStrategy::getVolumeGroupForStreamType(audio_stream_type_t stream) const
 {
     for (const auto &supportedAttr : mAttributesVector) {
-        if (supportedAttr.mStream == stream) {
-            return supportedAttr.mVolumeGroup;
+        if (supportedAttr.getStreamType() == stream) {
+            return supportedAttr.getGroupId();
         }
     }
     return VOLUME_GROUP_NONE;
@@ -143,8 +143,10 @@
 volume_group_t ProductStrategy::getDefaultVolumeGroup() const
 {
     const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
-                                    [](const auto &attr) {return attr.mAttributes == defaultAttr;});
-    return iter != end(mAttributesVector) ? iter->mVolumeGroup : VOLUME_GROUP_NONE;
+                                    [](const auto &attr) {
+        return attr.getAttributes() == defaultAttr;
+    });
+    return iter != end(mAttributesVector) ? iter->getGroupId() : VOLUME_GROUP_NONE;
 }
 
 void ProductStrategy::dump(String8 *dst, int spaces) const
@@ -155,11 +157,11 @@
                        deviceLiteral.c_str(), mDeviceAddress.c_str());
 
     for (const auto &attr : mAttributesVector) {
-        dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.mVolumeGroup,
-                          android::toString(attr.mStream).c_str());
+        dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.getGroupId(),
+                          android::toString(attr.getStreamType()).c_str());
         dst->appendFormat("%*s Attributes: ", spaces + 3, "");
-        std::string attStr =
-                attr.mAttributes == defaultAttr ? "{ Any }" : android::toString(attr.mAttributes);
+        std::string attStr = attr.getAttributes() == defaultAttr ?
+                "{ Any }" : android::toString(attr.getAttributes());
         dst->appendFormat("%s\n", attStr.c_str());
     }
 }
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 2ebb7df..4de16c5 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -25,6 +25,12 @@
 struct _xmlNode;
 struct _xmlDoc;
 
+/**
+ * AudioAttributes custom tag to identify internal strategies, whose volumes are exclusively
+ * controlled by AudioPolicyManager
+ */
+#define AUDIO_TAG_APM_RESERVED_INTERNAL "reserved_internal_strategy"
+
 namespace android {
 namespace engineConfig {
 
@@ -35,7 +41,6 @@
 using StreamVector = std::vector<audio_stream_type_t>;
 
 struct AttributesGroup {
-    std::string name;
     audio_stream_type_t stream;
     std::string volumeGroup;
     AttributesVector attributesVect;
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 6f560d5..ac117f0 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -57,7 +57,6 @@
     static constexpr const char *collectionTag = "AttributesGroups";
 
     struct Attributes {
-        static constexpr const char *name = "name";
         static constexpr const char *streamType = "streamType";
         static constexpr const char *volumeGroup = "volumeGroup";
     };
@@ -313,12 +312,6 @@
 status_t AttributesGroupTraits::deserialize(_xmlDoc *doc, const _xmlNode *child,
                                             Collection &attributesGroup)
 {
-    std::string name = getXmlAttribute(child, Attributes::name);
-    if (name.empty()) {
-        ALOGV("AttributesGroupTraits No attribute %s found", Attributes::name);
-    }
-    ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
-
     std::string volumeGroup = getXmlAttribute(child, Attributes::volumeGroup);
     if (volumeGroup.empty()) {
         ALOGE("%s: No attribute %s found", __FUNCTION__, Attributes::volumeGroup);
@@ -339,7 +332,7 @@
     AttributesVector attributesVect;
     deserializeAttributesCollection(doc, child, attributesVect);
 
-    attributesGroup.push_back({name, streamType, volumeGroup, attributesVect});
+    attributesGroup.push_back({streamType, volumeGroup, attributesVect});
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index 518f86e..70d25fc 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -173,10 +173,11 @@
      * @param[out] mix to be used if a mix has been installed for the given audio attributes.
      * @return selected input device for the audio attributes, may be null if error.
      */
-    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
-                                                             uid_t uid = 0,
-                                                             sp<AudioPolicyMix> *mix = nullptr)
-                                                             const = 0;
+    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
+            const audio_attributes_t &attr,
+            uid_t uid = 0,
+            audio_session_t session = AUDIO_SESSION_NONE,
+            sp<AudioPolicyMix> *mix = nullptr) const = 0;
 
     /**
      * Get the legacy stream type for a given audio attributes.
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 2831a9b..9d53017 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -315,6 +315,7 @@
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                          uid_t uid,
+                                                         audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -336,6 +337,7 @@
     device = policyMixes.getDeviceAndMixForInputSource(attr,
                                                        availableInputDevices,
                                                        uid,
+                                                       session,
                                                        mix);
     if (device != nullptr) {
         return device;
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 4b559f0..6ac20cd 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -63,6 +63,7 @@
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                      uid_t uid = 0,
+                                                     audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 45c5eac..d96ae21 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -78,7 +78,7 @@
     case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
         if (config != AUDIO_POLICY_FORCE_SPEAKER && config != AUDIO_POLICY_FORCE_BT_SCO &&
             config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_COMMUNICATION", config);
+            ALOGW("setForceUse() invalid config %d for COMMUNICATION", config);
             return BAD_VALUE;
         }
         break;
@@ -88,14 +88,14 @@
             config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
             config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) {
-            ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
+            ALOGW("setForceUse() invalid config %d for MEDIA", config);
             return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_RECORD:
         if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
             config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_RECORD", config);
+            ALOGW("setForceUse() invalid config %d for RECORD", config);
             return BAD_VALUE;
         }
         break;
@@ -105,19 +105,22 @@
             config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
             config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
             config != AUDIO_POLICY_FORCE_DIGITAL_DOCK) {
-            ALOGW("setForceUse() invalid config %d for FOR_DOCK", config);
+            ALOGW("setForceUse() invalid config %d for DOCK", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_SYSTEM:
         if (config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
-            ALOGW("setForceUse() invalid config %d for FOR_SYSTEM", config);
+            ALOGW("setForceUse() invalid config %d for SYSTEM", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
         if (config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED) {
             ALOGW("setForceUse() invalid config %d for HDMI_SYSTEM_AUDIO", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
@@ -131,13 +134,13 @@
         break;
     case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
         if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_VIBRATE_RINGING", config);
+            ALOGW("setForceUse() invalid config %d for VIBRATE_RINGING", config);
             return BAD_VALUE;
         }
         break;
     default:
         ALOGW("setForceUse() invalid usage %d", usage);
-        break; // TODO return BAD_VALUE?
+        return BAD_VALUE;
     }
     return EngineBase::setForceUse(usage, config);
 }
@@ -273,10 +276,15 @@
         break;
 
     case STRATEGY_PHONE: {
-        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
-        if (!devices.isEmpty()) break;
+        // TODO(b/243670205): remove this logic that gives preference to last removable devices
+        // once a UX decision has been made
         devices = availableOutputDevices.getFirstDevicesFromTypes(
-                        getLastRemovableMediaDevices(GROUP_NONE, {AUDIO_DEVICE_OUT_BLE_HEADSET}));
+                        getLastRemovableMediaDevices(GROUP_NONE, {
+                            // excluding HEARING_AID and BLE_HEADSET because Dialer uses
+                            // setCommunicationDevice to select them explicitly
+                            AUDIO_DEVICE_OUT_HEARING_AID,
+                            AUDIO_DEVICE_OUT_BLE_HEADSET
+                            }));
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes({
                 AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
@@ -455,7 +463,7 @@
     }
 
     if (devices.isEmpty()) {
-        ALOGV("%s no device found for strategy %d", __func__, strategy);
+        ALOGI("%s no device found for strategy %d", __func__, strategy);
         sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
         if (defaultOutputDevice != nullptr) {
             devices.add(defaultOutputDevice);
@@ -691,6 +699,18 @@
     return preferredAvailableDevVec;
 }
 
+DeviceVector Engine::getDisabledDevicesForProductStrategy(
+        const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector disabledDevices = {};
+    AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+    if (status == NO_ERROR) {
+        disabledDevices =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+    }
+    return disabledDevices;
+}
 
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
     const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
@@ -714,6 +734,11 @@
         return preferredAvailableDevVec;
     }
 
+    // Remove all disabled devices from the available device list.
+    DeviceVector disabledDevVec =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    availableOutputDevices.remove(disabledDevVec);
+
     return getDevicesForStrategyInt(legacyStrategy,
                                     availableOutputDevices,
                                     outputs);
@@ -753,6 +778,7 @@
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                          uid_t uid,
+                                                         audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -775,6 +801,7 @@
     device = policyMixes.getDeviceAndMixForInputSource(attr,
                                                        availableInputDevices,
                                                        uid,
+                                                       session,
                                                        mix);
     if (device != nullptr) {
         return device;
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 595e289..ab556ee 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -64,6 +64,7 @@
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                      uid_t uid = 0,
+                                                     audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
@@ -96,6 +97,8 @@
         const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
     DeviceVector getPreferredAvailableDevicesForProductStrategy(
         const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+    DeviceVector getDisabledDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
 
     DeviceStrategyMap mDevicesForStrategies;
 
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 28268c9..14f565b 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -259,13 +259,15 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
+    bool isBitPerfect;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
-            &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized) != OK) {
+            &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+            &isBitPerfect) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c9ba603..1d4eb1e 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -104,7 +104,7 @@
                                                       const char* device_address,
                                                       const char* device_name,
                                                       audio_format_t encodedFormat) {
-    media::AudioPort aidlPort;
+    media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
         status == OK) {
         return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
@@ -162,7 +162,7 @@
                                                          const char* device_address,
                                                          const char* device_name,
                                                          audio_format_t encodedFormat) {
-    media::AudioPort aidlPort;
+    media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
         status == OK) {
         return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
@@ -250,6 +250,9 @@
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
 
+            // remove preferred mixer configurations
+            mPreferredMixerAttrInfos.erase(device->getId());
+
             } break;
 
         default:
@@ -306,10 +309,10 @@
             checkCloseOutputs();
         }
         (void)updateCallRouting(false /*fromCache*/);
-        std::vector<audio_io_handle_t> outputsToReopen;
         const DeviceVector msdOutDevices = getMsdAudioOutDevices();
         const DeviceVector activeMediaDevices =
                 mEngine->getActiveMediaDevices(mAvailableOutputDevices);
+        std::map<audio_io_handle_t, DeviceVector> outputsToReopenWithDevices;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
@@ -323,6 +326,13 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                    // If the output uses preferred mixer attributes, it needs to be reopened
+                    // with the default configuration when the newly selected devices differ
+                    // from the current routing devices.
+                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
+                    continue;
+                }
                 setOutputDevices(desc, newDevices, force, 0);
             }
             if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
@@ -333,7 +343,7 @@
                 // `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
                 // can be reopened to query dynamic profiles when all clients are inactive.
                 if (areAllActiveTracksRerouted(desc)) {
-                    outputsToReopen.push_back(mOutputs.keyAt(i));
+                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), activeMediaDevices);
                 } else {
                     desc->mPendingReopenToQueryProfiles = true;
                 }
@@ -343,11 +353,7 @@
                 desc->mPendingReopenToQueryProfiles = false;
             }
         }
-        for (const auto& output : outputsToReopen) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
-            closeOutput(output);
-            openOutputWithProfileAndDevice(desc->mProfile, activeMediaDevices);
-        }
+        reopenOutputsWithDevices(outputsToReopenWithDevices);
 
         if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
             cleanUpForDevice(device);
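
This hunk also introduces the recurring outputsToReopenWithDevices pattern: instead of
reopening an output in the middle of the loop over mOutputs (reopening closes and reopens
entries of that same collection), the affected outputs and their new devices are collected
into a map and handed to reopenOutputsWithDevices() once the loop is done. A rough,
illustrative sketch of that shape with simplified stand-in types (the helper below is a
stub, not the real reopenOutputsWithDevices()):

    // Illustrative only, not part of the patch: the deferred-reopen pattern.
    #include <map>
    #include <string>
    #include <vector>

    using IoHandle = int;
    using Devices  = std::vector<std::string>;      // stand-in for DeviceVector

    struct Output { bool usePreferredMixerAttributes; Devices devices; };

    // Stub standing in for reopening an output on its new devices.
    void reopenWithDevices(IoHandle, const Devices&) {}

    void rerouteAll(std::map<IoHandle, Output>& outputs,
                    const std::map<IoHandle, Devices>& newDevicesFor) {
        std::map<IoHandle, Devices> toReopen;
        for (auto& [handle, out] : outputs) {
            const Devices& newDevices = newDevicesFor.at(handle);
            if (out.usePreferredMixerAttributes && newDevices != out.devices) {
                toReopen.emplace(handle, newDevices);   // defer: don't reopen mid-iteration
                continue;
            }
            out.devices = newDevices;                   // stand-in for setOutputDevices()
        }
        for (const auto& [handle, devices] : toReopen) {
            reopenWithDevices(handle, devices);         // done after the loop, as above
        }
    }
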
@@ -444,7 +450,7 @@
 
 status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
                                                const char* device_name,
-                                               media::AudioPort* aidlPort) {
+                                               media::AudioPortFw* aidlPort) {
     DeviceDescriptorBase devDescr(device, device_address);
     devDescr.setName(device_name);
     return devDescr.writeToParcelable(aidlPort);
@@ -814,7 +820,7 @@
     if (isStateInCall(oldState)) {
         ALOGV("setPhoneState() in call state management: new state is %d", state);
         // force reevaluating accessibility routing when call stops
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+        invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
     }
 
     /**
@@ -872,23 +878,32 @@
         }
     }
 
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     // reevaluate routing on all outputs in case tracks have been started during the call
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
+            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                // If the output uses preferred mixer attributes, it needs to be reopened with the
+                // default configuration when the newly selected devices differ from the current
+                // routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                continue;
+            }
             setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
 
     checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
 
     if (isStateInCall(state)) {
         ALOGV("setPhoneState() in call state management: new state is %d", state);
         // force reevaluating accessibility routing when call starts
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+        invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
     }
 
     // Flag that ringtone volume must be limited to music volume until we exit MODE_RINGTONE
@@ -921,8 +936,7 @@
 
     // force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
     if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
-        mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+        invalidateStreams({AUDIO_STREAM_SYSTEM, AUDIO_STREAM_ENFORCED_AUDIBLE});
     }
 
     //FIXME: workaround for truncated touch sounds
@@ -1123,13 +1137,14 @@
         const audio_attributes_t *attr,
         audio_stream_type_t *stream,
         uid_t uid,
-        const audio_config_t *config,
+        audio_config_t *config,
         audio_output_flags_t *flags,
         audio_port_handle_t *selectedDeviceId,
         bool *isRequestedDeviceForExclusiveUse,
         std::vector<sp<AudioPolicyMix>> *secondaryMixes,
         output_type_t *outputType,
-        bool *isSpatialized)
+        bool *isSpatialized,
+        bool *isBitPerfect)
 {
     DeviceVector outputDevices;
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -1160,8 +1175,8 @@
         .channel_mask = config->channel_mask,
         .format = config->format,
     };
-    status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, *flags, primaryMix,
-                                           secondaryMixes);
+    status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, session, *flags,
+                                           primaryMix, secondaryMixes);
     if (status != OK) {
         return status;
     }
@@ -1260,10 +1275,33 @@
         }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
+        sp<PreferredMixerAttributesInfo> info = nullptr;
+        if (outputDevices.size() == 1) {
+            info = getPreferredMixerAttributesInfo(
+                    outputDevices.itemAt(0)->getId(),
+                    mEngine->getProductStrategyForAttributes(*resultAttr));
+            if (info != nullptr && info->getUid() != uid && info->getActiveClientCount() == 0) {
+                // Only use the preferred mixer when the requesting uid matches or there is
+                // an active client on the preferred mixer.
+                info = nullptr;
+            }
+        }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
-                flags, isSpatialized, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+                flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+        *isBitPerfect = (info != nullptr
+                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && *output != AUDIO_IO_HANDLE_NONE);
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
+        AudioProfileVector profiles;
+        status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
+        if (ret == NO_ERROR && !profiles.empty()) {
+            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+                    : *profiles[0]->getChannels().begin();
+            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+                    : *profiles[0]->getSampleRates().begin();
+            config->format = profiles[0]->getFormat();
+        }
         return INVALID_OPERATION;
     }
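
The fallback added at the end of this hunk is why the config parameter is no longer passed
as const in these functions: when no output can be opened, the first reported profile's
channel mask, sample rate and format are written back into the caller's config before
INVALID_OPERATION is returned, giving the client something supported to retry with. A
small, illustrative sketch of that suggestion logic (Profile and Config below are
simplified stand-ins, not the real classes):

    // Illustrative only, not part of the patch: suggest a supported config on failure.
    #include <set>
    #include <vector>

    struct Profile {
        std::set<unsigned> channelMasks;
        std::set<unsigned> sampleRates;
        int format;
    };
    struct Config { unsigned channelMask; unsigned sampleRate; int format; };

    // Keep the requested value when the profile leaves it open, otherwise overwrite
    // it with the first supported value, as the code above does.
    void suggestConfig(const std::vector<Profile>& profiles, Config* config) {
        if (profiles.empty()) return;
        const Profile& p = profiles.front();
        if (!p.channelMasks.empty()) config->channelMask = *p.channelMasks.begin();
        if (!p.sampleRates.empty())  config->sampleRate  = *p.sampleRates.begin();
        config->format = p.format;
    }
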
 
@@ -1291,13 +1329,14 @@
                                               audio_session_t session,
                                               audio_stream_type_t *stream,
                                               const AttributionSourceState& attributionSource,
-                                              const audio_config_t *config,
+                                              audio_config_t *config,
                                               audio_output_flags_t *flags,
                                               audio_port_handle_t *selectedDeviceId,
                                               audio_port_handle_t *portId,
                                               std::vector<audio_io_handle_t> *secondaryOutputs,
                                               output_type_t *outputType,
-                                              bool *isSpatialized)
+                                              bool *isSpatialized,
+                                              bool *isBitPerfect)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1319,7 +1358,8 @@
 
     status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
             config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized);
+            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
+            isBitPerfect);
     if (status != NO_ERROR) {
         return status;
     }
@@ -1463,6 +1503,7 @@
         const audio_config_t *config,
         audio_output_flags_t *flags,
         bool *isSpatialized,
+        sp<PreferredMixerAttributesInfo> prefMixerConfigInfo,
         bool forceMutingHaptic)
 {
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -1538,11 +1579,36 @@
         // get which output is suitable for the specified stream. The actual
         // routing change will happen when startOutput() will be called
         SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
-
-        // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
-        *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(
-                outputs, *flags, config->format, channelMask, config->sample_rate, session);
+        if (prefMixerConfigInfo != nullptr) {
+            for (audio_io_handle_t outputHandle : outputs) {
+                sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputHandle);
+                if (outputDesc->mProfile == prefMixerConfigInfo->getProfile()) {
+                    output = outputHandle;
+                    break;
+                }
+            }
+            if (output == AUDIO_IO_HANDLE_NONE) {
+                // No output open with the preferred profile. Open a new one.
+                audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                config.channel_mask = prefMixerConfigInfo->getConfigBase().channel_mask;
+                config.sample_rate = prefMixerConfigInfo->getConfigBase().sample_rate;
+                config.format = prefMixerConfigInfo->getConfigBase().format;
+                sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+                        prefMixerConfigInfo->getProfile(), devices, nullptr /*mixerConfig*/,
+                        &config, prefMixerConfigInfo->getFlags());
+                if (preferredOutput == nullptr) {
+                    ALOGE("%s failed to open output with preferred mixer config", __func__);
+                } else {
+                    output = preferredOutput->mIoHandle;
+                }
+            }
+        } else {
+            // at this stage we should ignore the DIRECT flag as no direct output could be
+            // found earlier
+            *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            output = selectOutput(
+                    outputs, *flags, config->format, channelMask, config->sample_rate, session);
+        }
     }
     ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
             "sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -2015,8 +2081,47 @@
 
     if (status != NO_ERROR) {
         outputDesc->stop();
+        if (status == DEAD_OBJECT) {
+            sp<SwAudioOutputDescriptor> desc =
+                    reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+            if (desc == nullptr) {
+                // This is not common; it may indicate something wrong with the HAL.
+                ALOGE("%s unable to open output with default config", __func__);
+                return status;
+            }
+            desc->mUsePreferredMixerAttributes = true;
+        }
         return status;
     }
+
+    // If the client is the first one to become active on the preferred mixer, reopen the
+    // output if the current mixer configuration doesn't match the preferred one.
+    if (outputDesc->devices().size() == 1) {
+        sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+                outputDesc->devices()[0]->getId(), client->strategy());
+        if (info != nullptr && info->getUid() == client->uid()) {
+            if (info->getActiveClientCount() == 0 && !outputDesc->isConfigurationMatched(
+                    info->getConfigBase(), info->getFlags())) {
+                stopSource(outputDesc, client);
+                outputDesc->stop();
+                audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                config.channel_mask = info->getConfigBase().channel_mask;
+                config.sample_rate = info->getConfigBase().sample_rate;
+                config.format = info->getConfigBase().format;
+                sp<SwAudioOutputDescriptor> desc =
+                        reopenOutput(outputDesc, &config, info->getFlags(), __func__);
+                if (desc == nullptr) {
+                    return BAD_VALUE;
+                }
+                desc->mUsePreferredMixerAttributes = true;
+                // Intentionally return an error so that the client side resends the request
+                // for creating and starting.
+                return DEAD_OBJECT;
+            }
+            info->increaseActiveClient();
+        }
+    }
+
     if (delayMs != 0) {
         usleep(delayMs * 1000);
     }
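
The start-side bookkeeping above pairs with the stop-side hunk a little further down: the
first matching client triggers a reopen with the preferred configuration (signalled by
DEAD_OBJECT so the client recreates and restarts its track), and the last one to stop lets
the output fall back to the default configuration. A simplified, illustrative sketch of
that reference-counting contract (the names and types below are stand-ins, not the real
API):

    // Illustrative only, not part of the patch: first-in reopens with the preferred
    // config, last-out restores the defaults; DEAD_OBJECT means "recreate your track".
    #include <cstdint>

    enum Status { STATUS_OK, STATUS_DEAD_OBJECT };

    struct PreferredMixerInfo { uint32_t ownerUid; int activeClients = 0; };
    struct Output { bool configMatchesPreferred = false; };

    Status onClientStart(PreferredMixerInfo& info, Output& out, uint32_t clientUid) {
        if (clientUid != info.ownerUid) return STATUS_OK;    // not the owner: nothing to do
        if (info.activeClients == 0 && !out.configMatchesPreferred) {
            out.configMatchesPreferred = true;               // stand-in for reopenOutput()
            return STATUS_DEAD_OBJECT;                       // client must recreate and retry
        }
        info.activeClients++;
        return STATUS_OK;
    }

    void onClientStop(PreferredMixerInfo& info, Output& out, uint32_t clientUid) {
        if (clientUid != info.ownerUid) return;
        if (--info.activeClients == 0) {
            out.configMatchesPreferred = false;              // stand-in for reopen with defaults
        }
    }
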
@@ -2165,8 +2270,14 @@
             }
         }
 
+        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+            // If the output was opened with preferred mixer attributes but the routed device
+            // changed while calling this function, return DEAD_OBJECT to indicate that the
+            // routing has changed.
+            return DEAD_OBJECT;
+        }
         const uint32_t muteWaitMs =
-                setOutputDevices(outputDesc, devices, force, 0, NULL, requiresMuteCheck);
+                setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
 
         // apply volume rules for current stream and device if necessary
         auto &curves = getVolumeCurves(client->attributes());
@@ -2186,7 +2297,7 @@
 
         // force reevaluating accessibility routing when ringtone or alarm starts
         if (followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_ALARM))) {
-            mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+            invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
         }
 
         if (waitMs > muteWaitMs) {
@@ -2229,6 +2340,7 @@
     bool isUnicastActive = isLeUnicastActive();
 
     if (wasUnicastActive != isUnicastActive) {
+        std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
         //reroute all outputs routed to LE broadcast if LE unicast activy changed on any output
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2241,6 +2353,13 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
+                if (desc->mUsePreferredMixerAttributes && force) {
+                    // If the output uses preferred mixer attributes, it needs to be reopened
+                    // with the default configuration when the newly selected devices differ
+                    // from the current routing devices.
+                    outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                    continue;
+                }
                 setOutputDevices(desc, newDevices, force, delayMs);
                 // re-apply device specific volume if not done by setOutputDevice()
                 if (!force) {
@@ -2248,6 +2367,7 @@
                 }
             }
         }
+        reopenOutputsWithDevices(outputsToReopen);
     }
 }
 
@@ -2269,6 +2389,19 @@
 
     if (status == NO_ERROR ) {
         outputDesc->stop();
+    } else {
+        return status;
+    }
+
+    if (outputDesc->devices().size() == 1) {
+        sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+                outputDesc->devices()[0]->getId(), client->strategy());
+        if (info != nullptr && info->getUid() == client->uid()) {
+            info->decreaseActiveClient();
+            if (info->getActiveClientCount() == 0) {
+                reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+            }
+        }
     }
     return status;
 }
@@ -2328,6 +2461,7 @@
 
             // force restoring the device selection on other active outputs if it differs from the
             // one being selected for this output
+            std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
             uint32_t delayMs = outputDesc->latency()*2;
             for (size_t i = 0; i < mOutputs.size(); i++) {
                 sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2338,6 +2472,13 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
+                    if (desc->mUsePreferredMixerAttributes && force) {
+                        // If the output uses preferred mixer attributes, it needs to be
+                        // reopened with the default configuration when the newly selected
+                        // devices differ from the current routing devices.
+                        outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
+                        continue;
+                    }
                     setOutputDevices(desc, newDevices2, force, delayMs);
 
                     // re-apply device specific volume if not done by setOutputDevice()
@@ -2346,6 +2487,7 @@
                     }
                 }
             }
+            reopenOutputsWithDevices(outputsToReopen);
             // update the outputs if stopping one with a stream that can affect notification routing
             handleNotificationRoutingForStream(stream);
         }
@@ -2424,7 +2566,7 @@
                                              audio_unique_id_t riid,
                                              audio_session_t session,
                                              const AttributionSourceState& attributionSource,
-                                             const audio_config_base_t *config,
+                                             audio_config_base_t *config,
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
                                              input_type_t *inputType,
@@ -2507,7 +2649,7 @@
     *inputType = API_INPUT_INVALID;
 
     if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
-            strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
+            extractAddressFromAudioAttributes(attributes).has_value()) {
         status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
         if (status != NO_ERROR) {
             ALOGW("%s could not find input mix for attr %s",
@@ -2535,7 +2677,7 @@
         } else {
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-            device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+            device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
             ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
                 __FUNCTION__, device->type());
         }
@@ -2565,6 +2707,16 @@
     *input = getInputForDevice(device, session, attributes, config, flags, policyMix);
     if (*input == AUDIO_IO_HANDLE_NONE) {
         status = INVALID_OPERATION;
+        AudioProfileVector profiles;
+        status_t ret = getProfilesForDevices(
+                DeviceVector(device), profiles, flags, true /*isInput*/);
+        if (ret == NO_ERROR && !profiles.empty()) {
+            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+                    : *profiles[0]->getChannels().begin();
+            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+                    : *profiles[0]->getSampleRates().begin();
+            config->format = profiles[0]->getFormat();
+        }
         goto error;
     }
 
@@ -2596,7 +2748,7 @@
 audio_io_handle_t AudioPolicyManager::getInputForDevice(const sp<DeviceDescriptor> &device,
                                                         audio_session_t session,
                                                         const audio_attributes_t &attributes,
-                                                        const audio_config_base_t *config,
+                                                        audio_config_base_t *config,
                                                         audio_input_flags_t flags,
                                                         const sp<AudioPolicyMix> &policyMix)
 {
@@ -2623,31 +2775,19 @@
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_ULTRASOUND);
     }
 
-    // find a compatible input profile (not necessarily identical in parameters)
-    sp<IOProfile> profile;
     // sampling rate and flags may be updated by getInputProfile
     uint32_t profileSamplingRate = (config->sample_rate == 0) ?
             SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
-    audio_format_t profileFormat;
+    audio_format_t profileFormat = config->format;
     audio_channel_mask_t profileChannelMask = config->channel_mask;
     audio_input_flags_t profileFlags = flags;
-    for (;;) {
-        profileFormat = config->format; // reset each time through loop, in case it is updated
-        profile = getInputProfile(device, profileSamplingRate, profileFormat, profileChannelMask,
-                                  profileFlags);
-        if (profile != 0) {
-            break; // success
-        } else if (profileFlags & AUDIO_INPUT_FLAG_RAW) {
-            profileFlags = (audio_input_flags_t) (profileFlags & ~AUDIO_INPUT_FLAG_RAW); // retry
-        } else if (profileFlags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(config->format)) {
-            profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
-        } else { // fail
-            ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
-                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
-                  config->sample_rate, config->format, config->channel_mask, flags);
-            return input;
-        }
+    // find a compatible input profile (not necessarily identical in parameters)
+    sp<IOProfile> profile = getInputProfile(
+            device, profileSamplingRate, profileFormat, profileChannelMask, profileFlags);
+    if (profile == nullptr) {
+        return input;
     }
+
     // Pick input sampling rate if not specified by client
     uint32_t samplingRate = config->sample_rate;
     if (samplingRate == 0) {
@@ -2944,7 +3084,8 @@
             bool close = false;
             for (const auto& client : input->clientsList()) {
                 sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
+                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+                                                         client->session());
                 if (!input->supportedDevices().contains(device)) {
                     close = true;
                     break;
@@ -3677,6 +3818,7 @@
         // Only apply special touch sound delay once
         delayMs = 0;
     }
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
@@ -3686,6 +3828,13 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
+            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+                // If the output uses preferred mixer attributes, it needs to be reopened with the
+                // default configuration when the newly selected devices differ from the current
+                // routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                continue;
+            }
             waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
                                       true /*requiresMuteCheck*/,
                                       !forceRouting /*requiresVolumeCheck*/);
@@ -3696,6 +3845,7 @@
             applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
     checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
 }
 
@@ -3900,6 +4050,15 @@
         dst->appendFormat("   - uid=%d flag_mask=%#x\n", policy.first, policy.second);
     }
 
+    dst->appendFormat(" Preferred mixer audio configuration:\n");
+    for (const auto it : mPreferredMixerAttrInfos) {
+        dst->appendFormat("   - device port id: %d\n", it.first);
+        for (const auto preferredMixerInfoIt : it.second) {
+            dst->appendFormat("     - strategy: %d; ", preferredMixerInfoIt.first);
+            preferredMixerInfoIt.second->dump(dst);
+        }
+    }
+
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
 }
@@ -4117,8 +4276,8 @@
     if (mEffects.isNonOffloadableEffectEnabled()) {
         return OK;
     }
-    AudioDeviceTypeAddrVector devices;
-    status_t status = getDevicesForAttributes(*attr, &devices, false /* forVolume */);
+    DeviceVector devices;
+    status_t status = getDevicesForAttributes(*attr, devices, false /* forVolume */);
     if (status != OK) {
         return status;
     }
@@ -4126,43 +4285,182 @@
     if (devices.empty()) {
         return OK; // no output devices for the attributes
     }
+    return getProfilesForDevices(devices, audioProfilesVector,
+                                 AUDIO_OUTPUT_FLAG_DIRECT /*flags*/, false /*isInput*/);
+}
 
+status_t AudioPolicyManager::getSupportedMixerAttributes(
+        audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> &mixerAttrs) {
+    ALOGV("%s, portId=%d", __func__, portId);
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
     for (const auto& hwModule : mHwModules) {
-        // the MSD module checks for different conditions
-        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->supportsDevice(deviceDescriptor)) {
+                curProfile->toSupportedMixerAttributes(&mixerAttrs);
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::setPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        uid_t uid,
+        const audio_mixer_attributes_t *mixerAttributes) {
+    ALOGV("%s, attr=%s, mixerAttributes={format=%#x, channelMask=%#x, samplingRate=%u, "
+          "mixerBehavior=%d}, uid=%d, portId=%u",
+          __func__, toString(*attr).c_str(), mixerAttributes->config.format,
+          mixerAttributes->config.channel_mask, mixerAttributes->config.sample_rate,
+          mixerAttributes->mixer_behavior, uid, portId);
+    if (attr->usage != AUDIO_USAGE_MEDIA) {
+        ALOGE("%s failed, only media is allowed, the given usage is %d", __func__, attr->usage);
+        return BAD_VALUE;
+    }
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s(%d), type=%d, is not a usb output device",
+              __func__, portId, deviceDescriptor->type());
+        return BAD_VALUE;
+    }
+
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+    audio_flags_to_audio_output_flags(attr->flags, &flags);
+    flags = (audio_output_flags_t) (flags |
+            audio_output_flags_from_mixer_behavior(mixerAttributes->mixer_behavior));
+    sp<IOProfile> profile = nullptr;
+    DeviceVector devices(deviceDescriptor);
+    for (const auto& hwModule : mHwModules) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->hasDynamicAudioProfile()
+                    && curProfile->isCompatibleProfile(devices,
+                                                       mixerAttributes->config.sample_rate,
+                                                       nullptr /*updatedSamplingRate*/,
+                                                       mixerAttributes->config.format,
+                                                       nullptr /*updatedFormat*/,
+                                                       mixerAttributes->config.channel_mask,
+                                                       nullptr /*updatedChannelMask*/,
+                                                       flags,
+                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                profile = curProfile;
+                break;
+            }
+        }
+    }
+    if (profile == nullptr) {
+        ALOGE("%s, there is no compatible profile found", __func__);
+        return BAD_VALUE;
+    }
+
+    sp<PreferredMixerAttributesInfo> mixerAttrInfo =
+            sp<PreferredMixerAttributesInfo>::make(
+                    uid, portId, profile, flags, *mixerAttributes);
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    mPreferredMixerAttrInfos[portId][strategy] = mixerAttrInfo;
+
+    // If 1) any client owned by the preferred mixer configuration owner is currently active
+    // and matches the strategy, and 2) the current output is on the preferred device but its
+    // mixer configuration doesn't match the preferred one, then reopen the output with the
+    // preferred mixer configuration.
+    std::vector<audio_io_handle_t> outputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        const auto output = mOutputs.valueAt(i);
+        if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
+            if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
+                output->mUsePreferredMixerAttributes = true;
+            } else {
+                for (const auto &client: output->getActiveClients()) {
+                    if (client->uid() == uid && client->strategy() == strategy) {
+                        client->setIsInvalid();
+                        outputsToReopen.push_back(output->mIoHandle);
+                    }
+                }
+            }
+        }
+    }
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mixerAttributes->config.sample_rate;
+    config.channel_mask = mixerAttributes->config.channel_mask;
+    config.format = mixerAttributes->config.format;
+    for (const auto output : outputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc =
+                reopenOutput(mOutputs.valueFor(output), &config, flags, __func__);
+        if (desc == nullptr) {
+            ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        for (const auto& outputProfile : hwModule->getOutputProfiles()) {
-            if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                continue;
-            }
-            // allow only profiles that support all the available and routed devices
-            if (outputProfile->getSupportedDevices().getDevicesFromDeviceTypeAddrVec(devices).size()
-                    != devices.size()) {
-                continue;
-            }
-            audioProfilesVector.addAllValidProfiles(
-                    outputProfile->asAudioPort()->getAudioProfiles());
-        }
+        desc->mUsePreferredMixerAttributes = true;
     }
 
-    // add the direct profiles from MSD if present and has audio patches to all the output(s)
-    const auto& msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    if (msdModule != nullptr) {
-        if (msdHasPatchesToAllDevices(devices)) {
-            ALOGV("%s: MSD audio patches set to all output devices.", __func__);
-            for (const auto& outputProfile : msdModule->getOutputProfiles()) {
-                if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                    continue;
-                }
-                audioProfilesVector.addAllValidProfiles(
-                        outputProfile->asAudioPort()->getAudioProfiles());
-            }
-        } else {
-            ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
-        }
+    return NO_ERROR;
+}
+
+sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
+        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+    auto it = mPreferredMixerAttrInfos.find(devicePortId);
+    if (it == mPreferredMixerAttrInfos.end()) {
+        return nullptr;
+    }
+    auto mixerAttrInfoIt = it->second.find(strategy);
+    if (mixerAttrInfoIt == it->second.end()) {
+        return nullptr;
+    }
+    return mixerAttrInfoIt->second;
+}
+
+status_t AudioPolicyManager::getPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        audio_mixer_attributes_t* mixerAttributes) {
+    sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+            portId, mEngine->getProductStrategyForAttributes(*attr));
+    if (info == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    *mixerAttributes = info->getMixerAttributes();
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::clearPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                           audio_port_handle_t portId,
+                                                           uid_t uid) {
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    const auto preferredMixerAttrInfo = getPreferredMixerAttributesInfo(portId, strategy);
+    if (preferredMixerAttrInfo == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    if (preferredMixerAttrInfo->getUid() != uid) {
+        ALOGE("%s, requested uid=%d, owned uid=%d",
+              __func__, uid, preferredMixerAttrInfo->getUid());
+        return PERMISSION_DENIED;
+    }
+    mPreferredMixerAttrInfos[portId].erase(strategy);
+    if (mPreferredMixerAttrInfos[portId].empty()) {
+        mPreferredMixerAttrInfos.erase(portId);
     }
 
+    // Reopen any output currently configured with the cleared mixer attributes so that it
+    // falls back to the default configuration.
+    std::vector<audio_io_handle_t> potentialOutputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (mOutputs.valueAt(i)->mProfile == preferredMixerAttrInfo->getProfile()) {
+            potentialOutputsToReopen.push_back(mOutputs.keyAt(i));
+        }
+    }
+    for (const auto output : potentialOutputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        if (desc->isConfigurationMatched(preferredMixerAttrInfo->getConfigBase(),
+                                         preferredMixerAttrInfo->getFlags())) {
+            reopenOutput(desc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
+    }
     return NO_ERROR;
 }
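
Taken together, setPreferredMixerAttributes(), getPreferredMixerAttributesInfo() and
clearPreferredMixerAttributes() above behave like a small registry keyed first by device
port id and then by product strategy. A minimal, illustrative sketch of that two-level
lookup (std types only; the real map stores sp<PreferredMixerAttributesInfo> values):

    // Illustrative only, not part of the patch: device port id -> strategy -> info.
    #include <map>
    #include <memory>

    using PortId = int;
    using Strategy = int;
    struct MixerAttrInfo { /* uid, profile, flags, mixer attributes, ... */ };

    using Registry = std::map<PortId, std::map<Strategy, std::shared_ptr<MixerAttrInfo>>>;

    std::shared_ptr<MixerAttrInfo> find(const Registry& r, PortId port, Strategy s) {
        auto it = r.find(port);
        if (it == r.end()) return nullptr;
        auto jt = it->second.find(s);
        return jt == it->second.end() ? nullptr : jt->second;
    }

    void clear(Registry& r, PortId port, Strategy s) {
        auto it = r.find(port);
        if (it == r.end()) return;
        it->second.erase(s);
        if (it->second.empty()) r.erase(it);    // drop the port entry once empty, as above
    }
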
 
@@ -4230,6 +4528,7 @@
             *num_ports += numOutputs;
         }
     }
+
     *generation = curAudioPortGeneration();
     ALOGV("listAudioPorts() got %zu ports needed %d", portsWritten, *num_ports);
     return NO_ERROR;
@@ -4563,17 +4862,22 @@
                     audio_attributes_t resultAttr;
                     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
                     config.sample_rate = sourceDesc->config().sample_rate;
-                    config.channel_mask = sourceDesc->config().channel_mask;
+                    audio_channel_mask_t sourceMask = sourceDesc->config().channel_mask;
+                    config.channel_mask =
+                            (audio_channel_mask_get_representation(sourceMask)
+                                == AUDIO_CHANNEL_REPRESENTATION_INDEX) ? sourceMask
+                                    : audio_channel_mask_in_to_out(sourceMask);
                     config.format = sourceDesc->config().format;
                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
                     bool isRequestedDeviceForExclusiveUse = false;
                     output_type_t outputType;
                     bool isSpatialized;
+                    bool isBitPerfect;
                     getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
                                         &stream, sourceDesc->uid(), &config, &flags,
                                         &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-                                        nullptr, &outputType, &isSpatialized);
+                                        nullptr, &outputType, &isSpatialized, &isBitPerfect);
                     if (output == AUDIO_IO_HANDLE_NONE) {
                         ALOGV("%s no output for device %s",
                               __FUNCTION__, sinkDevice->toString().c_str());
@@ -4865,6 +5169,7 @@
     auto attributes = mEngine->getAllAttributesForProductStrategy(ps).front();
     DeviceVector devices = mEngine->getOutputDevicesForAttributes(attributes, nullptr, false);
     SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     for (size_t j = 0; j < mOutputs.size(); j++) {
         if (mOutputs.keyAt(j) == ouptutToSkip) {
             continue;
@@ -4877,14 +5182,20 @@
         // invalidate all tracks in this strategy to force re connection.
         // Otherwise select new device on the output mix.
         if (outputs.indexOf(mOutputs.keyAt(j)) < 0) {
-            for (auto stream : mEngine->getStreamTypesForProductStrategy(ps)) {
-                mpClientInterface->invalidateStream(stream);
-            }
+            invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
-            setOutputDevices(
-                        outputDesc, getNewOutputDevices(outputDesc, false /*fromCache*/), false);
+            DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
+            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+                // If the output uses preferred mixer attributes, it needs to be reopened with the
+                // default configuration when the newly selected devices differ from the current
+                // routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
+                continue;
+            }
+            setOutputDevices(outputDesc, newDevices, false);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
 }
 
 void AudioPolicyManager::clearSessionRoutes(uid_t uid)
@@ -5439,9 +5750,7 @@
         }
     }
 
-    for (audio_stream_type_t stream : streamsToInvalidate) {
-        mpClientInterface->invalidateStream(stream);
-    }
+    invalidateStreams(StreamTypeVector(streamsToInvalidate.begin(), streamsToInvalidate.end()));
 }
 
 
@@ -6377,7 +6686,7 @@
             }
             sp<AudioPolicyMix> primaryMix;
             status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
-                    client->uid(), client->flags(), primaryMix, nullptr);
+                    client->uid(), client->session(), client->flags(), primaryMix, nullptr);
             if (status != OK) {
                 continue;
             }
@@ -6455,9 +6764,7 @@
         }
         // Move tracks associated to this stream (and linked) from previous output to new output
         if (!invalidatedOutputs.empty()) {
-            for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
-                mpClientInterface->invalidateStream(stream);
-            }
+            invalidateStreams(mEngine->getStreamTypesForProductStrategy(psId));
             for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
                 desc->setTracksInvalidatedStatusByStrategy(psId);
             }
@@ -6475,7 +6782,7 @@
 }
 
 void AudioPolicyManager::checkSecondaryOutputs() {
-    std::set<audio_stream_type_t> streamsToInvalidate;
+    PortHandleVector clientsToInvalidate;
     TrackSecondaryOutputsMap trackSecondaryOutputs;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& outputDescriptor = mOutputs[i];
@@ -6483,7 +6790,7 @@
             sp<AudioPolicyMix> primaryMix;
             std::vector<sp<AudioPolicyMix>> secondaryMixes;
             status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
-                    client->uid(), client->flags(), primaryMix, &secondaryMixes);
+                    client->uid(), client->session(), client->flags(), primaryMix, &secondaryMixes);
             std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
             for (auto &secondaryMix : secondaryMixes) {
                 sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
@@ -6493,8 +6800,11 @@
                 }
             }
 
-            if (status != OK) {
-                streamsToInvalidate.insert(client->stream());
+            if (status != OK &&
+                (client->flags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == AUDIO_OUTPUT_FLAG_NONE) {
+                // When querying the secondary outputs fails, only invalidate clients that are
+                // not MMAP, since MMAP streams do not support secondary outputs.
+                clientsToInvalidate.push_back(client->portId());
             } else if (!std::equal(
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
@@ -6502,7 +6812,7 @@
                 if (!audio_is_linear_pcm(client->config().format)) {
                     // If the format is not PCM, the tracks should be invalidated to get correct
                     // behavior when the secondary output is changed.
-                    streamsToInvalidate.insert(client->stream());
+                    clientsToInvalidate.push_back(client->portId());
                 } else {
                     std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
                     std::vector<audio_io_handle_t> secondaryOutputIds;
@@ -6519,9 +6829,9 @@
     if (!trackSecondaryOutputs.empty()) {
         mpClientInterface->updateSecondaryOutputs(trackSecondaryOutputs);
     }
-    for (audio_stream_type_t stream : streamsToInvalidate) {
-        ALOGD("%s Invalidate stream %d due to fail getting output for attr", __func__, stream);
-        mpClientInterface->invalidateStream(stream);
+    if (!clientsToInvalidate.empty()) {
+        ALOGD("%s Invalidate clients due to failure getting output for attr", __func__);
+        mpClientInterface->invalidateTracks(clientsToInvalidate);
     }
 }
 
@@ -6691,20 +7001,23 @@
     // a null sp<>, causing the patch on the input stream to be released.
     audio_attributes_t attributes;
     uid_t uid;
+    audio_session_t session;
     sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
     if (topClient != nullptr) {
         attributes = topClient->attributes();
         uid = topClient->uid();
+        session = topClient->session();
     } else {
         attributes = { .source = AUDIO_SOURCE_DEFAULT };
         uid = 0;
+        session = AUDIO_SESSION_NONE;
     }
 
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes, uid);
+        device = mEngine->getInputDeviceForAttributes(attributes, uid, session);
     }
 
     return device;
@@ -6715,70 +7028,15 @@
     return (stream1 == stream2);
 }
 
-// TODO - consider MSD routes b/214971780
 status_t AudioPolicyManager::getDevicesForAttributes(
         const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices, bool forVolume) {
     if (devices == nullptr) {
         return BAD_VALUE;
     }
 
-    // Devices are determined in the following precedence:
-    //
-    // 1) Devices associated with a dynamic policy matching the attributes.  This is often
-    //    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
-    //
-    // If no such dynamic policy then
-    // 2) Devices containing an active client using setPreferredDevice
-    //    with same strategy as the attributes.
-    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
-    //
-    // If no corresponding active client with setPreferredDevice then
-    // 3) Devices associated with the strategy determined by the attributes
-    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
-    //
-    // See related getOutputForAttrInt().
-
-    // check dynamic policies but only for primary descriptors (secondary not used for audible
-    // audio routing, only used for duplication for playback capture)
-    sp<AudioPolicyMix> policyMix;
-    status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
-            0 /*uid unknown here*/, AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr);
-    if (status != OK) {
-        return status;
-    }
-
     DeviceVector curDevices;
-    if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
-            // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-            // as they are unaffected by device/stream volume
-            // (per SwAudioOutputDescriptor::isFixedVolume()).
-            (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
-            ) {
-        sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
-                policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
-        curDevices.add(deviceDesc);
-    } else {
-        // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
-        // which selects setPreferredDevice if active.  This means forVolume call
-        // will take an active setPreferredDevice, if such exists.
-
-        curDevices = mEngine->getOutputDevicesForAttributes(
-                attr, nullptr /* preferredDevice */, false /* fromCache */);
-    }
-
-    if (forVolume) {
-        // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
-        // for single volume control in AudioService (such relationship should exist if
-        // SPEAKER_SAFE is present).
-        //
-        // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
-        DeviceVector speakerSafeDevices =
-                curDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
-        if (!speakerSafeDevices.isEmpty()) {
-            curDevices.merge(
-                    mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
-            curDevices.remove(speakerSafeDevices);
-        }
+    if (status_t status = getDevicesForAttributes(attr, curDevices, forVolume); status != OK) {
+        return status;
     }
     for (const auto& device : curDevices) {
         devices->push_back(device->getDeviceTypeAddr());
@@ -6963,6 +7221,7 @@
                                               audio_patch_handle_t *patchHandle,
                                               bool requiresMuteCheck, bool requiresVolumeCheck)
 {
+    // TODO(b/262404095): Consider whether the output needs to be reopened.
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
 
@@ -7133,51 +7392,68 @@
 {
     // Choose an input profile based on the requested capture parameters: select the first available
     // profile supporting all requested parameters.
+    // The flags can be ignored if they don't contain a must-match flag.
     //
     // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
     // the best matching profile, not the first one.
 
-    sp<IOProfile> firstInexact;
-    uint32_t updatedSamplingRate = 0;
-    audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
-    audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
-    for (const auto& hwModule : mHwModules) {
-        for (const auto& profile : hwModule->getInputProfiles()) {
-            // profile->log();
-            //updatedFormat = format;
-            if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
-                                             &samplingRate  /*updatedSamplingRate*/,
-                                             format,
-                                             &format,       /*updatedFormat*/
-                                             channelMask,
-                                             &channelMask   /*updatedChannelMask*/,
-                                             // FIXME ugly cast
-                                             (audio_output_flags_t) flags,
-                                             true /*exactMatchRequiredForInputFlags*/)) {
-                return profile;
-            }
-            if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
-                                             samplingRate,
-                                             &updatedSamplingRate,
-                                             format,
-                                             &updatedFormat,
-                                             channelMask,
-                                             &updatedChannelMask,
-                                             // FIXME ugly cast
-                                             (audio_output_flags_t) flags,
-                                             false /*exactMatchRequiredForInputFlags*/)) {
-                firstInexact = profile;
-            }
+    const audio_input_flags_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+    const audio_input_flags_t oriFlags = flags;
 
+    for (;;) {
+        sp<IOProfile> firstInexact = nullptr;
+        uint32_t updatedSamplingRate = 0;
+        audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
+        audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
+        for (const auto& hwModule : mHwModules) {
+            for (const auto& profile : hwModule->getInputProfiles()) {
+                // profile->log();
+                //updatedFormat = format;
+                if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
+                                                 &samplingRate  /*updatedSamplingRate*/,
+                                                 format,
+                                                 &format,       /*updatedFormat*/
+                                                 channelMask,
+                                                 &channelMask   /*updatedChannelMask*/,
+                                                 // FIXME ugly cast
+                                                 (audio_output_flags_t) flags,
+                                                 true /*exactMatchRequiredForInputFlags*/)) {
+                    return profile;
+                }
+                if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
+                                                 samplingRate,
+                                                 &updatedSamplingRate,
+                                                 format,
+                                                 &updatedFormat,
+                                                 channelMask,
+                                                 &updatedChannelMask,
+                                                 // FIXME ugly cast
+                                                 (audio_output_flags_t) flags,
+                                                 false /*exactMatchRequiredForInputFlags*/)) {
+                    firstInexact = profile;
+                }
+            }
+        }
+
+        if (firstInexact != nullptr) {
+            samplingRate = updatedSamplingRate;
+            format = updatedFormat;
+            channelMask = updatedChannelMask;
+            return firstInexact;
+        } else if (flags & AUDIO_INPUT_FLAG_RAW) {
+            flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
+        } else if ((flags & mustMatchFlag) == AUDIO_INPUT_FLAG_NONE &&
+                flags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(format)) {
+            flags = AUDIO_INPUT_FLAG_NONE;
+        } else { // fail
+            ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
+                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
+                  samplingRate, format, channelMask, oriFlags);
+            break;
         }
     }
-    if (firstInexact != nullptr) {
-        samplingRate = updatedSamplingRate;
-        format = updatedFormat;
-        channelMask = updatedChannelMask;
-        return firstInexact;
-    }
-    return NULL;
+
+    return nullptr;
 }
 
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
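The loop above relaxes the requested input flags in a fixed order: try an exact flag match first, then drop AUDIO_INPUT_FLAG_RAW, then fall back to no flags at all unless a must-match flag (MMAP_NOIRQ) is present, in which case the search fails. Below is a minimal standalone sketch of that relaxation order, using illustrative enums and a stub matcher rather than the real IOProfile API (the linear-PCM check is omitted for brevity):

    #include <cstdint>
    #include <cstdio>
    #include <optional>

    // Hypothetical stand-ins for the real audio_input_flags_t values.
    enum Flags : uint32_t {
        FLAG_NONE       = 0,
        FLAG_RAW        = 1u << 0,
        FLAG_FAST       = 1u << 1,
        FLAG_MMAP_NOIRQ = 1u << 2,   // treated as the "must match" flag
    };

    // Pretend profile matcher: succeeds only when no flags are requested.
    static std::optional<int> findProfile(uint32_t flags) {
        return flags == FLAG_NONE ? std::optional<int>(42) : std::nullopt;
    }

    // Mirrors the relaxation order: exact flags, then without RAW,
    // then FLAG_NONE unless a must-match flag is present.
    static std::optional<int> findProfileWithRelaxation(uint32_t flags) {
        const uint32_t mustMatch = FLAG_MMAP_NOIRQ;
        for (;;) {
            if (auto p = findProfile(flags)) return p;
            if (flags & FLAG_RAW) {
                flags &= ~uint32_t(FLAG_RAW);          // retry without RAW
            } else if ((flags & mustMatch) == 0 && flags != FLAG_NONE) {
                flags = FLAG_NONE;                      // retry with no flags at all
            } else {
                return std::nullopt;                    // give up
            }
        }
    }

    int main() {
        printf("RAW|FAST   -> %s\n",
               findProfileWithRelaxation(FLAG_RAW | FLAG_FAST) ? "found" : "none");
        printf("MMAP_NOIRQ -> %s\n",
               findProfileWithRelaxation(FLAG_MMAP_NOIRQ) ? "found" : "none");
    }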
@@ -7823,19 +8099,23 @@
 
 sp<SwAudioOutputDescriptor> AudioPolicyManager::openOutputWithProfileAndDevice(
         const sp<IOProfile>& profile, const DeviceVector& devices,
-        const audio_config_base_t *mixerConfig)
+        const audio_config_base_t *mixerConfig, const audio_config_t *halConfig,
+        audio_output_flags_t flags)
 {
     for (const auto& device : devices) {
         // TODO: This should be checking if the profile supports the device combo.
         if (!profile->supportsDevice(device)) {
+            ALOGE("%s profile(%s) doesn't support device %#x", __func__, profile->getName().c_str(),
+                  device->type());
             return nullptr;
         }
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    status_t status = desc->open(nullptr /* halConfig */, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+    status_t status = desc->open(halConfig, mixerConfig, devices,
+            AUDIO_STREAM_DEFAULT, flags, &output);
     if (status != NO_ERROR) {
+        ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
     }
 
@@ -7853,7 +8133,9 @@
         ALOGW("%s() missing param", __func__);
         desc->close();
         return nullptr;
-    } else if (profile->hasDynamicAudioProfile()) {
+    } else if (profile->hasDynamicAudioProfile() && halConfig == nullptr) {
+        // Reopen the output with the best audio profile picked by APM when the profile supports
+        // dynamic audio profiles and no HAL config is specified.
         desc->close();
         output = AUDIO_IO_HANDLE_NONE;
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -7863,8 +8145,7 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices,
-                            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
         if (status != NO_ERROR) {
             return nullptr;
         }
@@ -7919,4 +8200,151 @@
     return desc;
 }
 
+status_t AudioPolicyManager::getDevicesForAttributes(
+        const audio_attributes_t &attr, DeviceVector &devices, bool forVolume) {
+    // Devices are determined in the following precedence:
+    //
+    // 1) Devices associated with a dynamic policy matching the attributes.  This is often
+    //    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
+    //
+    // If no such dynamic policy then
+    // 2) Devices containing an active client using setPreferredDevice
+    //    with same strategy as the attributes.
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // If no corresponding active client with setPreferredDevice then
+    // 3) Devices associated with the strategy determined by the attributes
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // See related getOutputForAttrInt().
+
+    // check dynamic policies but only for primary descriptors (secondary not used for audible
+    // audio routing, only used for duplication for playback capture)
+    sp<AudioPolicyMix> policyMix;
+    status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
+            0 /*uid unknown here*/, AUDIO_SESSION_NONE, AUDIO_OUTPUT_FLAG_NONE, policyMix,
+            nullptr /* secondaryMixes */);
+    if (status != OK) {
+        return status;
+    }
+
+    if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
+            // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
+            // as they are unaffected by device/stream volume
+            // (per SwAudioOutputDescriptor::isFixedVolume()).
+            (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
+            ) {
+        sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
+                policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+        devices.add(deviceDesc);
+    } else {
+        // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
+        // which selects the device set via setPreferredDevice when that client is active.
+        // This means a forVolume call will pick up an active setPreferredDevice, if one exists.
+
+        devices = mEngine->getOutputDevicesForAttributes(
+                attr, nullptr /* preferredDevice */, false /* fromCache */);
+    }
+
+    if (forVolume) {
+        // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
+        // for single volume control in AudioService (such relationship should exist if
+        // SPEAKER_SAFE is present).
+        //
+        // (This is unrelated to the different device grouping used by Volume::getDeviceCategory.)
+        DeviceVector speakerSafeDevices =
+                devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
+        if (!speakerSafeDevices.isEmpty()) {
+            devices.merge(mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
+            devices.remove(speakerSafeDevices);
+        }
+    }
+
+    return NO_ERROR;
+}
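In the forVolume branch above, any AUDIO_DEVICE_OUT_SPEAKER_SAFE selection is replaced by the regular speaker so both share one volume control. A small sketch of that aliasing step over a plain set of device types; the names below are illustrative stand-ins, not the real DeviceVector API:

    #include <cstdio>
    #include <set>

    // Hypothetical device-type identifiers used only for illustration.
    enum class DeviceType { Speaker, SpeakerSafe, WiredHeadset };

    // Alias SPEAKER_SAFE to SPEAKER for volume purposes: if a SPEAKER_SAFE
    // device was selected, substitute the regular SPEAKER device for it.
    static void aliasSpeakerSafeForVolume(std::set<DeviceType>& devices,
                                          const std::set<DeviceType>& available) {
        if (devices.count(DeviceType::SpeakerSafe) == 0) return;
        devices.erase(DeviceType::SpeakerSafe);
        if (available.count(DeviceType::Speaker)) {
            devices.insert(DeviceType::Speaker);
        }
    }

    int main() {
        std::set<DeviceType> selected  = {DeviceType::SpeakerSafe};
        std::set<DeviceType> available = {DeviceType::Speaker, DeviceType::SpeakerSafe};
        aliasSpeakerSafeForVolume(selected, available);
        printf("speaker selected: %s\n",
               selected.count(DeviceType::Speaker) ? "yes" : "no");
    }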
+
+status_t AudioPolicyManager::getProfilesForDevices(const DeviceVector& devices,
+                                                   AudioProfileVector& audioProfiles,
+                                                   uint32_t flags,
+                                                   bool isInput) {
+    for (const auto& hwModule : mHwModules) {
+        // the MSD module checks for different conditions
+        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+            continue;
+        }
+        IOProfileCollection ioProfiles = isInput ? hwModule->getInputProfiles()
+                                                 : hwModule->getOutputProfiles();
+        for (const auto& profile : ioProfiles) {
+            if (!profile->areAllDevicesSupported(devices) ||
+                    !profile->isCompatibleProfileForFlags(
+                            flags, false /*exactMatchRequiredForInputFlags*/)) {
+                continue;
+            }
+            audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+        }
+    }
+
+    if (!isInput) {
+        // add the direct profiles from the MSD module if it is present and has audio patches
+        // to all the output devices
+        const auto &msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+        if (msdModule != nullptr) {
+            if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+                ALOGV("%s: MSD audio patches set to all output devices.", __func__);
+                for (const auto &profile: msdModule->getOutputProfiles()) {
+                    if (!profile->asAudioPort()->isDirectOutput()) {
+                        continue;
+                    }
+                    audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+                }
+            } else {
+                ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
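getProfilesForDevices() walks every non-MSD hardware module and keeps only the I/O profiles that support all of the requested devices and are compatible with the requested flags, collecting their audio profiles (the MSD direct-profile special case is left out of the sketch). A simplified sketch of that filter-and-collect pattern with plain structs; the types are illustrative, not the real IOProfile/AudioProfileVector classes:

    #include <algorithm>
    #include <cstdio>
    #include <string>
    #include <vector>

    // Minimal illustrative stand-in for an I/O profile.
    struct Profile {
        std::string name;
        std::vector<int> supportedDevices;  // device ids
        unsigned flags = 0;
    };

    static bool supportsAll(const Profile& p, const std::vector<int>& devices) {
        return std::all_of(devices.begin(), devices.end(), [&](int d) {
            return std::find(p.supportedDevices.begin(), p.supportedDevices.end(), d)
                   != p.supportedDevices.end();
        });
    }

    // Collect the names of profiles compatible with the devices and flags.
    static std::vector<std::string> profilesForDevices(const std::vector<Profile>& all,
                                                       const std::vector<int>& devices,
                                                       unsigned flags) {
        std::vector<std::string> out;
        for (const auto& p : all) {
            if (!supportsAll(p, devices)) continue;
            if ((p.flags & flags) != flags) continue;  // crude flag-compatibility check
            out.push_back(p.name);
        }
        return out;
    }

    int main() {
        std::vector<Profile> profiles = {{"primary", {1, 2}, 0x1}, {"usb", {3}, 0x1}};
        for (const auto& n : profilesForDevices(profiles, {1}, 0x1)) printf("%s\n", n.c_str());
    }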
+
+sp<SwAudioOutputDescriptor> AudioPolicyManager::reopenOutput(sp<SwAudioOutputDescriptor> outputDesc,
+                                                             const audio_config_t *config,
+                                                             audio_output_flags_t flags,
+                                                             const char* caller) {
+    closeOutput(outputDesc->mIoHandle);
+    sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+            outputDesc->mProfile, outputDesc->devices(), nullptr /*mixerConfig*/, config, flags);
+    if (preferredOutput == nullptr) {
+        ALOGE("%s failed to reopen output device=%d, caller=%s",
+              __func__, outputDesc->devices()[0]->getId(), caller);
+    }
+    return preferredOutput;
+}
+
+void AudioPolicyManager::reopenOutputsWithDevices(
+        const std::map<audio_io_handle_t, DeviceVector> &outputsToReopen) {
+    for (const auto& [output, devices] : outputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        closeOutput(output);
+        openOutputWithProfileAndDevice(desc->mProfile, devices);
+    }
+}
+
+PortHandleVector AudioPolicyManager::getClientsForStream(
+        audio_stream_type_t streamType) const {
+    PortHandleVector clients;
+    for (size_t i = 0; i < mOutputs.size(); ++i) {
+        PortHandleVector clientsForStream = mOutputs.valueAt(i)->getClientsForStream(streamType);
+        clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+    }
+    return clients;
+}
+
+void AudioPolicyManager::invalidateStreams(StreamTypeVector streams) const {
+    PortHandleVector clients;
+    for (auto stream : streams) {
+        PortHandleVector clientsForStream = getClientsForStream(stream);
+        clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+    }
+    mpClientInterface->invalidateTracks(clients);
+}
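invalidateStreams() replaces the removed per-stream invalidateStream() client call: it collects the port handles of every client attached to any output for the given stream types and passes the whole list to invalidateTracks() in one call. A compact sketch of that gather step using standard containers instead of the real output descriptors:

    #include <cstdio>
    #include <map>
    #include <vector>

    using PortHandle = int;
    using StreamType = int;

    // output io handle -> clients on that output, keyed by stream type.
    using Outputs = std::map<int, std::multimap<StreamType, PortHandle>>;

    // Collect every client port handle playing one of the given streams.
    static std::vector<PortHandle> clientsForStreams(const Outputs& outputs,
                                                     const std::vector<StreamType>& streams) {
        std::vector<PortHandle> clients;
        for (const auto& [io, byStream] : outputs) {
            for (StreamType s : streams) {
                auto [lo, hi] = byStream.equal_range(s);
                for (auto it = lo; it != hi; ++it) clients.push_back(it->second);
            }
        }
        return clients;
    }

    int main() {
        Outputs outputs = {{10, {{0, 100}, {1, 101}}}, {11, {{1, 102}}}};
        for (PortHandle p : clientsForStreams(outputs, {1})) printf("invalidate port %d\n", p);
    }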
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index a69e088..885f7c6 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -47,6 +47,7 @@
 #include <AudioOutputDescriptor.h>
 #include <AudioPolicyMix.h>
 #include <EffectDescriptor.h>
+#include <PreferredMixerAttributesInfo.h>
 #include <SoundTriggerSession.h>
 #include "EngineLibrary.h"
 #include "TypeConverter.h"
@@ -117,13 +118,14 @@
                                   audio_session_t session,
                                   audio_stream_type_t *stream,
                                   const AttributionSourceState& attributionSource,
-                                  const audio_config_t *config,
+                                  audio_config_t *config,
                                   audio_output_flags_t *flags,
                                   audio_port_handle_t *selectedDeviceId,
                                   audio_port_handle_t *portId,
                                   std::vector<audio_io_handle_t> *secondaryOutputs,
                                   output_type_t *outputType,
-                                  bool *isSpatialized) override;
+                                  bool *isSpatialized,
+                                  bool *isBitPerfect) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
@@ -132,7 +134,7 @@
                                          audio_unique_id_t riid,
                                          audio_session_t session,
                                          const AttributionSourceState& attributionSource,
-                                         const audio_config_base_t *config,
+                                         audio_config_base_t *config,
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
                                          input_type_t *inputType,
@@ -356,11 +358,10 @@
         }
 
         virtual status_t getProductStrategyFromAudioAttributes(
-                const AudioAttributes &aa, product_strategy_t &productStrategy,
+                const audio_attributes_t &aa, product_strategy_t &productStrategy,
                 bool fallbackOnDefault)
         {
-            productStrategy = mEngine->getProductStrategyForAttributes(
-                    aa.getAttributes(), fallbackOnDefault);
+            productStrategy = mEngine->getProductStrategyForAttributes(aa, fallbackOnDefault);
             return (fallbackOnDefault && productStrategy == PRODUCT_STRATEGY_NONE) ?
                     BAD_VALUE : NO_ERROR;
         }
@@ -371,10 +372,9 @@
         }
 
         virtual status_t getVolumeGroupFromAudioAttributes(
-                const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
+                const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
         {
-            volumeGroup = mEngine->getVolumeGroupForAttributes(
-                        aa.getAttributes(), fallbackOnDefault);
+            volumeGroup = mEngine->getVolumeGroupForAttributes(aa, fallbackOnDefault);
             return (fallbackOnDefault && volumeGroup == VOLUME_GROUP_NONE) ?
                     BAD_VALUE : NO_ERROR;
         }
@@ -397,6 +397,21 @@
         virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                          AudioProfileVector& audioProfiles);
 
+        status_t getSupportedMixerAttributes(
+                audio_port_handle_t portId,
+                std::vector<audio_mixer_attributes_t>& mixerAttrs) override;
+        status_t setPreferredMixerAttributes(
+                const audio_attributes_t* attr,
+                audio_port_handle_t portId,
+                uid_t uid,
+                const audio_mixer_attributes_t* mixerAttributes) override;
+        status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                             audio_port_handle_t portId,
+                                             audio_mixer_attributes_t* mixerAttributes) override;
+        status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                               audio_port_handle_t portId,
+                                               uid_t uid) override;
+
         bool isCallScreenModeSupported() override;
 
         void onNewAudioModulesAvailable() override;
@@ -987,6 +1002,10 @@
         sp<SourceClientDescriptor> mCallRxSourceClient;
         sp<SourceClientDescriptor> mCallTxSourceClient;
 
+        std::map<audio_port_handle_t,
+                 std::map<product_strategy_t,
+                          sp<PreferredMixerAttributesInfo>>> mPreferredMixerAttrInfos;
+
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
         DeviceVector getMsdAudioOutDevices() const;
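mPreferredMixerAttrInfos above is a two-level map keyed first by device port id and then by product strategy. A small sketch of the lookup such a structure implies; the types below are hypothetical placeholders, and the real lookup lives in getPreferredMixerAttributesInfo():

    #include <cstdio>
    #include <map>
    #include <memory>

    struct MixerAttrInfo { int sampleRate; };  // illustrative payload

    using PortId   = int;
    using Strategy = int;
    using PrefMap  = std::map<PortId, std::map<Strategy, std::shared_ptr<MixerAttrInfo>>>;

    // Return the stored info for (port, strategy), or nullptr if absent.
    static std::shared_ptr<MixerAttrInfo> find(const PrefMap& m, PortId port, Strategy strategy) {
        auto outer = m.find(port);
        if (outer == m.end()) return nullptr;
        auto inner = outer->second.find(strategy);
        return inner == outer->second.end() ? nullptr : inner->second;
    }

    int main() {
        PrefMap prefs;
        prefs[7][2] = std::make_shared<MixerAttrInfo>(MixerAttrInfo{48000});
        auto info = find(prefs, 7, 2);
        printf("%d\n", info ? info->sampleRate : -1);
    }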
@@ -1016,7 +1035,7 @@
 
         // Called by setDeviceConnectionState()
         status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
-                                   const char* device_name, media::AudioPort* aidPort);
+                                   const char* device_name, media::AudioPortFw* aidPort);
         bool isMsdPatch(const audio_patch_handle_t &handle) const;
 
 private:
@@ -1058,13 +1077,14 @@
                 const audio_attributes_t *attr,
                 audio_stream_type_t *stream,
                 uid_t uid,
-                const audio_config_t *config,
+                audio_config_t *config,
                 audio_output_flags_t *flags,
                 audio_port_handle_t *selectedDeviceId,
                 bool *isRequestedDeviceForExclusiveUse,
                 std::vector<sp<AudioPolicyMix>> *secondaryMixes,
                 output_type_t *outputType,
-                bool *isSpatialized);
+                bool *isSpatialized,
+                bool *isBitPerfect);
         // internal method to return the output handle for the given device and format
         audio_io_handle_t getOutputForDevices(
                 const DeviceVector &devices,
@@ -1073,6 +1093,7 @@
                 const audio_config_t *config,
                 audio_output_flags_t *flags,
                 bool *isSpatialized,
+                sp<PreferredMixerAttributesInfo> prefMixerAttrInfo = nullptr,
                 bool forceMutingHaptic = false);
 
         // Internal method checking if a direct output can be opened matching the requested
@@ -1139,7 +1160,9 @@
          * @param session requester session id
          * @param uid requester uid
          * @param attributes requester audio attributes (e.g. input source and tags matter)
-         * @param config requester audio configuration (e.g. sample rate, format, channel mask).
+         * @param config requested audio configuration (e.g. sample rate, format, channel mask);
+         *               updated if the requested configuration is not supported but another
+         *               one is
          * @param flags requester input flags
          * @param policyMix may be null, policy rules to be followed by the requester
          * @return input io handle aka unique input identifier selected for this device.
@@ -1147,7 +1170,7 @@
         audio_io_handle_t getInputForDevice(const sp<DeviceDescriptor> &device,
                 audio_session_t session,
                 const audio_attributes_t &attributes,
-                const audio_config_base_t *config,
+                audio_config_base_t *config,
                 audio_input_flags_t flags,
                 const sp<AudioPolicyMix> &policyMix);
 
@@ -1238,11 +1261,15 @@
          * @param[in] profile IOProfile to use as template
          * @param[in] devices initial route to apply to this output stream
          * @param[in] mixerConfig if not null, use this to configure the mixer
+         * @param[in] halConfig if not null, use this to configure the HAL
+         * @param[in] flags the flags to be used to open the output
          * @return an output descriptor for the newly opened stream or null in case of error.
          */
         sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(
                 const sp<IOProfile>& profile, const DeviceVector& devices,
-                const audio_config_base_t *mixerConfig = nullptr);
+                const audio_config_base_t *mixerConfig = nullptr,
+                const audio_config_t *halConfig = nullptr,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE);
 
         bool isOffloadPossible(const audio_offload_info_t& offloadInfo,
                                bool durationIgnored = false);
@@ -1266,6 +1293,30 @@
 
         // Filters only the relevant flags for getProfileForOutput
         audio_output_flags_t getRelevantFlags (audio_output_flags_t flags, bool directOnly);
+
+        status_t getDevicesForAttributes(const audio_attributes_t &attr,
+                                         DeviceVector &devices,
+                                         bool forVolume);
+
+        status_t getProfilesForDevices(const DeviceVector& devices,
+                                       AudioProfileVector& audioProfiles,
+                                       uint32_t flags,
+                                       bool isInput);
+
+        sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
+                audio_port_handle_t devicePortId, product_strategy_t strategy);
+
+        sp<SwAudioOutputDescriptor> reopenOutput(
+                sp<SwAudioOutputDescriptor> outputDesc,
+                const audio_config_t *config,
+                audio_output_flags_t flags,
+                const char* caller);
+
+        void reopenOutputsWithDevices(
+                const std::map<audio_io_handle_t, DeviceVector>& outputsToReopen);
+
+        PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+        void invalidateStreams(StreamTypeVector streams) const;
 };
 
 };
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 4c19d40..10403fa 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -36,6 +36,7 @@
         "libaudiohal",
         "libaudiopolicy",
         "libaudiopolicymanagerdefault",
+        "libaudiousecasevalidation",
         "libaudioutils",
         "libbinder",
         "libcutils",
@@ -85,6 +86,7 @@
 
     export_shared_lib_headers: [
         "libactivitymanager_aidl",
+        "libaudiousecasevalidation",
         "libheadtracking",
         "libheadtracking-binding",
         "libsensorprivacy",
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index c766a15..1bb89df 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -69,6 +69,12 @@
         *halConfig = VALUE_OR_RETURN_STATUS(
                 aidl2legacy_AudioConfig_audio_config_t(response.config, false /*isInput*/));
         *latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
+
+        audio_config_base_t config = {.sample_rate = halConfig->sample_rate,
+            .channel_mask = halConfig->channel_mask,
+            .format = halConfig->format,
+        };
+        mAudioPolicyService->registerOutput(*output, config, flags);
     }
     return status;
 }
@@ -91,7 +97,7 @@
     if (af == 0) {
         return PERMISSION_DENIED;
     }
-
+    mAudioPolicyService->unregisterOutput(output);
     return af->closeOutput(output);
 }
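With this change, openOutput() registers the opened stream's base config and flags with the use-case validator and closeOutput() unregisters it before asking AudioFlinger to close the stream. A minimal sketch of such a paired register/unregister registry; the class below is a hypothetical stand-in, not the real media::UsecaseValidator interface:

    #include <cstdio>
    #include <map>

    struct StreamConfig { unsigned sampleRate; unsigned flags; };  // illustrative

    // Hypothetical stand-in for the stream registry kept by the validator.
    class StreamRegistry {
    public:
        void registerStream(int io, const StreamConfig& cfg) { mStreams[io] = cfg; }
        void unregisterStream(int io) { mStreams.erase(io); }
        bool isRegistered(int io) const { return mStreams.count(io) != 0; }
    private:
        std::map<int, StreamConfig> mStreams;
    };

    int main() {
        StreamRegistry registry;
        registry.registerStream(42, {48000, 0});   // on openOutput success
        printf("registered: %d\n", registry.isRegistered(42));
        registry.unregisterStream(42);             // before closeOutput
        printf("registered: %d\n", registry.isRegistered(42));
    }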
 
@@ -168,16 +174,6 @@
                                                delay_ms);
 }
 
-status_t AudioPolicyService::AudioPolicyClient::invalidateStream(audio_stream_type_t stream)
-{
-    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0) {
-        return PERMISSION_DENIED;
-    }
-
-    return af->invalidateStream(stream);
-}
-
 void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
                    const String8& keyValuePairs,
                    int delay_ms)
@@ -322,5 +318,15 @@
     return af->setDeviceConnectedState(port, connected);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::invalidateTracks(
+        const std::vector<audio_port_handle_t>& portIds) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        return PERMISSION_DENIED;
+    }
+
+    return af->invalidateTracks(portIds);
+}
+
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 49224c5..5c32209 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -373,6 +373,7 @@
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized = false;
+    bool isBitPerfect = false;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
                                                             attributionSource,
@@ -380,7 +381,8 @@
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
                                                             &outputType,
-                                                            &isSpatialized);
+                                                            &isSpatialized,
+                                                            &isBitPerfect);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -415,6 +417,9 @@
     }
 
     if (result == NO_ERROR) {
+        attr = VALUE_OR_RETURN_BINDER_STATUS(
+                mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+
         sp<AudioPlaybackClient> client =
                 new AudioPlaybackClient(attr, output, attributionSource, session,
                     portId, selectedDeviceId, stream, isSpatialized);
@@ -432,6 +437,16 @@
                 convertContainer<std::vector<int32_t>>(secondaryOutputs,
                                                        legacy2aidl_audio_io_handle_t_int32_t));
         _aidl_return->isSpatialized = isSpatialized;
+        _aidl_return->isBitPerfect = isBitPerfect;
+        _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    } else {
+        _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
+        _aidl_return->configBase.channelMask = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                        config.channel_mask, false /*isInput*/));
+        _aidl_return->configBase.sampleRate = config.sample_rate;
     }
     return binderStatusFromStatusT(result);
 }
@@ -476,6 +491,10 @@
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->startOutput(portId);
     if (status == NO_ERROR) {
+        //TODO b/257922898: decide if/how we need to handle attributes update when playback starts
+        // or during playback
+        (void)mUsecaseValidator->startClient(client->io, client->portId, client->attributionSource,
+                client->attributes, nullptr /* callback */);
         client->active = true;
         onUpdateActiveSpatializerTracks_l();
     }
@@ -516,6 +535,7 @@
     if (status == NO_ERROR) {
         client->active = false;
         onUpdateActiveSpatializerTracks_l();
+        mUsecaseValidator->stopClient(client->io, client->portId);
     }
     return status;
 }
@@ -719,6 +739,9 @@
             if (status == PERMISSION_DENIED) {
                 AutoCallerClear acc;
                 mAudioPolicyManager->releaseInput(portId);
+            } else {
+                _aidl_return->config = VALUE_OR_RETURN_BINDER_STATUS(
+                        legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
             }
             return binderStatusFromStatusT(status);
         }
@@ -1130,12 +1153,12 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesEx& attrAidl,
+Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesInternal& attrAidl,
                                                    bool forVolume,
                                                    std::vector<AudioDevice>* _aidl_return)
 {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(attrAidl));
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
     AudioDeviceTypeAddrVector devices;
 
     if (mAudioPolicyManager == NULL) {
@@ -1144,8 +1167,7 @@
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->getDevicesForAttributes(
-                    aa.getAttributes(), &devices, forVolume)));
+            mAudioPolicyManager->getDevicesForAttributes(aa, &devices, forVolume)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<std::vector<AudioDevice>>(devices,
                                                        legacy2aidl_AudioDeviceTypeAddress));
@@ -1485,7 +1507,7 @@
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
-                                          std::vector<media::AudioPort>* portsAidl,
+                                          std::vector<media::AudioPortFw>* portsAidl,
                                           int32_t* _aidl_return) {
     audio_port_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPortRole_audio_port_role_t(roleAidl));
@@ -1517,7 +1539,7 @@
 }
 
 Status AudioPolicyService::getAudioPort(int portId,
-                                        media::AudioPort* _aidl_return) {
+                                        media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
@@ -1529,7 +1551,8 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::createAudioPatch(const media::AudioPatch& patchAidl, int32_t handleAidl,
+Status AudioPolicyService::createAudioPatch(const media::AudioPatchFw& patchAidl,
+                                            int32_t handleAidl,
                                             int32_t* _aidl_return) {
     audio_patch patch = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPatch_audio_patch(patchAidl));
@@ -1570,7 +1593,7 @@
 }
 
 Status AudioPolicyService::listAudioPatches(Int* count,
-                                            std::vector<media::AudioPatch>* patchesAidl,
+                                            std::vector<media::AudioPatchFw>* patchesAidl,
                                             int32_t* _aidl_return) {
     unsigned int num_patches = VALUE_OR_RETURN_BINDER_STATUS(
             convertIntegral<unsigned int>(count->value));
@@ -1597,7 +1620,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfig& configAidl)
+Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfigFw& configAidl)
 {
     audio_port_config config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPortConfig_audio_port_config(configAidl));
@@ -1770,7 +1793,7 @@
     return binderStatusFromStatusT(mAudioPolicyManager->removeUserIdDeviceAffinities(userId));
 }
 
-Status AudioPolicyService::startAudioSource(const media::AudioPortConfig& sourceAidl,
+Status AudioPolicyService::startAudioSource(const media::AudioPortConfigFw& sourceAidl,
                                             const media::AudioAttributesInternal& attributesAidl,
                                             int32_t* _aidl_return) {
     audio_port_config source = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2038,9 +2061,10 @@
 }
 
 Status AudioPolicyService::getProductStrategyFromAudioAttributes(
-        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+        const media::AudioAttributesInternal& aaAidl,
+        bool fallbackOnDefault, int32_t* _aidl_return) {
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
     product_strategy_t productStrategy;
 
     if (mAudioPolicyManager == NULL) {
@@ -2071,9 +2095,10 @@
 }
 
 Status AudioPolicyService::getVolumeGroupFromAudioAttributes(
-        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+        const media::AudioAttributesInternal& aaAidl,
+        bool fallbackOnDefault, int32_t* _aidl_return) {
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
     volume_group_t volumeGroup;
 
     if (mAudioPolicyManager == NULL) {
@@ -2354,4 +2379,89 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::getSupportedMixerAttributes(
+        int32_t portIdAidl, std::vector<media::AudioMixerAttributesInternal>* _aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    std::vector<audio_mixer_attributes_t> mixerAttrs;
+    Mutex::Autolock _l(mLock);
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->getSupportedMixerAttributes(
+                    portId, mixerAttrs)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioMixerAttributesInternal>>(
+                    mixerAttrs,
+                    legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal));
+    return Status::ok();
+}
+
+Status AudioPolicyService::setPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        int32_t uidAidl,
+        const media::AudioMixerAttributesInternal& mixerAttrAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_mixer_attributes_t mixerAttr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(mixerAttrAidl));
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setPreferredMixerAttributes(&attr, portId, uid, &mixerAttr));
+}
+
+Status AudioPolicyService::getPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        std::optional<media::AudioMixerAttributesInternal>* _aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    audio_mixer_attributes_t mixerAttr = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->getPreferredMixerAttributes(
+                    &attr, portId, &mixerAttr)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(mixerAttr));
+    return Status::ok();
+}
+
+Status AudioPolicyService::clearPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        int32_t uidAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 09b6f3b..2be5121 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -155,7 +155,11 @@
 BINDER_METHOD_ENTRY(getSpatializer) \
 BINDER_METHOD_ENTRY(canBeSpatialized) \
 BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
-BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+BINDER_METHOD_ENTRY(getSupportedMixerAttributes) \
+BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
 
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
@@ -201,7 +205,8 @@
       mPhoneState(AUDIO_MODE_INVALID),
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
-      mDestroyAudioPolicyManager(destroyAudioPolicyManager) {
+      mDestroyAudioPolicyManager(destroyAudioPolicyManager),
+      mUsecaseValidator(media::createUsecaseValidator()) {
       setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
@@ -1007,10 +1012,11 @@
         //     AND is on TOP or latest started
         //     AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+        bool allowSensitiveCapture =
+            !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
         bool allowCapture = !isAssistantOnTop
                 && (isTopOrLatestActive || isTopOrLatestSensitive)
-                && !(isSensitiveActive
-                    && !(isTopOrLatestSensitive || current->canCaptureOutput))
+                && allowSensitiveCapture
                 && canCaptureIfInCallOrCommunication(current);
 
         if (!current->hasOp()) {
@@ -1033,7 +1039,7 @@
                 if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                     allowCapture = true;
                 }
-            } else if (!(isSensitiveActive && !current->canCaptureOutput)
+            } else if (allowSensitiveCapture
                     && canCaptureIfInCallOrCommunication(current)) {
                 if (isTopOrLatestAssistant
                     && (source == AUDIO_SOURCE_VOICE_RECOGNITION
@@ -1054,7 +1060,7 @@
                 if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                     allowCapture = true;
                 }
-            } else if (!(isSensitiveActive && !current->canCaptureOutput)
+            } else if (allowSensitiveCapture
                         && canCaptureIfInCallOrCommunication(current)) {
                 if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
                 {
@@ -1069,7 +1075,7 @@
             //     OR
             //         Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
             if (!isAssistantOnTop
-                    && !(isSensitiveActive && !current->canCaptureOutput)
+                    && allowSensitiveCapture
                     && canCaptureIfInCallOrCommunication(current)) {
                 allowCapture = true;
             }
@@ -1323,7 +1329,9 @@
         case TRANSACTION_removeDevicesRoleForCapturePreset:
         case TRANSACTION_clearDevicesRoleForCapturePreset:
         case TRANSACTION_getDevicesForRoleAndCapturePreset:
-        case TRANSACTION_getSpatializer: {
+        case TRANSACTION_getSpatializer:
+        case TRANSACTION_setPreferredMixerAttributes:
+        case TRANSACTION_clearPreferredMixerAttributes: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1530,6 +1538,16 @@
         "  help print this message\n");
 }
 
+status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
+                        const audio_config_base_t& config,
+                        const audio_output_flags_t flags) {
+    return mUsecaseValidator->registerStream(output, config, flags);
+}
+
+status_t AudioPolicyService::unregisterOutput(audio_io_handle_t output) {
+    return mUsecaseValidator->unregisterStream(output);
+}
+
 // -----------  AudioPolicyService::UidPolicy implementation ----------
 
 void AudioPolicyService::UidPolicy::registerSelf() {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5c37f99..50f2180 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -36,6 +36,7 @@
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
+#include <media/UsecaseValidator.h>
 #include <mediautils/ServiceUtilities.h>
 #include "AudioPolicyEffects.h"
 #include "CaptureStateNotifier.h"
@@ -134,7 +135,7 @@
                                                   int32_t* _aidl_return) override;
     binder::Status getStrategyForStream(AudioStreamType stream,
                                         int32_t* _aidl_return) override;
-    binder::Status getDevicesForAttributes(const media::AudioAttributesEx& attr,
+    binder::Status getDevicesForAttributes(const media::AudioAttributesInternal& attr,
                                            bool forVolume,
                                            std::vector<AudioDevice>* _aidl_return) override;
     binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
@@ -172,16 +173,16 @@
                                            const media::AudioAttributesInternal& attributes,
                                            bool* _aidl_return) override;
     binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
-                                  Int* count, std::vector<media::AudioPort>* ports,
+                                  Int* count, std::vector<media::AudioPortFw>* ports,
                                   int32_t* _aidl_return) override;
     binder::Status getAudioPort(int portId,
-                                media::AudioPort* _aidl_return) override;
-    binder::Status createAudioPatch(const media::AudioPatch& patch, int32_t handle,
+                                media::AudioPortFw* _aidl_return) override;
+    binder::Status createAudioPatch(const media::AudioPatchFw& patch, int32_t handle,
                                     int32_t* _aidl_return) override;
     binder::Status releaseAudioPatch(int32_t handle) override;
-    binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatch>* patches,
+    binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatchFw>* patches,
                                     int32_t* _aidl_return) override;
-    binder::Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+    binder::Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
     binder::Status registerClient(const sp<media::IAudioPolicyServiceClient>& client) override;
     binder::Status setAudioPortCallbacksEnabled(bool enabled) override;
     binder::Status setAudioVolumeGroupCallbacksEnabled(bool enabled) override;
@@ -197,7 +198,7 @@
             int32_t userId,
             const std::vector<AudioDevice>& devices) override;
     binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
-    binder::Status startAudioSource(const media::AudioPortConfig& source,
+    binder::Status startAudioSource(const media::AudioPortConfigFw& source,
                                     const media::AudioAttributesInternal& attributes,
                                     int32_t* _aidl_return) override;
     binder::Status stopAudioSource(int32_t portId) override;
@@ -224,12 +225,12 @@
     binder::Status isUltrasoundSupported(bool* _aidl_return) override;
     binder::Status listAudioProductStrategies(
             std::vector<media::AudioProductStrategy>* _aidl_return) override;
-    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesEx& aa,
+    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesInternal& aa,
                                                          bool fallbackOnDefault,
                                                          int32_t* _aidl_return) override;
     binder::Status listAudioVolumeGroups(
             std::vector<media::AudioVolumeGroup>* _aidl_return) override;
-    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesEx& aa,
+    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesInternal& aa,
                                                      bool fallbackOnDefault,
                                                      int32_t* _aidl_return) override;
     binder::Status setRttEnabled(bool enabled) override;
@@ -277,6 +278,22 @@
     binder::Status getDirectProfilesForAttributes(const media::AudioAttributesInternal& attr,
                         std::vector<media::audio::common::AudioProfile>* _aidl_return) override;
 
+    binder::Status getSupportedMixerAttributes(
+            int32_t portId,
+            std::vector<media::AudioMixerAttributesInternal>* _aidl_return) override;
+    binder::Status setPreferredMixerAttributes(
+            const media::AudioAttributesInternal& attr,
+            int32_t portId,
+            int32_t uid,
+            const media::AudioMixerAttributesInternal& mixerAttr) override;
+    binder::Status getPreferredMixerAttributes(
+            const media::AudioAttributesInternal& attr,
+            int32_t portId,
+            std::optional<media::AudioMixerAttributesInternal>* _aidl_return) override;
+    binder::Status clearPreferredMixerAttributes(const media::AudioAttributesInternal& attr,
+                                                 int32_t portId,
+                                                 int32_t uid) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
     // IBinder::DeathRecipient
@@ -415,6 +432,11 @@
      */
     static bool isAppOpSource(audio_source_t source);
 
+    status_t registerOutput(audio_io_handle_t output,
+                            const audio_config_base_t& config,
+                            const audio_output_flags_t flags);
+    status_t unregisterOutput(audio_io_handle_t output);
+
     // If recording we need to make sure the UID is allowed to do that. If the UID is idle
     // then it cannot record and gets buffers with zeros - silence. As soon as the UID
     // transitions to an active state we will start reporting buffers with data. This approach
@@ -760,9 +782,6 @@
         // for each output (destination device) it is attached to.
         virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
 
-        // invalidate a stream type, causing a reroute to an unspecified new output
-        virtual status_t invalidateStream(audio_stream_type_t stream);
-
         // function enabling the audio policy manager to send proprietary information directly to the audio hardware interface.
         virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
         // function enabling the audio policy manager to receive proprietary information directly from the audio hardware interface.
@@ -822,6 +841,8 @@
         status_t setDeviceConnectedState(
                 const struct audio_port_v7 *port, bool connected) override;
 
+        status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -887,7 +908,7 @@
 
         const audio_attributes_t attributes; // source, flags ...
         const audio_io_handle_t io;          // audio HAL stream IO handle
-        const AttributionSourceState& attributionSource; //client attributionsource
+        const AttributionSourceState attributionSource; // client attribution source
         const audio_session_t session;       // audio session ID
         const audio_port_handle_t portId;
         const audio_port_handle_t deviceId;  // selected input device port ID
@@ -1066,6 +1087,7 @@
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
+    std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 2fe7b9e..51a916f 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -851,9 +851,10 @@
         // create FX instance on output
         AttributionSourceState attributionSource = AttributionSourceState();
         mEngine = new AudioEffect(attributionSource);
-        mEngine->set(nullptr, &mEngineDescriptor.uuid, 0, Spatializer::engineCallback /* cbf */,
-                     this /* user */, AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */,
-                     false /* probe */, true /* notifyFramesProcessed */);
+        mEngine->set(nullptr /* type */, &mEngineDescriptor.uuid, 0 /* priority */,
+                     wp<AudioEffect::IAudioEffectCallback>::fromExisting(this),
+                     AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */, false /* probe */,
+                     true /* notifyFramesProcessed */);
         status_t status = mEngine->initCheck();
         ALOGV("%s mEngine create status %d", __func__, (int)status);
         if (status != NO_ERROR) {
@@ -1023,27 +1024,10 @@
     }
 }
 
-void Spatializer::engineCallback(int32_t event, void *user, void *info) {
-    if (user == nullptr) {
-        return;
-    }
-    Spatializer* const me = reinterpret_cast<Spatializer *>(user);
-    switch (event) {
-        case AudioEffect::EVENT_FRAMES_PROCESSED: {
-            int frames = info == nullptr ? 0 : *(int*)info;
-            ALOGV("%s frames processed %d for me %p", __func__, frames, me);
-            me->postFramesProcessedMsg(frames);
-        } break;
-        default:
-            ALOGV("%s event %d", __func__, event);
-            break;
-    }
-}
-
-void Spatializer::postFramesProcessedMsg(int frames) {
+void Spatializer::onFramesProcessed(int32_t framesProcessed) {
     sp<AMessage> msg =
             new AMessage(EngineCallbackHandler::kWhatOnFramesProcessed, mHandler);
-    msg->setInt32(EngineCallbackHandler::kNumFramesKey, frames);
+    msg->setInt32(EngineCallbackHandler::kNumFramesKey, framesProcessed);
     msg->post();
 }
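The Spatializer change above drops the C-style engineCallback(event, user, info) trampoline in favor of implementing AudioEffect::IAudioEffectCallback and overriding onFramesProcessed() directly. A tiny self-contained sketch of that callback-interface pattern; the interface and class names here are illustrative, not the actual AudioEffect API:

    #include <cstdint>
    #include <cstdio>
    #include <memory>

    // Hypothetical callback interface, analogous in shape to onFramesProcessed().
    struct IFrameCallback {
        virtual ~IFrameCallback() = default;
        virtual void onFramesProcessed(int32_t frames) = 0;
    };

    // The engine holds a weak reference to the listener and notifies it if still alive.
    class Engine {
    public:
        explicit Engine(std::weak_ptr<IFrameCallback> cb) : mCallback(std::move(cb)) {}
        void process(int32_t frames) {
            if (auto cb = mCallback.lock()) cb->onFramesProcessed(frames);
        }
    private:
        std::weak_ptr<IFrameCallback> mCallback;
    };

    class Listener : public IFrameCallback {
    public:
        void onFramesProcessed(int32_t frames) override {
            printf("frames processed: %d\n", frames);
        }
    };

    int main() {
        auto listener = std::make_shared<Listener>();
        Engine engine{listener};
        engine.process(256);
    }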
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 7415b1e..3e4dd69 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -88,6 +88,7 @@
  * spatializer mixer thread is destroyed.
  */
 class Spatializer : public media::BnSpatializer,
+                    public AudioEffect::IAudioEffectCallback,
                     public IBinder::DeathRecipient,
                     private SpatializerPoseController::Listener,
                     public virtual AudioSystem::SupportedLatencyModesCallback {
@@ -325,7 +326,7 @@
         return NO_ERROR;
     }
 
-    void postFramesProcessedMsg(int frames);
+    virtual void onFramesProcessed(int32_t framesProcessed) override;
 
     /**
      * Checks if head and screen sensors must be actively monitored based on
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 96f58d2..6eca7cc 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -131,8 +131,6 @@
 
     size_t getAudioPortListUpdateCount() const { return mAudioPortListUpdateCount; }
 
-    virtual void addSupportedFormat(audio_format_t /* format */) {}
-
     void onRoutingUpdated() override {
         mRoutingUpdatedUpdateCount++;
     }
@@ -178,6 +176,38 @@
         return &(*it);
     }
 
+    String8 getParameters(audio_io_handle_t /* ioHandle */, const String8&  /* keys*/ ) override {
+        AudioParameter mAudioParameters;
+        std::string formats;
+        for (const auto& f : mSupportedFormats) {
+            if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+            formats += audio_format_to_string(f);
+        }
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedFormats),
+                String8(formats.c_str()));
+        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+        std::string channelMasks;
+        for (const auto& cm : mSupportedChannelMasks) {
+            if (!audio_channel_mask_is_valid(cm)) {
+                continue;
+            }
+            if (!channelMasks.empty()) channelMasks += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+            channelMasks += audio_channel_mask_to_string(cm);
+        }
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedChannels), String8(channelMasks.c_str()));
+        return mAudioParameters.toString();
+    }
+
+    void addSupportedFormat(audio_format_t format) {
+        mSupportedFormats.insert(format);
+    }
+
+    void addSupportedChannelMask(audio_channel_mask_t channelMask) {
+        mSupportedChannelMasks.insert(channelMask);
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -188,6 +218,8 @@
     size_t mRoutingUpdatedUpdateCount = 0;
     std::vector<struct audio_port_v7> mConnectedDevicePorts;
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
+    std::set<audio_format_t> mSupportedFormats;
+    std::set<audio_channel_mask_t> mSupportedChannelMasks;
 };
 
 } // namespace android
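The test client's getParameters() above reports the registered formats and channel masks as separator-delimited value lists under keyStreamSupportedFormats and keyStreamSupportedChannels. A standalone sketch of building such a list with plain strings; the '|' separator below is assumed as a stand-in for AUDIO_PARAMETER_VALUE_LIST_SEPARATOR:

    #include <cstdio>
    #include <set>
    #include <string>

    // Join a set of names into a single delimited value list, mirroring how the
    // test client concatenates supported formats/channel masks for getParameters().
    static std::string joinValues(const std::set<std::string>& values, const char* sep) {
        std::string out;
        for (const auto& v : values) {
            if (!out.empty()) out += sep;
            out += v;
        }
        return out;
    }

    int main() {
        std::set<std::string> formats = {"AUDIO_FORMAT_PCM_16_BIT", "AUDIO_FORMAT_E_AC3"};
        // "|" is used here as an assumed stand-in for AUDIO_PARAMETER_VALUE_LIST_SEPARATOR.
        printf("sup_formats=%s\n", joinValues(formats, "|").c_str());
    }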
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
deleted file mode 100644
index 7343b9b..0000000
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <map>
-#include <set>
-
-#include <system/audio.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include "AudioPolicyTestClient.h"
-
-namespace android {
-
-class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
-public:
-    String8 getParameters(audio_io_handle_t /* ioHandle */, const String8&  /* keys*/ ) override {
-        AudioParameter mAudioParameters;
-        std::string formats;
-        for (const auto& f : mSupportedFormats) {
-            if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
-            formats += audio_format_to_string(f);
-        }
-        mAudioParameters.add(
-                String8(AudioParameter::keyStreamSupportedFormats),
-                String8(formats.c_str()));
-        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
-        mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
-        return mAudioParameters.toString();
-    }
-
-    void addSupportedFormat(audio_format_t format) override {
-        mSupportedFormats.insert(format);
-    }
-
-private:
-    std::set<audio_format_t> mSupportedFormats;
-};
-
-} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 8a85fee..0c04e35 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -54,7 +54,6 @@
                              float /*volume*/,
                              audio_io_handle_t /*output*/,
                              int /*delayMs*/) override { return NO_INIT; }
-    status_t invalidateStream(audio_stream_type_t /*stream*/) override { return NO_INIT; }
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
                        int /*delayMs*/) override { }
@@ -101,6 +100,9 @@
             const struct audio_port_v7 *port __unused, bool connected __unused) override {
         return NO_INIT;
     }
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
+        return NO_INIT;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index 10f8dc0..798332c 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -111,7 +111,7 @@
             continue;
         }
         std::string address = "11:22:33:44:55:66";
-        media::AudioPort aidlPort;
+        media::AudioPortFw aidlPort;
         ASSERT_EQ(OK, manager.deviceToAudioPort(device->type(), address.c_str(), "" /*name*/,
                                                  &aidlPort));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index c341b32..7f5c7a5 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstring>
 #include <memory>
 #include <string>
 #include <sys/wait.h>
@@ -34,7 +35,6 @@
 
 #include "AudioPolicyInterface.h"
 #include "AudioPolicyManagerTestClient.h"
-#include "AudioPolicyManagerTestClientForHdmi.h"
 #include "AudioPolicyTestClient.h"
 #include "AudioPolicyTestManager.h"
 
@@ -66,6 +66,14 @@
     return criterion;
 }
 
+AudioMixMatchCriterion createSessionIdCriterion(audio_session_t session, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mAudioSessionId = session;
+    criterion.mRule = exclude ?
+        RULE_EXCLUDE_AUDIO_SESSION_ID : RULE_MATCH_AUDIO_SESSION_ID;
+    return criterion;
+}
+
 } // namespace
 
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
@@ -151,9 +159,12 @@
             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
             audio_io_handle_t *output = nullptr,
             audio_port_handle_t *portId = nullptr,
-            audio_attributes_t attr = {});
+            audio_attributes_t attr = {},
+            audio_session_t session = AUDIO_SESSION_NONE,
+            int uid = 0);
     void getInputForAttr(
             const audio_attributes_t &attr,
+            audio_session_t session,
             audio_unique_id_t riid,
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
@@ -233,7 +244,9 @@
         audio_output_flags_t flags,
         audio_io_handle_t *output,
         audio_port_handle_t *portId,
-        audio_attributes_t attr) {
+        audio_attributes_t attr,
+        audio_session_t session,
+        int uid) {
     audio_io_handle_t localOutput;
     if (!output) output = &localOutput;
     *output = AUDIO_IO_HANDLE_NONE;
@@ -247,19 +260,21 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
+    bool isBitPerfect;
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource = AttributionSourceState();
-    attributionSource.uid = 0;
+    attributionSource.uid = uid;
     attributionSource.token = sp<BBinder>::make();
     ASSERT_EQ(OK, mManager->getOutputForAttr(
-                    &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
-                    selectedDeviceId, portId, {}, &outputType, &isSpatialized));
+                    &attr, output, session, &stream, attributionSource, &config, &flags,
+                    selectedDeviceId, portId, {}, &outputType, &isSpatialized, &isBitPerfect));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
 
 void AudioPolicyManagerTest::getInputForAttr(
         const audio_attributes_t &attr,
+        const audio_session_t session,
         audio_unique_id_t riid,
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
@@ -281,7 +296,7 @@
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     ASSERT_EQ(OK, mManager->getInputForAttr(
-            &attr, &input, riid, AUDIO_SESSION_NONE, attributionSource, &config, flags,
+            &attr, &input, riid, session, attributionSource, &config, flags,
             selectedDeviceId, &inputType, portId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
@@ -914,8 +929,8 @@
     audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     audio_attributes_t attr = {
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, 1, &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
-                    AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+     AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
 
     std::vector<audio_port_v7> ports;
     ASSERT_NO_FATAL_FAILURE(
@@ -969,6 +984,141 @@
     }
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferredMixerAttributes) {
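+    // Exercises set/get/clearPreferredMixerAttributes against a connected USB device, covering
+    // the rejection cases (unavailable device, non-USB device, non-media usage, wrong uid).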
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    audio_port_handle_t maxPortId = 0;
+    audio_port_handle_t speakerPortId;
+    audio_port_handle_t usbPortId;
+    for (auto device : devices) {
+        maxPortId = std::max(maxPortId, device->getId());
+        if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+            speakerPortId = device->getId();
+        } else if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            usbPortId = device->getId();
+        }
+    }
+
+    const uid_t uid = 1234;
+    const uid_t otherUid = 4321;
+    const audio_attributes_t mediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+    const audio_attributes_t alarmAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ALARM,
+    };
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+    for (const auto attrToSet : mixerAttributes) {
+        audio_mixer_attributes_t attrFromQuery = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+
+        // The given device is not available
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, maxPortId + 1, uid, &attrToSet));
+        // The only allowed device is USB
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, speakerPortId, uid, &attrToSet));
+        // The only allowed usage is media
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(&alarmAttr, usbPortId, uid, &attrToSet));
+        // Nothing has been set yet, so the query must return NAME_NOT_FOUND
+        EXPECT_EQ(NAME_NOT_FOUND,
+                  mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, usbPortId, uid, &attrToSet));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+        EXPECT_EQ(attrToSet.config.format, attrFromQuery.config.format);
+        EXPECT_EQ(attrToSet.config.sample_rate, attrFromQuery.config.sample_rate);
+        EXPECT_EQ(attrToSet.config.channel_mask, attrFromQuery.config.channel_mask);
+        EXPECT_EQ(attrToSet.mixer_behavior, attrFromQuery.mixer_behavior);
+        EXPECT_EQ(NAME_NOT_FOUND,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, speakerPortId, uid));
+        EXPECT_EQ(PERMISSION_DENIED,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, otherUid));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+    }
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, RoutingChangedWithPreferredMixerAttributes) {
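+    // Checks that an output opened with preferred mixer attributes is closed when a routing
+    // change (BT A2DP connection) moves media playback away from the USB device.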
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            usbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+
+    const uid_t uid = 1234;
+    const audio_attributes_t mediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &mediaAttr, usbPortId, uid, &mixerAttributes[0]));
+
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+            AUDIO_SESSION_NONE, uid);
+    status_t status = mManager->startOutput(portId);
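+    // If startOutput() fails with DEAD_OBJECT (e.g. the output was reopened in the meantime),
+    // request the output again and retry once.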
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+                AUDIO_SESSION_NONE, uid);
+        status = mManager->startOutput(portId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    EXPECT_NE(nullptr, mManager->getOutputs().valueFor(output));
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+    // When the BT device is connected, it is selected as the media device, which triggers a
+    // routing change. When that happens, the existing output opened with preferred mixer
+    // attributes is closed and reopened with the default config.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(output));
+
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+}
+
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void TearDown() override;
@@ -1130,9 +1280,6 @@
     std::map<audio_format_t, bool> getSurroundFormatsHelper();
     std::vector<audio_format_t> getReportedSurroundFormatsHelper();
     std::unordered_set<audio_format_t> getFormatsFromPorts();
-    AudioPolicyManagerTestClient* getClient() override {
-        return new AudioPolicyManagerTestClientForHdmi;
-    }
     void TearDown() override;
 
     static const std::string sTvConfig;
@@ -1352,19 +1499,45 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+struct DPTestParam {
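+    // Bundles the dynamic policy mix criteria with the client attributes/session used for the
+    // request and the expected routing outcome.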
+    DPTestParam(const std::vector<AudioMixMatchCriterion>& mixCriteria,
+                bool expected_match = false)
+     : mixCriteria(mixCriteria), attributes(defaultAttr), session(AUDIO_SESSION_NONE),
+       expected_match(expected_match) {}
+
+    DPTestParam& withUsage(audio_usage_t usage) {
+        attributes.usage = usage;
+        return *this;
+    }
+
+    DPTestParam& withTags(const char *tags) {
+        std::strncpy(attributes.tags, tags, sizeof(attributes.tags));
+        return *this;
+    }
+
+    DPTestParam& withSource(audio_source_t source) {
+        attributes.source = source;
+        return *this;
+    }
+
+    DPTestParam& withSessionId(audio_session_t sessionId) {
+        session = sessionId;
+        return *this;
+    }
+
+    std::vector<AudioMixMatchCriterion> mixCriteria;
+    audio_attributes_t attributes;
+    audio_session_t session;
+    bool expected_match;
+};
+
 class AudioPolicyManagerTestDPPlaybackReRouting : public AudioPolicyManagerTestDynamicPolicy,
-        public testing::WithParamInterface<audio_attributes_t> {
+        public testing::WithParamInterface<DPTestParam> {
 protected:
     void SetUp() override;
     void TearDown() override;
 
     std::unique_ptr<RecordingActivityTracker> mTracker;
-
-    std::vector<AudioMixMatchCriterion> mUsageRules = {
-            createUsageCriterion(AUDIO_USAGE_MEDIA),
-            createUsageCriterion(AUDIO_USAGE_ALARM)
-    };
-
     struct audio_port_v7 mInjectionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
@@ -1378,8 +1551,10 @@
     audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
     audioConfig.sample_rate = k48000SamplingRate;
+
+    DPTestParam param = GetParam();
     status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
     ASSERT_EQ(NO_ERROR, ret);
 
     struct audio_port_v7 extractionPort;
@@ -1392,8 +1567,9 @@
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
     std::string tags = "addr=" + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
-    getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
-            AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
+    getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
+                    AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+                    AUDIO_INPUT_FLAG_NONE, &mPortId);
     ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
     ASSERT_EQ(extractionPort.id, selectedDeviceId);
 
@@ -1406,152 +1582,169 @@
     AudioPolicyManagerTestDynamicPolicy::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, InitSuccess) {
-    // SetUp must finish with no assertions
-}
-
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, Dump) {
-    dumpToLog();
-}
-
 TEST_P(AudioPolicyManagerTestDPPlaybackReRouting, PlaybackReRouting) {
-    const audio_attributes_t attr = GetParam();
-    const audio_usage_t usage = attr.usage;
+    const DPTestParam param = GetParam();
+    const audio_attributes_t& attr = param.attributes;
 
     audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
             k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
-            attr);
-    if (std::find_if(begin(mUsageRules), end(mUsageRules),
-                [&usage](const AudioMixMatchCriterion &c) {
-                              return c.mRule == RULE_MATCH_ATTRIBUTE_USAGE &&
-                                     c.mValue.mUsage == usage;}) != end(mUsageRules) ||
-            (strncmp(attr.tags, "addr=", strlen("addr=")) == 0 &&
-                    strncmp(attr.tags + strlen("addr="), mMixAddress.c_str(),
-                    AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0)) {
+            attr, param.session);
+    if (param.expected_match) {
         EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
     } else {
         EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
     }
 }
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingUsageMatch,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
-                )
-        );
+const std::vector<AudioMixMatchCriterion> USAGE_MEDIA_ALARM_CRITERIA = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA),
+            createUsageCriterion(AUDIO_USAGE_ALARM)
+};
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingAddressPriorityMatch,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
-                )
-        );
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReroutingUsageMatch,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=other"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ALARM),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_GAME),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANT)));
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingUnHandledUsages,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
-                )
-        );
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReroutingAddressPriorityMatch,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION).withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ALARM)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_GAME)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VIRTUAL_SOURCE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("sometag;addr=remote_submix_media;othertag=somevalue"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("addr=remote_submix_media;othertag"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("sometag;othertag;addr=remote_submix_media")));
+
+static constexpr audio_session_t TEST_SESSION_ID = static_cast<audio_session_t>(42);
+static constexpr audio_session_t OTHER_SESSION_ID = static_cast<audio_session_t>(77);
+
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReRoutingWithSessionId,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        // Mix is matched because the session id matches the one specified by the mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ true)
+            .withSessionId(TEST_SESSION_ID),
+        // Mix is not matched because the session id doesn't match the one specified
+        // by the mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ false)
+            .withSessionId(OTHER_SESSION_ID),
+        // Mix is matched: the session id doesn't match the one specified by the rule,
+        // but an address is specified in the tags, and that takes precedence.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ true)
+            .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+        // Mix is matched: both the session id and the usage match the mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                      createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ true)
+            .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA),
+        // Mix is not matched: the session id matches the one specified by the mix rule,
+        // but the usage does not.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                      createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ false)
+                    .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_GAME),
+        // Mix is not matched: the usage matches the one specified by the mix rule,
+        // but the session id is excluded.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID, /*exclude=*/ true),
+                                     createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ false)
+                    .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
 
 class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
-        public testing::WithParamInterface<audio_attributes_t> {
+        public testing::WithParamInterface<DPTestParam> {
 protected:
     void SetUp() override;
     void TearDown() override;
 
     std::unique_ptr<RecordingActivityTracker> mTracker;
-
-    std::vector<AudioMixMatchCriterion> mSourceRules = {
-        createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
-        createCapturePresetCriterion(AUDIO_SOURCE_MIC),
-        createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
-    };
-
     struct audio_port_v7 mExtractionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
@@ -1565,8 +1758,10 @@
     audioConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
     audioConfig.sample_rate = k48000SamplingRate;
+
+    DPTestParam param = GetParam();
     status_t ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
+            AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
     ASSERT_EQ(NO_ERROR, ret);
 
     struct audio_port_v7 injectionPort;
@@ -1593,73 +1788,94 @@
     AudioPolicyManagerTestDynamicPolicy::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, InitSuccess) {
-    // SetUp mush finish with no assertions.
-}
-
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, Dump) {
-    dumpToLog();
-}
-
 TEST_P(AudioPolicyManagerTestDPMixRecordInjection, RecordingInjection) {
-    const audio_attributes_t attr = GetParam();
-    const audio_source_t source = attr.source;
+    const DPTestParam param = GetParam();
 
     audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT,
-            AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
-    if (std::find_if(begin(mSourceRules), end(mSourceRules),
-               [&source](const AudioMixMatchCriterion &c) {
-            return c.mRule == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET &&
-                   c.mValue.mSource == source;})
-            != end(mSourceRules)) {
+    getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
+        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+        AUDIO_INPUT_FLAG_NONE, &portId);
+    if (param.expected_match) {
         EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
     } else {
         EXPECT_NE(mExtractionPort.id, captureRoutedPortId);
     }
 }
 
+const std::vector<AudioMixMatchCriterion> SOURCE_CAM_MIC_VOICE_CRITERIA = {
+        createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
+        createCapturePresetCriterion(AUDIO_SOURCE_MIC),
+        createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
+};
+
 // No address priority rule for remote recording, address is a "don't care"
 INSTANTIATE_TEST_CASE_P(
-        RecordInjectionSourceMatch,
+        RecordInjectionSource,
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"}
-                )
-        );
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_CAMCORDER),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_CAMCORDER)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_MIC),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_MIC)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_VOICE_RECOGNITION),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_VOICE_RECOGNITION)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_HOTWORD),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_HOTWORD)
+                .withTags("addr=remote_submix_media")));
 
-// No address priority rule for remote recording
 INSTANTIATE_TEST_CASE_P(
-        RecordInjectionSourceNotMatch,
+        RecordInjectionWithSessionId,
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"}
-                )
-        );
+            // Mix is matched because the session id matches the one specified by the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ true)
+                .withSessionId(TEST_SESSION_ID),
+            // Mix is not matched because the session id doesn't match the one specified
+            // by the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ false)
+                .withSessionId(OTHER_SESSION_ID),
+            // Mix is not matched: the session id doesn't match the one specified by the rule,
+            // and the address specified in the tags is ignored for recorder mixes.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ false)
+                .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+            // Mix is matched: both the session id and the source match the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+                        /*expected_match=*/ true)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_CAMCORDER),
+            // Mix is not matched: the session id matches the one specified by the mix rule,
+            // but the source does not.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+                        /*expected_match=*/ false)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC),
+            // Mix is not matched: the source matches the one specified by the mix rule,
+            // but the session id is excluded.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID,
+                                                                       /*exclude=*/ true),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_MIC)},
+                        /*expected_match=*/ false)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC)));
 
 using DeviceConnectionTestParams =
         std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/>;
@@ -1762,8 +1978,9 @@
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
     } else if (audio_is_input_device(type)) {
         RecordingActivityTracker tracker;
-        getInputForAttr({}, tracker.getRiid(), &routedPortId, AUDIO_FORMAT_PCM_16_BIT,
-                AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE);
+        getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+         AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+         AUDIO_INPUT_FLAG_NONE);
     }
     ASSERT_EQ(devicePort.id, routedPortId);
 
@@ -1790,7 +2007,7 @@
     }
     const std::string name = std::get<1>(GetParam());
     const std::string address = std::get<2>(GetParam());
-    android::media::AudioPort audioPort;
+    android::media::AudioPortFw audioPort;
     ASSERT_EQ(NO_ERROR,
             mManager->deviceToAudioPort(type, address.c_str(), name.c_str(), &audioPort));
     android::media::audio::common::AudioPort& port = audioPort.hal;
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index d342aea..c937d3a 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -54,6 +54,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                            samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
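+                <!-- No static profiles: supported formats and channel masks are expected to be
+                     reported dynamically by the test client's getParameters(). -->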
+                <mixPort name="hifi_output" role="source"/>
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -74,6 +75,8 @@
                 <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
                             encodedFormats="AUDIO_FORMAT_LDAC AUDIO_FORMAT_APTX AUDIO_FORMAT_APTX_HD AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
                 </devicePort>
+                <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+                </devicePort>
             </devicePorts>
             <routes>
                 <route type="mix" sink="Speaker"
@@ -89,7 +92,9 @@
                 <route type="mix" sink="mixport_bt_hfp_input"
                        sources="BT SCO Headset Mic"/>
                 <route type="mix" sink="BT A2DP Out"
-                       sources="primary output"/>
+                       sources="primary output,hifi_output"/>
+                <route type="mix" sink="USB Device Out"
+                       sources="primary output,hifi_output"/>
             </routes>
         </module>
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..1e6524f 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -57,7 +57,6 @@
         "api1/client2/StreamingProcessor.cpp",
         "api1/client2/JpegProcessor.cpp",
         "api1/client2/CallbackProcessor.cpp",
-        "api1/client2/JpegCompressor.cpp",
         "api1/client2/CaptureSequencer.cpp",
         "api1/client2/ZslProcessor.cpp",
         "api2/CameraDeviceClient.cpp",
@@ -66,6 +65,7 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
+        "api2/JpegRCompositeStream.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
@@ -156,14 +156,14 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
         "media_permission-aidl-cpp",
     ],
 
@@ -173,6 +173,9 @@
         "libbinderthreadstateutils",
         "media_permission-aidl-cpp",
         "libcameraservice_device_independent",
+        "libjpegrecoverymap",
+        "libjpegencoder",
+        "libjpegdecoder",
     ],
 
     export_shared_lib_headers: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2a04658..0213623 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -142,7 +142,10 @@
 // Set to keep track of logged service error events.
 static std::set<String8> sServiceErrorEventSet;
 
-CameraService::CameraService() :
+CameraService::CameraService(
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
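+        // Use the injected proxy wrapper when provided; otherwise create the default one.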
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
+                std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
         mNumberOfCameras(0),
         mNumberOfCamerasWithoutSystemCamera(0),
@@ -195,7 +198,7 @@
 
     // This needs to be last call in this function, so that it's as close to
     // ServiceManager::addService() as possible.
-    CameraServiceProxyWrapper::pingCameraServiceProxy();
+    mCameraServiceProxyWrapper->pingCameraServiceProxy();
     ALOGI("CameraService pinged cameraservice proxy");
 }
 
@@ -276,7 +279,10 @@
                     cameraId.c_str());
             continue;
         }
-        i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
+        auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+                String16{cameraId});
+        i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -547,8 +553,12 @@
                         id.c_str());
                 continue;
             }
-            listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
-                    id16, physicalId16);
+            auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
+                    mapToInterface(newStatus), id16, physicalId16);
+            listener->handleBinderStatus(ret,
+                    "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+                    __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                    ret.exceptionCode());
         }
     }
 }
@@ -589,8 +599,11 @@
         int32_t newStrengthLevel) {
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
-        i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
+        auto ret = i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
                 newStrengthLevel);
+        i->handleBinderStatus(ret,
+                "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
+                i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -985,17 +998,18 @@
     }
     if (effectiveApiLevel == API_1) { // Camera1 API route
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-        *client = new Camera2Client(cameraService, tmp, packageName, featureId,
-                cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
-                servicePid, overrideForPerfClass, overrideToPortrait);
+        *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+                packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
         ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
                 overrideToPortrait);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
-        *client = new CameraDeviceClient(cameraService, tmp, packageName,
-                systemNativeClient, featureId, cameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+        *client = new CameraDeviceClient(cameraService, tmp,
+                cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+                featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+                overrideForPerfClass, overrideToPortrait);
         ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
@@ -1722,7 +1736,7 @@
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
-    if (CameraServiceProxyWrapper::isCameraDisabled(clientUserId)) {
+    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
         String8 msg =
                 String8::format("Camera disabled by device policy");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -1731,7 +1745,8 @@
 
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+            !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         String8 msg =
                 String8::format("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
@@ -2000,8 +2015,17 @@
             client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
             client->setRotateAndCropOverride(
-              CameraServiceProxyWrapper::getRotateAndCropOverride(
-                  clientPackageName, facing, multiuser_get_user_id(clientUid)));
+                mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                    clientPackageName, facing, multiuser_get_user_id(clientUid)));
+        }
+
+        // Set autoframing override behaviour
+        if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+            client->setAutoframingOverride(mOverrideAutoframingMode);
+        } else {
+            client->setAutoframingOverride(
+                mCameraServiceProxyWrapper->getAutoframingOverride(
+                    clientPackageName));
         }
 
         // Set camera muting behavior
@@ -2050,7 +2074,7 @@
     device = client;
 
     int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
-    CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+    mCameraServiceProxyWrapper->logOpen(cameraId, facing, clientPackageName,
             effectiveApiLevel, isNonSystemNdk, openLatencyMs);
 
     {
@@ -2116,6 +2140,10 @@
                 onlineClientDesc->getOwnerId(), onlinePriority.getState(),
                 // native clients don't have offline processing support.
                 /*ommScoreOffset*/ 0, /*systemNativeClient*/false);
+        if (offlineClientDesc == nullptr) {
+            ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
+            return BAD_VALUE;
+        }
 
         // Allow only one offline device per camera
         auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -2476,10 +2504,8 @@
 
     for (const auto& it : mListenerList) {
         auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
-        if (!ret.isOk()) {
-            ALOGE("%s: Failed to trigger permission callback: %d", __FUNCTION__,
-                    ret.exceptionCode());
-        }
+        it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+                __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -2543,7 +2569,7 @@
             const auto basicClient = current->getValue();
             if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
                 basicClient->setRotateAndCropOverride(
-                        CameraServiceProxyWrapper::getRotateAndCropOverride(
+                        mCameraServiceProxyWrapper->getRotateAndCropOverride(
                                 basicClient->getPackageName(),
                                 basicClient->getCameraFacing(),
                                 multiuser_get_user_id(basicClient->getClientUid())));
@@ -4717,8 +4743,12 @@
                             cameraId.c_str());
                     continue;
                 }
-                listener->getListener()->onStatusChanged(mapToInterface(status),
+                auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
                         String16(cameraId));
+                listener->handleBinderStatus(ret,
+                        "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                        __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                        ret.exceptionCode());
             }
         });
 }
@@ -4751,10 +4781,10 @@
         } else {
             ret = it->getListener()->onCameraClosed(cameraId64);
         }
-        if (!ret.isOk()) {
-            ALOGE("%s: Failed to trigger onCameraOpened/onCameraClosed callback: %d", __FUNCTION__,
-                    ret.exceptionCode());
-        }
+
+        it->handleBinderStatus(ret,
+                "%s: Failed to trigger onCameraOpened/onCameraClosed callback for %d:%d: %d",
+                __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -4855,8 +4885,12 @@
                         String8(physicalCameraId).c_str());
                 continue;
             }
-            listener->getListener()->onPhysicalCameraStatusChanged(status,
+            auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
                     logicalCameraId, physicalCameraId);
+            listener->handleBinderStatus(ret,
+                    "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+                    __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                    ret.exceptionCode());
         }
     }
 }
@@ -4904,6 +4938,10 @@
         return handleSetRotateAndCrop(args);
     } else if (args.size() >= 1 && args[0] == String16("get-rotate-and-crop")) {
         return handleGetRotateAndCrop(out);
+    } else if (args.size() >= 2 && args[0] == String16("set-autoframing")) {
+        return handleSetAutoframing(args);
+    } else if (args.size() >= 1 && args[0] == String16("get-autoframing")) {
+        return handleGetAutoframing(out);
     } else if (args.size() >= 2 && args[0] == String16("set-image-dump-mask")) {
         return handleSetImageDumpMask(args);
     } else if (args.size() >= 1 && args[0] == String16("get-image-dump-mask")) {
@@ -5007,6 +5045,34 @@
     return OK;
 }
 
+status_t CameraService::handleSetAutoframing(const Vector<String16>& args) {
+    char* end;
+    int autoframingValue = (int) strtol(String8(args[1]), &end, /*base=*/10);
+    if ((*end != '\0') ||
+            (autoframingValue != ANDROID_CONTROL_AUTOFRAMING_OFF &&
+             autoframingValue != ANDROID_CONTROL_AUTOFRAMING_ON &&
+             autoframingValue != ANDROID_CONTROL_AUTOFRAMING_AUTO)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+    mOverrideAutoframingMode = autoframingValue;
+
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) return OK;
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                basicClient->setAutoframingOverride(autoframingValue);
+            }
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraService::handleSetCameraServiceWatchdog(const Vector<String16>& args) {
     int enableWatchdog = atoi(String8(args[1]));
 
@@ -5035,6 +5101,12 @@
     return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
 }
 
+status_t CameraService::handleGetAutoframing(int out) {
+    Mutex::Autolock lock(mServiceLock);
+
+    return dprintf(out, "autoframing override: %d\n", mOverrideAutoframingMode);
+}
+
 status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
     char *endPtr;
     errno = 0;
@@ -5432,6 +5504,9 @@
         "  set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
         "      Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
         "  get-rotate-and-crop returns the current override rotate-and-crop value\n"
+        "  set-autoframing <VALUE> overrides the autoframing value for AUTO\n"
+        "      Valid values 0=false, 1=true, 2=auto\n"
+        "  get-autoframing returns the current override autoframing value\n"
         "  set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
         "      Valid values 0=OFF, 1=ON for JPEG\n"
         "  get-image-dump-mask returns the current image-dump-mask value\n"
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 840e9b6..4f92f15 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -48,6 +48,7 @@
 #include "utils/AutoConditionLock.h"
 #include "utils/ClientManager.h"
 #include "utils/IPCTransport.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #include <set>
 #include <string>
@@ -100,7 +101,10 @@
     // Implementation of BinderService<T>
     static char const* getServiceName() { return "media.camera"; }
 
-                        CameraService();
+                        // A non-null cameraServiceProxyWrapper argument should be provided
+                        // for testing purposes only.
+                        CameraService(std::shared_ptr<CameraServiceProxyWrapper>
+                                cameraServiceProxyWrapper = nullptr);
     virtual             ~CameraService();
 
     /////////////////////////////////////////////////////////////////////
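The constructor comment above marks the cameraServiceProxyWrapper argument as test-only. A minimal sketch of how a test could inject a double, assuming a hypothetical FakeCameraServiceProxyWrapper subclass that is not part of this change:

    // Hypothetical test-only wiring; FakeCameraServiceProxyWrapper is an assumed
    // test double deriving from CameraServiceProxyWrapper.
    auto fakeProxy = std::make_shared<FakeCameraServiceProxyWrapper>();
    sp<CameraService> service = new CameraService(fakeProxy);
    // Production callers keep the default nullptr argument, in which case the
    // service is expected to create its own wrapper internally.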
@@ -339,6 +343,9 @@
         // Override rotate-and-crop AUTO behavior
         virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
 
+        // Override autoframing AUTO behavior
+        virtual status_t setAutoframingOverride(uint8_t autoframingValue) = 0;
+
         // Whether the client supports camera muting (black only output)
         virtual bool supportsCameraMute() = 0;
 
@@ -781,6 +788,8 @@
 
     sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // Delay-load the Camera HAL module
     virtual void onFirstRef();
 
@@ -1076,6 +1085,29 @@
                 return IInterface::asBinder(mListener)->linkToDeath(this);
             }
 
+            template<typename... args_t>
+            void handleBinderStatus(const binder::Status &ret, const char *logOnError,
+                    args_t... args) {
+                if (!ret.isOk() &&
+                        (ret.exceptionCode() != binder::Status::Exception::EX_TRANSACTION_FAILED
+                        || !mLastTransactFailed)) {
+                    ALOGE(logOnError, args...);
+                }
+
+                // If the transaction failed, the process may have died (or other things, see
+                // b/28321379). Mute consecutive errors from this listener to avoid log spam.
+                if (ret.exceptionCode() == binder::Status::Exception::EX_TRANSACTION_FAILED) {
+                    if (!mLastTransactFailed) {
+                        ALOGE("%s: Muting similar errors from listener %d:%d", __FUNCTION__,
+                                mListenerUid, mListenerPid);
+                    }
+                    mLastTransactFailed = true;
+                } else {
+                    // Reset mLastTransactFailed when binder becomes healthy again.
+                    mLastTransactFailed = false;
+                }
+            }
+
             virtual void binderDied(const wp<IBinder> &/*who*/) {
                 auto parent = mParent.promote();
                 if (parent.get() != nullptr) {
@@ -1096,6 +1128,9 @@
             int mListenerPid = -1;
             bool mIsVendorListener = false;
             bool mOpenCloseCallbackAllowed = false;
+
+            // Flag for preventing log spam when binder becomes unhealthy
+            bool mLastTransactFailed = false;
     };
 
     // Guarded by mStatusListenerMutex
@@ -1207,6 +1242,12 @@
     // Get the rotate-and-crop AUTO override behavior
     status_t handleGetRotateAndCrop(int out);
 
+    // Set the autoframing AUTO override behavior
+    status_t handleSetAutoframing(const Vector<String16>& args);
+
+    // Get the autoframing AUTO override behavior
+    status_t handleGetAutoframing(int out);
+
     // Set the mask for image dump to disk
     status_t handleSetImageDumpMask(const Vector<String16>& args);
 
@@ -1302,6 +1343,9 @@
     // Current override cmd rotate-and-crop mode; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
 
+    // Current autoframing mode
+    uint8_t mOverrideAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_AUTO;
+
     // Current image dump mask
     uint8_t mImageDumpMask = 0;
 
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index e101dd3..74497d1 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -41,7 +41,8 @@
             tidToCycleCounterMap[currentThreadId]++;
 
             if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
-                ALOGW("CameraServiceWatchdog triggering abort for pid: %d", getpid());
+                ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
+                        currentThreadId);
                 // We use abort here so we can get a tombstone for better
                 // debugging.
                 abort();
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 0887ced..d447fe0 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -52,6 +52,7 @@
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraDeviceId,
@@ -63,7 +64,7 @@
         int servicePid,
         bool overrideForPerfClass,
         bool overrideToPortrait):
-        Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
@@ -141,28 +142,53 @@
     mFrameProcessor = new FrameProcessor(mDevice, this);
     threadName = String8::format("C2-%d-FrameProc",
             mCameraId);
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mCaptureSequencer = new CaptureSequencer(this);
     threadName = String8::format("C2-%d-CaptureSeq",
             mCameraId);
-    mCaptureSequencer->run(threadName.string());
+    res = mCaptureSequencer->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
     threadName = String8::format("C2-%d-JpegProc",
             mCameraId);
-    mJpegProcessor->run(threadName.string());
+    res = mJpegProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
 
     threadName = String8::format("C2-%d-ZslProc",
             mCameraId);
-    mZslProcessor->run(threadName.string());
+    res = mZslProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mCallbackProcessor = new CallbackProcessor(this);
     threadName = String8::format("C2-%d-CallbkProc",
             mCameraId);
-    mCallbackProcessor->run(threadName.string());
+    res = mCallbackProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start callback processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     if (gLogLevel >= 1) {
         SharedParameters::Lock l(mParameters);
@@ -480,7 +506,7 @@
     CameraService::Client::disconnect();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs);
 
     return res;
 }
@@ -2339,6 +2365,13 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
+    if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+    return mDevice->setAutoframingAutoBehavior(
+        static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
 bool Camera2Client::supportsCameraMute() {
     return mDevice->supportsCameraMute();
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 9c540a4..c5324db 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -86,6 +86,7 @@
     virtual status_t        setAudioRestriction(int mode);
     virtual int32_t         getGlobalAudioRestriction();
     virtual status_t        setRotateAndCropOverride(uint8_t rotateAndCrop);
+    virtual status_t        setAutoframingOverride(uint8_t autoframingMode);
 
     virtual bool            supportsCameraMute();
     virtual status_t        setCameraMute(bool enabled);
@@ -98,6 +99,7 @@
 
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
             const String8& cameraDeviceId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
deleted file mode 100644
index 01951a0..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Camera2-JpegCompressor"
-
-#include <utils/Log.h>
-#include <ui/GraphicBufferMapper.h>
-
-#include "JpegCompressor.h"
-
-namespace android {
-namespace camera2 {
-
-JpegCompressor::JpegCompressor():
-        Thread(false),
-        mIsBusy(false),
-        mCaptureTime(0) {
-}
-
-JpegCompressor::~JpegCompressor() {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mMutex);
-}
-
-status_t JpegCompressor::start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
-        nsecs_t captureTime) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock busyLock(mBusyMutex);
-
-    if (mIsBusy) {
-        ALOGE("%s: Already processing a buffer!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    mIsBusy = true;
-
-    mBuffers = buffers;
-    mCaptureTime = captureTime;
-
-    status_t res;
-    res = run("JpegCompressor");
-    if (res != OK) {
-        ALOGE("%s: Unable to start up compression thread: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        //delete mBuffers;  // necessary?
-    }
-    return res;
-}
-
-status_t JpegCompressor::cancel() {
-    ALOGV("%s", __FUNCTION__);
-    requestExitAndWait();
-    return OK;
-}
-
-status_t JpegCompressor::readyToRun() {
-    ALOGV("%s", __FUNCTION__);
-    return OK;
-}
-
-bool JpegCompressor::threadLoop() {
-    ALOGV("%s", __FUNCTION__);
-
-    mAuxBuffer = mBuffers[0];    // input
-    mJpegBuffer = mBuffers[1];    // output
-
-    // Set up error management
-    mJpegErrorInfo = NULL;
-    JpegError error;
-    error.parent = this;
-
-    mCInfo.err = jpeg_std_error(&error);
-    mCInfo.err->error_exit = jpegErrorHandler;
-
-    jpeg_create_compress(&mCInfo);
-    if (checkError("Error initializing compression")) return false;
-
-    // Route compressed data straight to output stream buffer
-    JpegDestination jpegDestMgr;
-    jpegDestMgr.parent = this;
-    jpegDestMgr.init_destination = jpegInitDestination;
-    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
-    jpegDestMgr.term_destination = jpegTermDestination;
-
-    mCInfo.dest = &jpegDestMgr;
-
-    // Set up compression parameters
-    mCInfo.image_width = mAuxBuffer->width;
-    mCInfo.image_height = mAuxBuffer->height;
-    mCInfo.input_components = 1; // 3;
-    mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB
-
-    ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height);
-
-    jpeg_set_defaults(&mCInfo);
-    if (checkError("Error configuring defaults")) return false;
-
-    // Do compression
-    jpeg_start_compress(&mCInfo, TRUE);
-    if (checkError("Error starting compression")) return false;
-
-    size_t rowStride = mAuxBuffer->stride;// * 3;
-    const size_t kChunkSize = 32;
-    while (mCInfo.next_scanline < mCInfo.image_height) {
-        JSAMPROW chunk[kChunkSize];
-        for (size_t i = 0 ; i < kChunkSize; i++) {
-            chunk[i] = (JSAMPROW)
-                    (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride);
-        }
-        jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
-        if (checkError("Error while compressing")) return false;
-        if (exitPending()) {
-            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
-            cleanUp();
-            return false;
-        }
-    }
-
-    jpeg_finish_compress(&mCInfo);
-    if (checkError("Error while finishing compression")) return false;
-
-    cleanUp();
-    return false;
-}
-
-bool JpegCompressor::isBusy() {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock busyLock(mBusyMutex);
-    return mIsBusy;
-}
-
-// old function -- TODO: update for new buffer type
-bool JpegCompressor::isStreamInUse(uint32_t /*id*/) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mBusyMutex);
-
-    if (mBuffers.size() && mIsBusy) {
-        for (size_t i = 0; i < mBuffers.size(); i++) {
-//            if ( mBuffers[i].streamId == (int)id ) return true;
-        }
-    }
-    return false;
-}
-
-bool JpegCompressor::waitForDone(nsecs_t timeout) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mBusyMutex);
-    status_t res = OK;
-    if (mIsBusy) {
-        res = mDone.waitRelative(mBusyMutex, timeout);
-    }
-    return (res == OK);
-}
-
-bool JpegCompressor::checkError(const char *msg) {
-    ALOGV("%s", __FUNCTION__);
-    if (mJpegErrorInfo) {
-        char errBuffer[JMSG_LENGTH_MAX];
-        mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
-        ALOGE("%s: %s: %s",
-                __FUNCTION__, msg, errBuffer);
-        cleanUp();
-        mJpegErrorInfo = NULL;
-        return true;
-    }
-    return false;
-}
-
-void JpegCompressor::cleanUp() {
-    ALOGV("%s", __FUNCTION__);
-    jpeg_destroy_compress(&mCInfo);
-    Mutex::Autolock lock(mBusyMutex);
-    mIsBusy = false;
-    mDone.signal();
-}
-
-void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
-    ALOGV("%s", __FUNCTION__);
-    JpegError *error = static_cast<JpegError*>(cinfo->err);
-    error->parent->mJpegErrorInfo = cinfo;
-}
-
-void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
-    ALOGV("%s", __FUNCTION__);
-    JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
-    ALOGV("%s: Setting destination to %p, size %zu",
-            __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize);
-    dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data);
-    dest->free_in_buffer = kMaxJpegSize;
-}
-
-boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) {
-    ALOGV("%s", __FUNCTION__);
-    ALOGE("%s: JPEG destination buffer overflow!",
-            __FUNCTION__);
-    return true;
-}
-
-void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
-    (void) cinfo; // TODO: clean up
-    ALOGV("%s", __FUNCTION__);
-    ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
-            __FUNCTION__, cinfo->dest->free_in_buffer);
-}
-
-}; // namespace camera2
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
deleted file mode 100644
index 589a2fd..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * This class simulates a hardware JPEG compressor.  It receives image buffers
- * in RGBA_8888 format, processes them in a worker thread, and then pushes them
- * out to their destination stream.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-
-#include "utils/Thread.h"
-#include "utils/Mutex.h"
-#include "utils/Timers.h"
-#include "utils/Vector.h"
-//#include "Base.h"
-#include <stdio.h>
-#include <gui/CpuConsumer.h>
-
-extern "C" {
-#include <jpeglib.h>
-}
-
-
-namespace android {
-namespace camera2 {
-
-class JpegCompressor: private Thread, public virtual RefBase {
-  public:
-
-    JpegCompressor();
-    ~JpegCompressor();
-
-    // Start compressing COMPRESSED format buffers; JpegCompressor takes
-    // ownership of the Buffers vector.
-    status_t start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
-            nsecs_t captureTime);
-
-    status_t cancel();
-
-    bool isBusy();
-    bool isStreamInUse(uint32_t id);
-
-    bool waitForDone(nsecs_t timeout);
-
-    // TODO: Measure this
-    static const size_t kMaxJpegSize = 300000;
-
-  private:
-    Mutex mBusyMutex;
-    Mutex mMutex;
-    bool mIsBusy;
-    Condition mDone;
-    nsecs_t mCaptureTime;
-
-    Vector<CpuConsumer::LockedBuffer*> mBuffers;
-    CpuConsumer::LockedBuffer *mJpegBuffer;
-    CpuConsumer::LockedBuffer *mAuxBuffer;
-
-    jpeg_compress_struct mCInfo;
-
-    struct JpegError : public jpeg_error_mgr {
-        JpegCompressor *parent;
-    };
-    j_common_ptr mJpegErrorInfo;
-
-    struct JpegDestination : public jpeg_destination_mgr {
-        JpegCompressor *parent;
-    };
-
-    static void jpegErrorHandler(j_common_ptr cinfo);
-
-    static void jpegInitDestination(j_compress_ptr cinfo);
-    static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
-    static void jpegTermDestination(j_compress_ptr cinfo);
-
-    bool checkError(const char *msg);
-    void cleanUp();
-
-    /**
-     * Inherited Thread virtual overrides
-     */
-  private:
-    virtual status_t readyToRun();
-    virtual bool threadLoop();
-};
-
-}; // namespace camera2
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 39f8d00..c08aff3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -31,12 +31,12 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <camera_metadata_hidden.h>
 
 #include "DepthCompositeStream.h"
 #include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
 
 // Convenience methods for constructing binder::Status objects for error returns
 
@@ -89,6 +89,7 @@
 
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -100,9 +101,10 @@
         int servicePid,
         bool overrideForPerfClass,
         bool overrideToPortrait) :
-    Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
+    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+            systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+            sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
+            overrideToPortrait),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
@@ -130,7 +132,12 @@
     String8 threadName;
     mFrameProcessor = new FrameProcessorBase(mDevice);
     threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                                       camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -695,7 +702,7 @@
 
         nsecs_t configureEnd = systemTime();
         int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
-        CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+        mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
                 false /*internalReconfig*/, configureDurationMs);
     }
 
@@ -882,6 +889,7 @@
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
@@ -926,7 +934,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
 
         if (!res.isOk())
             return res;
@@ -948,19 +956,24 @@
     bool isDepthCompositeStream =
             camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
-    if (isDepthCompositeStream || isHeicCompisiteStream) {
+    bool isJpegRCompositeStream =
+        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]);
+    if (isDepthCompositeStream || isHeicCompisiteStream || isJpegRCompositeStream) {
         sp<CompositeStream> compositeStream;
         if (isDepthCompositeStream) {
             compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
-        } else {
+        } else if (isHeicCompisiteStream) {
             compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+        } else {
+            compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
         }
 
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
-                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+                streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
         if (err == OK) {
             Mutex::Autolock l(mCompositeLock);
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -973,7 +986,7 @@
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                 outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
                 /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
-                streamInfo.timestampBase, streamInfo.mirrorMode);
+                streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace);
     }
 
     if (err != OK) {
@@ -1025,6 +1038,7 @@
     int width, height, format, surfaceType;
     uint64_t consumerUsage;
     android_dataspace dataSpace;
+    int32_t colorSpace;
     status_t err;
     binder::Status res;
 
@@ -1038,6 +1052,7 @@
     surfaceType = outputConfiguration.getSurfaceType();
     format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+    colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     // Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
     consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
     if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1087,7 +1102,8 @@
                         outputConfiguration.getDynamicRangeProfile(),
                         outputConfiguration.getStreamUseCase(),
                         outputConfiguration.getTimestampBase(),
-                        outputConfiguration.getMirrorMode()));
+                        outputConfiguration.getMirrorMode(),
+                        colorSpace));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -1278,6 +1294,7 @@
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
     int mirrorMode = outputConfiguration.getMirrorMode();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1286,7 +1303,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
         if (!res.isOk())
             return res;
 
@@ -1644,7 +1661,8 @@
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
-    int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
     for (auto& bufferProducer : bufferProducers) {
@@ -1660,7 +1678,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
 
         if (!res.isOk())
             return res;
@@ -1744,6 +1762,13 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
+    if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+    return mDevice->setAutoframingAutoBehavior(
+        static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
 bool CameraDeviceClient::supportsCameraMute() {
     return mDevice->supportsCameraMute();
 }
@@ -1804,7 +1829,8 @@
         for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
-                camera3::HeicCompositeStream::isHeicCompositeStream(s);
+                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+                camera3::JpegRCompositeStream::isJpegRCompositeStream(s);
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
                 if (compositeIdx == NAME_NOT_FOUND) {
@@ -2064,7 +2090,7 @@
     Camera2ClientBase::detachDevice();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs);
 }
 
 /** Device-related methods */
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 6bb64d6..0c87872 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
@@ -179,6 +180,7 @@
 
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             bool clientPackageOverride,
             const std::optional<String16>& clientFeatureId,
@@ -197,6 +199,8 @@
 
     virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    virtual status_t      setAutoframingOverride(uint8_t autoframingValue) override;
+
     virtual bool          supportsCameraMute();
     virtual status_t      setCameraMute(bool enabled);
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index beb655b..ea90987 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -44,7 +44,12 @@
     String8 threadName;
     mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
     threadName = String8::format("Offline-%s-FrameProc", mCameraIdStr.string());
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                                       camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -76,6 +81,10 @@
     return OK;
 }
 
+status_t CameraOfflineSessionClient::setAutoframingOverride(uint8_t) {
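+    // No-op: offline sessions only post-process previously captured buffers,
+    // so there is nothing for an autoframing override to act on.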
+    return OK;
+}
+
 bool CameraOfflineSessionClient::supportsCameraMute() {
     // Offline mode doesn't support muting
     return false;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 8edb64a..89b27f8 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -82,6 +82,8 @@
 
     status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
     bool supportsCameraMute() override;
     status_t setCameraMute(bool enabled) override;
 
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 4b840fc..8cc47ee 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
         camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> * surfaceIds,
-        int streamSetId, bool isShared, bool isMultiResolution) {
+        int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -75,7 +76,8 @@
     }
 
     return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
-            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+            colorSpace, dynamicProfile, streamUseCase);
 }
 
 status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..99067dd 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared, bool isMultiResolution);
+            int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase);
 
     status_t deleteStream();
 
@@ -59,7 +60,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) = 0;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 048d85d..2746289 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -581,7 +581,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
+        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index c1c75c1..b4a4b05 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index cd57299..52ab22f 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,7 +120,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
+        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
 
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 1077a1f..cdcaded 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -48,7 +48,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
 
     status_t deleteInternalStreams() override;
 
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..8f9d813
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,822 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <jpegrecoverymap/recoverymap.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+        CompositeStream(device, cb),
+        mBlobStreamId(-1),
+        mBlobSurfaceId(-1),
+        mP010StreamId(-1),
+        mP010SurfaceId(-1),
+        mBlobWidth(0),
+        mBlobHeight(0),
+        mP010BufferAcquired(false),
+        mBlobBufferAcquired(false),
+        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+        mProducerListener(new ProducerListener()),
+        mMaxJpegBufferSize(-1),
+        mUHRMaxJpegBufferSize(-1),
+        mStaticInfo(device->info()) {
+    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+    if (entry.count > 0) {
+        mMaxJpegBufferSize = entry.data.i32[0];
+    } else {
+        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+    }
+
+    mUHRMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*ultraHighResolution*/true);
+    mDefaultMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*isUltraHighResolution*/false);
+
+    mUHRMaxJpegBufferSize =
+        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+                mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+    mBlobConsumer.clear();
+    mBlobSurface.clear();
+    mBlobStreamId = -1;
+    mBlobSurfaceId = -1;
+    mP010Consumer.clear();
+    mP010Surface.clear();
+    mP010Consumer = nullptr;
+    mP010Surface = nullptr;
+}
+
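+// Pair newly arrived JPEG and P010 input buffers and capture results with the
+// pending frames they belong to, keyed by buffer timestamp, and flag frames
+// that received an error notification.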
+void JpegRCompositeStream::compilePendingInputLocked() {
+    CpuConsumer::LockedBuffer imgBuffer;
+
+    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+        auto it = mInputJpegBuffers.begin();
+        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputJpegBuffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mBlobConsumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+            mBlobBufferAcquired = true;
+        }
+        mInputJpegBuffers.erase(it);
+    }
+
+    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+        auto it = mInputP010Buffers.begin();
+        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputP010Buffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mP010Consumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+            mP010BufferAcquired = true;
+        }
+        mInputP010Buffers.erase(it);
+    }
+
+    while (!mCaptureResults.empty()) {
+        auto it = mCaptureResults.begin();
+        // Negative timestamp indicates that something went wrong during the capture result
+        // collection process.
+        if (it->first >= 0) {
+            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
+            mPendingInputFrames[it->first].result = std::get<1>(it->second);
+        }
+        mCaptureResults.erase(it);
+    }
+
+    while (!mFrameNumberMap.empty()) {
+        auto it = mFrameNumberMap.begin();
+        mPendingInputFrames[it->second].frameNumber = it->first;
+        mFrameNumberMap.erase(it);
+    }
+
+    auto it = mErrorFrameNumbers.begin();
+    while (it != mErrorFrameNumbers.end()) {
+        bool frameFound = false;
+        for (auto &inputFrame : mPendingInputFrames) {
+            if (inputFrame.second.frameNumber == *it) {
+                inputFrame.second.error = true;
+                frameFound = true;
+                break;
+            }
+        }
+
+        if (frameFound) {
+            it = mErrorFrameNumbers.erase(it);
+        } else {
+            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+                    *it);
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+    if (currentTs == nullptr) {
+        return false;
+    }
+
+    bool newInputAvailable = false;
+    for (const auto& it : mPendingInputFrames) {
+        if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+                ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+                (it.first < *currentTs)) {
+            *currentTs = it.first;
+            newInputAvailable = true;
+        }
+    }
+
+    return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+    int64_t ret = -1;
+    if (currentTs == nullptr) {
+        return ret;
+    }
+
+    for (const auto& it : mPendingInputFrames) {
+        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+            *currentTs = it.first;
+            ret = it.second.frameNumber;
+        }
+    }
+
+    return ret;
+}
+
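+// Encode one pending input frame: combine its P010 buffer (and, when internal
+// JPEG support is available, the companion JPEG blob) into a JPEG/R blob
+// written into a buffer dequeued from the output surface.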
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+    status_t res;
+    sp<ANativeWindow> outputANW = mOutputSurface;
+    ANativeWindowBuffer *anb;
+    int fenceFd;
+    void *dstBuffer;
+
+    size_t maxJpegRBufferSize = 0;
+    if (mMaxJpegBufferSize > 0) {
+        // If this is an ultra high resolution sensor and the input frame size
+        // is > default res jpeg.
+        if (mUHRMaxJpegSize.width != 0 &&
+                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+            maxJpegRBufferSize = mUHRMaxJpegBufferSize;
+        } else {
+            maxJpegRBufferSize = mMaxJpegBufferSize;
+        }
+    } else {
+        maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+    }
+
+    uint8_t jpegQuality = 100;
+    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+    if (entry.count > 0) {
+        jpegQuality = entry.data.u8[0];
+    }
+
+    uint8_t jpegOrientation = 0;
+    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
+    if (entry.count > 0) {
+        jpegOrientation = entry.data.i32[0];
+    }
+
+    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer dimensions"
+                " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
+        return res;
+    }
+
+    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return res;
+    }
+
+    if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
+        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+                maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return BAD_VALUE;
+    }
+
+    size_t actualJpegRSize = 0;
+    if (mSupportInternalJpeg) {
+        recoverymap::jpegr_uncompressed_struct p010;
+        recoverymap::jpegr_compressed_struct jpeg;
+        recoverymap::jpegr_compressed_struct jpegR;
+
+        p010.height = inputFrame.p010Buffer.height;
+        p010.width = inputFrame.p010Buffer.width;
+        p010.colorGamut = recoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT2100;
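+        // Copy the Y plane followed by the interleaved CbCr plane into one contiguous buffer for encoding.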
+        size_t yChannelSizeInByte = p010.width * p010.height * 2;
+        size_t uvChannelSizeInByte = p010.width * p010.height;
+        p010.data = new uint8_t[yChannelSizeInByte + uvChannelSizeInByte];
+        std::unique_ptr<uint8_t[]> p010_data;
+        p010_data.reset(reinterpret_cast<uint8_t*>(p010.data));
+        memcpy((uint8_t*)p010.data, inputFrame.p010Buffer.data, yChannelSizeInByte);
+        memcpy((uint8_t*)p010.data + yChannelSizeInByte, inputFrame.p010Buffer.dataCb,
+               uvChannelSizeInByte);
+
+        jpeg.data = inputFrame.jpegBuffer.data;
+        jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+                inputFrame.jpegBuffer.width);
+        if (jpeg.length == 0) {
+            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+                    __FUNCTION__);
+            jpeg.length = inputFrame.jpegBuffer.width;
+        }
+
+        if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+            jpeg.colorGamut = recoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_P3;
+        } else {
+            jpeg.colorGamut = recoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT709;
+        }
+
+        recoverymap::jpegr_transfer_function transferFunction;
+        switch (mP010DynamicRange) {
+            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+                transferFunction = recoverymap::jpegr_transfer_function::JPEGR_TF_PQ;
+                break;
+            default:
+                transferFunction = recoverymap::jpegr_transfer_function::JPEGR_TF_HLG;
+        }
+
+        jpegR.data = dstBuffer;
+        jpegR.maxLength = maxJpegRBufferSize;
+
+        recoverymap::RecoveryMap recoveryMap;
+        res = recoveryMap.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
+        if (res != OK) {
+            ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+
+        actualJpegRSize = jpegR.length;
+        p010_data.release();
+    } else {
+        const uint8_t* exifBuffer = nullptr;
+        size_t exifBufferSize = 0;
+        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+        utils->initializeEmpty();
+        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+                inputFrame.p010Buffer.height);
+        if (utils->generateApp1()) {
+            exifBuffer = utils->getApp1Buffer();
+            exifBufferSize = utils->getApp1Length();
+        } else {
+            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+        }
+    }
+
+    size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
+    if (finalJpegRSize > maxJpegRBufferSize) {
+        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return NO_MEMORY;
+    }
+
+    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+                getStreamId(), strerror(-res), res);
+        return res;
+    }
+
+    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
+    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+        (gb->getWidth() - sizeof(CameraBlob));
+    CameraBlob blobHeader = {
+        .blobId = CameraBlobId::JPEG,
+        .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
+    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+    return res;
+}
+
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+    if (inputFrame == nullptr) {
+        return;
+    }
+
+    if (inputFrame->p010Buffer.data != nullptr) {
+        mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+        inputFrame->p010Buffer.data = nullptr;
+        mP010BufferAcquired = false;
+    }
+
+    if (inputFrame->jpegBuffer.data != nullptr) {
+        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+        inputFrame->jpegBuffer.data = nullptr;
+        mBlobBufferAcquired = false;
+    }
+
+    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+        //TODO: Figure out correct requestId
+        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+        inputFrame->errorNotified = true;
+    }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+    auto it = mPendingInputFrames.begin();
+    while (it != mPendingInputFrames.end()) {
+        if (it->first <= currentTs) {
+            releaseInputFrameLocked(&it->second);
+            it = mPendingInputFrames.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::threadLoop() {
+    int64_t currentTs = INT64_MAX;
+    bool newInputAvailable = false;
+
+    {
+        Mutex::Autolock l(mMutex);
+
+        if (mErrorState) {
+            // In case we landed in error state, return any pending buffers and
+            // halt all further processing.
+            compilePendingInputLocked();
+            releaseInputFramesLocked(currentTs);
+            return false;
+        }
+
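+        // Loop until a fully assembled input (P010 plus optional jpeg) or a failing frame is
+        // found, waiting on the input-ready condition in between.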
+        while (!newInputAvailable) {
+            compilePendingInputLocked();
+            newInputAvailable = getNextReadyInputLocked(&currentTs);
+            if (!newInputAvailable) {
+                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+                if (failingFrameNumber >= 0) {
+                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+                    // possible for two internal stream buffers to fail. In such scenario the
+                    // composite stream should notify the client about a stream buffer error only
+                    // once and this information is kept within 'errorNotified'.
+                    // Any present failed input frames will be removed on a subsequent call to
+                    // 'releaseInputFramesLocked()'.
+                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+                    currentTs = INT64_MAX;
+                }
+
+                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+                if (ret == TIMED_OUT) {
+                    return true;
+                } else if (ret != OK) {
+                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+                            strerror(-ret), ret);
+                    return false;
+                }
+            }
+        }
+    }
+
+    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+    Mutex::Autolock l(mMutex);
+    if (res != OK) {
+        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+                currentTs, strerror(-res), res);
+        mPendingInputFrames[currentTs].error = true;
+    }
+
+    releaseInputFramesLocked(currentTs);
+
+    return true;
+}
+
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+    if (CameraProviderManager::kFrameworkJpegRDisabled) {
+        return false;
+    }
+    ANativeWindow *anw = surface.get();
+    status_t err;
+    int format;
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    int dataspace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+        int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+    if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+        return;
+    }
+
+    switch (dynamicProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+            break;
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+            break;
+        default:
+            *dynamicRange = kP010DefaultDynamicRange;
+            *dataSpace = kP010DefaultDataSpace;
+    }
+}
+
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase) {
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (!device.get()) {
+        ALOGE("%s: Invalid camera device!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
+    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+            mStaticInfo, mP010DynamicRange,
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
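+    // The internal jpeg blob stream is only created when concurrent 10-bit and standard
+    // dynamic range capture is supported.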
+
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = new Surface(producer);
+
+    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+            static_cast<android_dataspace>(mP010DataSpace), rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+            GRALLOC_USAGE_SW_READ_OFTEN,
+            mP010DynamicRange,
+            streamUseCase);
+    if (ret == OK) {
+        mP010StreamId = *id;
+        mP010SurfaceId = (*surfaceIds)[0];
+        mOutputSurface = consumers[0];
+    } else {
+        return ret;
+    }
+
+    if (mSupportInternalJpeg) {
+        BufferQueue::createBufferQueue(&producer, &consumer);
+        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = new Surface(producer);
+        std::vector<int> blobSurfaceId;
+        ret = device->createStream(mBlobSurface, width, height, format,
+                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+                &blobSurfaceId,
+                /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+                /*isShared*/  false,
+                /*isMultiResolution*/ false,
+                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+                streamUseCase,
+                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+                /*colorSpace*/ colorSpace);
+        if (ret == OK) {
+            mBlobSurfaceId = blobSurfaceId[0];
+        } else {
+            return ret;
+        }
+
+        ret = registerCompositeStreamListener(mBlobStreamId);
+        if (ret != OK) {
+            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    ret = registerCompositeStreamListener(getStreamId());
+    if (ret != OK) {
+        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+        return ret;
+    }
+
+    mOutputColorSpace = colorSpace;
+    mBlobWidth = width;
+    mBlobHeight = height;
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::configureStream() {
+    if (isRunning()) {
+        // Processing thread is already running, nothing more to do.
+        return NO_ERROR;
+    }
+
+    if (mOutputSurface.get() == nullptr) {
+        ALOGE("%s: No valid output surface set!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    if (res != OK) {
+        ALOGE("%s: Unable to connect to native window for stream %d",
+                __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+                mP010StreamId);
+        return res;
+    }
+
+    int maxProducerBuffers;
+    ANativeWindow *anw = mP010Surface.get();
+    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    ANativeWindow *anwConsumer = mOutputSurface.get();
+    int maxConsumerBuffers;
+    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+                    &maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_buffer_count(
+                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    run("JpegRCompositeStreamProc");
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+    // The 'CameraDeviceClient' parent will delete the P010 stream
+    requestExit();
+
+    auto ret = join();
+    if (ret != OK) {
+        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+                strerror(-ret), ret);
+    }
+
+    if (mBlobStreamId >= 0) {
+        // Camera devices may not be valid after switching to offline mode.
+        // In this case, all offline streams including internal composite streams
+        // are managed and released by the offline session.
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device.get() != nullptr) {
+            ret = device->deleteStream(mBlobStreamId);
+        }
+
+        mBlobStreamId = -1;
+    }
+
+    if (mOutputSurface != nullptr) {
+        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+        mOutputSurface.clear();
+    }
+
+    return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+    if (item.mDataSpace == kJpegDataSpace) {
+        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+                __func__, ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputJpegBuffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+        ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+                ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputP010Buffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else {
+        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+    }
+}
+
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+    if (outputStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+        outputStreamIds->push_back(mP010StreamId);
+    }
+    (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+    if (mSupportInternalJpeg) {
+        if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+            outputStreamIds->push_back(mBlobStreamId);
+        }
+        (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+    }
+
+    if (currentStreamId != nullptr) {
+        *currentStreamId = mP010StreamId;
+    }
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+        std::vector<int32_t>* compositeStreamIds /*out*/) {
+    if (compositeStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    compositeStreamIds->push_back(mP010StreamId);
+    if (mSupportInternalJpeg) {
+        compositeStreamIds->push_back(mBlobStreamId);
+    }
+
+    return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+    // Processing can continue even in case of result errors.
+    // At the moment Jpeg/R composite stream processing relies mainly on static camera
+    // characteristics data. The capture result is only used for the jpeg quality and,
+    // when it is absent, we default to the maximum quality.
+    eraseResult(resultExtras.frameNumber);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+    bool ret = false;
+    // Buffer errors concerning internal composite streams should not be directly visible to
+    // camera clients. They must only receive a single buffer error with the public composite
+    // stream id.
+    if ((resultExtras.errorStreamId == mP010StreamId) ||
+            (resultExtras.errorStreamId == mBlobStreamId)) {
+        flagAnErrorFrameNumber(resultExtras.frameNumber);
+        ret = true;
+    }
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& staticInfo,
+            std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+    if (compositeOutput == nullptr) {
+        return BAD_VALUE;
+    }
+
+    int64_t dynamicRange, dataSpace;
+    deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+    compositeOutput->clear();
+    compositeOutput->push_back({});
+    (*compositeOutput)[0].width = streamInfo.width;
+    (*compositeOutput)[0].height = streamInfo.height;
+    (*compositeOutput)[0].format = kP010PixelFormat;
+    (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+    (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+    (*compositeOutput)[0].colorSpace =
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+                streamInfo.dynamicRangeProfile,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+        compositeOutput->push_back({});
+        (*compositeOutput)[1].width = streamInfo.width;
+        (*compositeOutput)[1].height = streamInfo.height;
+        (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+        (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+        (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+        (*compositeOutput)[1].dynamicRangeProfile =
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+    }
+
+    return NO_ERROR;
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..181a05d
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
+class JpegRCompositeStream : public CompositeStream, public Thread,
+        public CpuConsumer::FrameAvailableListener {
+
+public:
+    JpegRCompositeStream(sp<CameraDeviceBase> device,
+            wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+    ~JpegRCompositeStream() override;
+
+    static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+    // CompositeStream overrides
+    status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+            bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase) override;
+    status_t deleteInternalStreams() override;
+    status_t configureStream() override;
+    status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+            int32_t* /*out*/currentStreamId) override;
+    status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+    int getStreamId() override { return mP010StreamId; }
+
+    // CpuConsumer listener implementation
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // Return stream information about the internal camera streams
+    static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+protected:
+
+    bool threadLoop() override;
+    bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+    void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
+    struct InputFrame {
+        CpuConsumer::LockedBuffer p010Buffer;
+        CpuConsumer::LockedBuffer jpegBuffer;
+        CameraMetadata            result;
+        bool                      error;
+        bool                      errorNotified;
+        int64_t                   frameNumber;
+        int32_t                   requestId;
+
+        InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
+    };
+
+    status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+    // Buffer/Results handling
+    void compilePendingInputLocked();
+    void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+    void releaseInputFramesLocked(int64_t currentTs);
+
+    // Find first complete and valid frame with smallest timestamp
+    bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+    // Find the failing frame with the smallest timestamp and return its frame number
+    int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
+    static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+            int64_t* /*out*/dataSpace);
+
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+    static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+    static const auto kP010DefaultDynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static const auto kJpegRDataSpace =
+        aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
+
+    bool                 mSupportInternalJpeg = false;
+    int64_t              mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+    int64_t              mP010DynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    int                  mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+    size_t               mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>      mBlobConsumer, mP010Consumer;
+    bool                 mP010BufferAcquired, mBlobBufferAcquired;
+    sp<Surface>          mP010Surface, mBlobSurface, mOutputSurface;
+    int32_t              mOutputColorSpace;
+    sp<ProducerListener> mProducerListener;
+
+    ssize_t              mMaxJpegBufferSize;
+    ssize_t              mUHRMaxJpegBufferSize;
+
+    camera3::Size        mDefaultMaxJpegSize;
+    camera3::Size        mUHRMaxJpegSize;
+
+    // Keep all incoming P010 buffer timestamps pending further processing.
+    std::vector<int64_t> mInputP010Buffers;
+
+    // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+    std::vector<int64_t> mInputJpegBuffers;
+
+    // Map of all input frames pending further processing.
+    std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+    const CameraMetadata mStaticInfo;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index ad24392..4555838 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -37,7 +37,6 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "utils/CameraThreadState.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 
@@ -49,6 +48,7 @@
 Camera2ClientBase<TClientBase>::Camera2ClientBase(
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -66,6 +66,7 @@
                 clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideToPortrait),
         mSharedCameraCallbacks(remoteCallback),
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
@@ -103,11 +104,6 @@
           TClientBase::mCameraIdStr.string());
     status_t res;
 
-    // Verify ops permissions
-    res = TClientBase::startCameraOps();
-    if (res != OK) {
-        return res;
-    }
     IPCTransport providerTransport = IPCTransport::INVALID;
     res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr.string(),
             &providerTransport);
@@ -117,12 +113,14 @@
     switch (providerTransport) {
         case IPCTransport::HIDL:
             mDevice =
-                    new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                    new HidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass,
                             TClientBase::mOverrideToPortrait, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
-                    new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                    new AidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass,
                             TClientBase::mOverrideToPortrait, mLegacyClient);
              break;
         default:
@@ -143,12 +141,30 @@
         return res;
     }
 
+    // Verify ops permissions
+    res = TClientBase::startCameraOps();
+    if (res != OK) {
+        TClientBase::finishCameraOps();
+        mDevice.clear();
+        return res;
+    }
+
     wp<NotificationListener> weakThis(this);
     res = mDevice->setNotifyCallback(weakThis);
+    if (res != OK) {
+        ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+                __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
+        return res;
+    }
 
     /** Start watchdog thread */
     mCameraServiceWatchdog = new CameraServiceWatchdog();
-    mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+    res = mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+    if (res != OK) {
+        ALOGE("%s: Unable to start camera service watchdog thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     return OK;
 }
@@ -166,8 +182,8 @@
         mCameraServiceWatchdog.clear();
     }
 
-    ALOGI("Closed Camera %s. Client was: %s (PID %d, UID %u)",
-            TClientBase::mCameraIdStr.string(),
+    ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
+            __FUNCTION__, TClientBase::mCameraIdStr.string(),
             String8(TClientBase::mClientPackageName).string(),
             mInitialClientPid, TClientBase::mClientUid);
 }
@@ -351,7 +367,7 @@
                     TClientBase::mCameraIdStr.string(), res);
             return res;
         }
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
+        mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
     }
     mDeviceActive = true;
 
@@ -370,7 +386,7 @@
             ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
                     TClientBase::mCameraIdStr.string(), res);
         }
-        CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+        mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
                 streamStats);
     }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d2dcdb1..89347eb 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -19,6 +19,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "camera/CaptureResult.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include "CameraServiceWatchdog.h"
 
 namespace android {
@@ -48,6 +49,7 @@
     // TODO: too many params, move into a ClientArgs<T>
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
+                      std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
                       const String16& clientPackageName,
                       bool systemNativeClient,
                       const std::optional<String16>& clientFeatureId,
@@ -140,6 +142,7 @@
     pid_t mInitialClientPid;
     bool mOverrideForPerfClass = false;
     bool mLegacyClient = false;
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
 
     virtual sp<IBinder> asBinderWrapper() {
         return IInterface::asBinder(this);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 69514f3..977ab7c 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -192,7 +192,9 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+            = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -213,7 +215,9 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+            = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -235,11 +239,13 @@
         bool dataSpaceOverridden;
         android_dataspace originalDataSpace;
         int64_t dynamicRangeProfile;
+        int32_t colorSpace;
 
         StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
                 dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
                 originalDataSpace(HAL_DATASPACE_UNKNOWN),
-                dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+                dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+                colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
         /**
          * Check whether the format matches the current or the original one in case
          * it got overridden.
@@ -434,6 +440,14 @@
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
 
     /**
+     * Set the current behavior for the AUTOFRAMING control when in AUTO.
+     *
+     * The value must be one of the AUTOFRAMING_* values besides AUTO.
+     */
+    virtual status_t setAutoframingAutoBehavior(
+            camera_metadata_enum_android_control_autoframing_t autoframingValue) = 0;
+
+    /**
      * Whether camera muting (producing black-only output) is supported.
      *
      * Calling setCameraMute(true) when this returns false will return an
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 3aab0b1..5b8e3a1 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "CameraProviderManager"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -40,6 +42,7 @@
 #include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <utils/Trace.h>
+#include <ui/PublicFormat.h>
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
@@ -59,6 +62,8 @@
 } // anonymous namespace
 
 const float CameraProviderManager::kDepthARTolerance = .1f;
+const bool CameraProviderManager::kFrameworkJpegRDisabled =
+        property_get_bool("ro.camera.disableJpegR", false);
 
 CameraProviderManager::HidlServiceInteractionProxyImpl
 CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -1071,6 +1076,209 @@
     }
 }
 
+bool CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+        const CameraMetadata& deviceInfo, int64_t profile, int64_t concurrentProfile) {
+    auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (entry.count == 0) {
+        return false;
+    }
+
+    const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+    if (it == entry.data.u8 + entry.count) {
+        return false;
+    }
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+    if (entry.count == 0 || ((entry.count % 3) != 0)) {
+        return false;
+    }
+
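+    // Entries come in triples; the second element is a bitmask of dynamic range profiles that
+    // can be captured concurrently with the first.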
+    for (size_t i = 0; i < entry.count; i += 3) {
+        if (entry.data.i64[i] == profile) {
+            if (entry.data.i64[i+1] & concurrentProfile) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
+    if (kFrameworkJpegRDisabled) {
+        return OK;
+    }
+
+    const int32_t scalerSizesTag =
+              SessionConfigurationUtils::getAppropriateModeTag(
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag =
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+    const int32_t scalerStallDurationsTag =
+                 SessionConfigurationUtils::getAppropriateModeTag(
+                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t jpegRSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t jpegRStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS, maxResolution);
+    const int32_t jpegRMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                 ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
+
+    auto& c = mCameraCharacteristics;
+    std::vector<int32_t> supportedChTags;
+    auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    if (chTags.count == 0) {
+        ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes,
+            supportedDynamicDepthSizes, internalDepthSizes;
+    auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (capabilities.count == 0) {
+        ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    auto end = capabilities.data.u8 + capabilities.count;
+    bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+    if (!isTenBitOutputSupported) {
+        // No 10-bit support, nothing more to do.
+        return OK;
+    }
+
+    if (!isConcurrentDynamicRangeCaptureSupported(c,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+        // Advertise Jpeg/R only when concurrent 10-bit and 8-bit capture is supported.
+        // This restriction can be removed once 10-bit to 8-bit tonemapping is available.
+        return OK;
+    }
+
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+    auto it = supportedP010Sizes.begin();
+    while (it != supportedP010Sizes.end()) {
+        // Resolutions that don't align on 32 pixels are not supported by Jpeg/R.
+        // This can be removed as soon as the encoder restriction is lifted.
+        if ((std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+                supportedBlobSizes.end()) || ((std::get<0>(*it) % 32) != 0)) {
+            it = supportedP010Sizes.erase(it);
+        } else {
+            it++;
+        }
+    }
+    if (supportedP010Sizes.empty()) {
+        // Nothing to do in this case.
+        return OK;
+    }
+
+    std::vector<int32_t> jpegREntries;
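+    // Advertise one Jpeg/R stream configuration entry (format, width, height, direction) per
+    // supported P010 size.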
+    for (const auto& it : supportedP010Sizes) {
+        int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+                static_cast<int32_t> (std::get<1>(it)),
+                ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT };
+        jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
+    }
+
+    std::vector<int64_t> blobMinDurations, blobStallDurations;
+    std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
+
+    // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
+    // durations.
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobMinDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobStallDurations);
+    if (blobStallDurations.empty() || blobMinDurations.empty() ||
+            (blobMinDurations.size() != blobStallDurations.size())) {
+        ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu",
+                __FUNCTION__, blobMinDurations.size(), blobStallDurations.size());
+        return BAD_VALUE;
+    }
+
+    auto itDuration = blobMinDurations.begin();
+    auto itSize = supportedP010Sizes.begin();
+    while (itDuration != blobMinDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    itDuration = blobStallDurations.begin();
+    itSize = supportedP010Sizes.begin();
+    while (itDuration != blobStallDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    supportedChTags.reserve(chTags.count + 3);
+    supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+            chTags.data.i32 + chTags.count);
+    supportedChTags.push_back(jpegRSizesTag);
+    supportedChTags.push_back(jpegRMinFrameDurationsTag);
+    supportedChTags.push_back(jpegRStallDurationsTag);
+    c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
+    c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
+    c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
+    c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+            supportedChTags.size());
+
+    auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+    if (colorSpaces.count > 0 && !maxResolution) {
+        bool displayP3Support = false;
+        int64_t dynamicRange = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        for (size_t i = 0; i < colorSpaces.count; i += 3) {
+            auto colorSpace = colorSpaces.data.i64[i];
+            auto format = colorSpaces.data.i64[i+1];
+            bool formatMatch = (format == static_cast<int64_t>(PublicFormat::JPEG)) ||
+                    (format == static_cast<int64_t>(PublicFormat::UNKNOWN));
+            bool colorSpaceMatch =
+                colorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+            if (formatMatch && colorSpaceMatch) {
+                displayP3Support = true;
+            }
+
+            // Jpeg/R will support the same dynamic range profiles as P010
+            if (format == static_cast<int64_t>(PublicFormat::YCBCR_P010)) {
+                dynamicRange |= colorSpaces.data.i64[i+2];
+            }
+        }
+        if (displayP3Support) {
+            std::vector<int64_t> supportedColorSpaces;
+            // Jpeg/R must support the default system color space as well as the Display P3 color space
+            supportedColorSpaces.reserve(colorSpaces.count + 3*2);
+            supportedColorSpaces.insert(supportedColorSpaces.end(), colorSpaces.data.i64,
+                    colorSpaces.data.i64 + colorSpaces.count);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+            c.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+                    supportedColorSpaces.data(), supportedColorSpaces.size());
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
         bool maxResolution) {
     const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
@@ -1355,6 +1563,19 @@
     return res;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAutoframingTags() {
+    status_t res = OK;
+    auto& c = mCameraCharacteristics;
+
+    auto availableAutoframingEntry = c.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+    if (availableAutoframingEntry.count == 0) {
+        uint8_t  defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE;
+        res = c.update(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+                &defaultAutoframingEntry, 1);
+    }
+    return res;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e8d9a37..acf511b 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -407,7 +407,11 @@
 
     status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
 
+    static bool isConcurrentDynamicRangeCaptureSupported(const CameraMetadata& deviceInfo,
+            int64_t profile, int64_t concurrentProfile);
+
     static const float kDepthARTolerance;
+    static const bool kFrameworkJpegRDisabled;
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
@@ -675,7 +679,9 @@
             status_t fixupTorchStrengthTags();
             status_t addDynamicDepthTags(bool maxResolution = false);
             status_t deriveHeicTags(bool maxResolution = false);
+            status_t deriveJpegRTags(bool maxResolution = false);
             status_t addRotateCropTags();
+            status_t addAutoframingTags();
             status_t addPreCorrectionActiveArraySize();
             status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
 
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 2c035de..84fe3a5 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -501,6 +501,11 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+    res = deriveJpegRTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
 
     if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
         status_t status = addDynamicDepthTags(/*maxResolution*/true);
@@ -514,6 +519,12 @@
             ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
                     "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
         }
+
+        status = deriveJpegRTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
     }
 
     res = addRotateCropTags();
@@ -521,6 +532,11 @@
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
     }
+    res = addAutoframingTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
     res = addPreCorrectionActiveArraySize();
     if (OK != res) {
         ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
@@ -550,6 +566,11 @@
                     "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
                     strerror(-res), res);
         }
+
+        // b/247038031: If system_server crashes, camera_server is restarted as
+        // well. If the flashlight was turned on before the crash, it may remain
+        // stuck on. As a workaround, set the torch mode to OFF.
+        interface->setTorchMode(false);
     } else {
         mHasFlashUnit = false;
     }
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 1df6ec4..258d5fd 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -644,6 +644,11 @@
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
     }
+    res = addAutoframingTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
     res = addPreCorrectionActiveArraySize();
     if (OK != res) {
         ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5e7fe7f..5e99389 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -63,7 +63,6 @@
 #include "utils/CameraThreadState.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <algorithm>
 #include <tuple>
@@ -73,8 +72,9 @@
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool overrideToPortrait,
-        bool legacyClient):
+Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8 &id, bool overrideForPerfClass, bool overrideToPortrait, bool legacyClient):
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
@@ -1001,7 +1001,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
             uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
-            int timestampBase, int mirrorMode) {
+            int timestampBase, int mirrorMode, int32_t colorSpace) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1015,7 +1015,7 @@
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
             format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
             streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
-            streamUseCase, timestampBase, mirrorMode);
+            streamUseCase, timestampBase, mirrorMode, colorSpace);
 }
 
 static bool isRawFormat(int format) {
@@ -1036,7 +1036,7 @@
         const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
         uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
-        int timestampBase, int mirrorMode) {
+        int timestampBase, int mirrorMode, int32_t colorSpace) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
@@ -1045,10 +1045,10 @@
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
             " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
             " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
-            " mirrorMode %d",
+            " mirrorMode %d colorSpace %d",
             mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
             consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
-            dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+            dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace);
 
     status_t res;
     bool wasActive = false;
@@ -1119,7 +1119,7 @@
                 width, height, blobBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         bool maxResolution =
                 sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1134,25 +1134,25 @@
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace);
     }
 
     size_t consumerCount = consumers.size();
@@ -1240,6 +1240,7 @@
     streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
     streamInfo->originalDataSpace = stream->getOriginalDataSpace();
     streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+    streamInfo->colorSpace = stream->getColorSpace();
     return OK;
 }
 
@@ -1461,6 +1462,13 @@
                     &kDefaultJpegQuality, 1);
         }
 
+        // Fill in AUTOFRAMING if not available
+        if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
+            static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+            mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
+                    &kDefaultAutoframingMode, 1);
+        }
+
         *request = mRequestTemplateCache[templateId];
         mLastTemplateId = templateId;
     }
@@ -1521,6 +1529,7 @@
           maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
     if (res != OK) {
+        mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Can't idle device in %f seconds!",
                 maxExpectedDuration/1e9);
     }
@@ -1888,7 +1897,8 @@
                     stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
                     stream->getMaxHalBuffers(),
                     stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
-                    stream->getDynamicRangeProfile(), streamUseCase);
+                    stream->getDynamicRangeProfile(), streamUseCase,
+                    stream->getColorSpace());
             }
         }
     }
@@ -2138,6 +2148,15 @@
         newRequest->mRotateAndCropAuto = false;
     }
 
+    auto autoframingEntry =
+            newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count > 0 &&
+            autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        newRequest->mAutoframingAuto = true;
+    } else {
+        newRequest->mAutoframingAuto = false;
+    }
+
     auto zoomRatioEntry =
             newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     if (zoomRatioEntry.count > 0 &&
@@ -2240,7 +2259,6 @@
     if (mStatus == STATUS_ACTIVE) {
         markClientActive = true;
         mPauseStateNotify = true;
-        mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
 
         rc = internalPauseAndWaitLocked(maxExpectedDuration);
     }
@@ -2272,7 +2290,7 @@
         ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
     }
 
-    CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
+    mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
         ns2ms(systemTime() - startTime));
 
     if (markClientActive) {
@@ -2690,7 +2708,7 @@
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
         bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
-        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
         const std::set<std::string>& cameraIdsWithZoom,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
@@ -2699,8 +2717,8 @@
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
             hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
-            isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
-            outputSurfaces));
+            isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
+            requestTimeNs, outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -2908,6 +2926,7 @@
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
         mComposerOutput(false),
         mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
         mCameraMuteChanged(false),
@@ -3460,6 +3479,10 @@
         if (res == OK) {
             sp<Camera3Device> parent = mParent.promote();
             if (parent != nullptr) {
+                sp<StatusTracker> statusTracker = mStatusTracker.promote();
+                if (statusTracker != nullptr) {
+                    statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+                }
                 mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
             }
             setPaused(false);
@@ -3589,13 +3612,14 @@
         // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
         bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
             overrideAutoRotateAndCrop(captureRequest);
+        bool autoframingChanged = overrideAutoframing(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
         // changing overrides this time
         bool newRequest =
-                (mPrevRequest != captureRequest || triggersMixedIn ||
-                        rotateAndCropChanged || testPatternChanged) &&
+                (mPrevRequest != captureRequest || triggersMixedIn || rotateAndCropChanged ||
+                         autoframingChanged || testPatternChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -3924,7 +3948,8 @@
                 expectedDurationInfo.maxDuration,
                 expectedDurationInfo.isFixedFps,
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
-                captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
+                captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
+                mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
                                       SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -4065,6 +4090,17 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::setAutoframingAutoBehaviour(
+        camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
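+    // The override has to resolve to a concrete setting, so AUTO itself is rejected; the stored
+    // value is what AUTOFRAMING_AUTO requests get rewritten to in overrideAutoframing().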
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        return BAD_VALUE;
+    }
+    mAutoframingOverride = autoframingValue;
+    return OK;
+}
+
 status_t Camera3Device::RequestThread::setComposerSurface(bool composerSurfacePresent) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
@@ -4664,6 +4700,31 @@
     return false;
 }
 
+bool Camera3Device::RequestThread::overrideAutoframing(const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+
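+    // Only requests that originally asked for AUTOFRAMING_AUTO are rewritten; the return value
+    // reports whether the effective AUTOFRAMING value changed, so the caller can treat the
+    // request as new and re-send its settings to the HAL.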
+    if (request->mAutoframingAuto) {
+        Mutex::Autolock l(mTriggerMutex);
+        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+
+        auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+        if (autoframingEntry.count > 0) {
+            if (autoframingEntry.data.u8[0] == mAutoframingOverride) {
+                return false;
+            } else {
+                autoframingEntry.data.u8[0] = mAutoframingOverride;
+                return true;
+            }
+        } else {
+            uint8_t autoframing_u8 = mAutoframingOverride;
+            metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+                    &autoframing_u8, 1);
+            return true;
+        }
+    }
+    return false;
+}
+
 bool Camera3Device::RequestThread::overrideTestPattern(
         const sp<CaptureRequest> &request) {
     ATRACE_CALL();
@@ -5149,6 +5210,17 @@
     return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
 }
 
+status_t Camera3Device::setAutoframingAutoBehavior(
+    camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+    if (mRequestThread == nullptr) {
+        return INVALID_OPERATION;
+    }
+    return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
+}
+
 bool Camera3Device::supportsCameraMute() {
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index bc759c5..9b75ac5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -49,6 +49,7 @@
 #include "utils/TagMonitor.h"
 #include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include <camera_metadata_hidden.h>
 
 using android::camera3::camera_capture_request_t;
@@ -82,7 +83,8 @@
   friend class AidlCamera3Device;
   public:
 
-    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+    explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~Camera3Device();
@@ -150,7 +152,9 @@
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+            override;
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -165,7 +169,9 @@
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+            override;
 
     status_t createInputStream(
             uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -268,6 +274,14 @@
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
 
     /**
+     * Set the current behavior for the AUTOFRAMING control when in AUTO.
+     *
+     * The value must be one of the AUTOFRAMING_* values besides AUTO.
+     */
+    status_t setAutoframingAutoBehavior(
+            camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
+    /**
      * Whether camera muting (producing black-only output) is supported.
      *
      * Calling setCameraMute(true) when this returns false will return an
@@ -326,6 +340,8 @@
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // A lock to enforce serialization on the input/configure side
     // of the public interface.
     // Not locked by methods guarded by mOutputLock, since they may act
@@ -596,6 +612,9 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Whether this request has AUTOFRAMING_AUTO set, so the AUTOFRAMING value needs to be
+        // overridden in the capture request.
+        bool                                mAutoframingAuto;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -609,6 +628,8 @@
         // Whether this capture request's rotation and crop update has been
         // done.
         bool                                mRotationAndCropUpdated = false;
+        // Whether this capture request's autoframing has been done.
+        bool                                mAutoframingUpdated = false;
         // Whether this capture request's zoom ratio update has been done.
         bool                                mZoomRatioUpdated = false;
         // Whether this max resolution capture request's crop / metering region update has been
@@ -911,6 +932,10 @@
 
         status_t setRotateAndCropAutoBehavior(
                 camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
+
+        status_t setAutoframingAutoBehaviour(
+                camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
         status_t setComposerSurface(bool composerSurfacePresent);
 
         status_t setCameraMute(int32_t muteMode);
@@ -937,6 +962,9 @@
         // Override rotate_and_crop control if needed; returns true if the current value was changed
         bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
 
+        // Override autoframing control if needed; returns true if the current value was changed
+        bool               overrideAutoframing(const sp<CaptureRequest> &request);
+
         // Override test_pattern control if needed for camera mute; returns true
         // if the current value was changed
         bool               overrideTestPattern(const sp<CaptureRequest> &request);
@@ -1071,6 +1099,7 @@
         uint32_t           mCurrentAfTriggerId;
         uint32_t           mCurrentPreCaptureTriggerId;
         camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+        camera_metadata_enum_android_control_autoframing_t mAutoframingOverride;
         bool               mComposerOutput;
         int32_t            mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
         bool               mCameraMuteChanged;
@@ -1122,7 +1151,7 @@
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
             bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
             bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
-            bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+            bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
             nsecs_t requestTimeNs);
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f594f84..a78d01e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -35,11 +35,12 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
-        bool deviceTimeBaseIsRealtime, int timestampBase) :
+        bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
                 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+                dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+                colorSpace),
         mTotalBufferCount(0),
         mMaxCachedBufferCount(0),
         mHandoutTotalBufferCount(0),
@@ -93,6 +94,7 @@
     }
     lines.appendFormat("      Dynamic Range Profile: 0x%" PRIx64 "\n",
             camera_stream::dynamic_range_profile);
+    lines.appendFormat("      Color Space: %d\n", camera_stream::color_space);
     lines.appendFormat("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
     lines.appendFormat("      Timestamp base: %d\n", getTimestampBase());
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index ca1f238..6af0875 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
-            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
 
   public:
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 84b5aa4..1abcd86 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -56,12 +56,12 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -91,11 +91,11 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
-                            deviceTimeBaseIsRealtime, timestampBase),
+                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -131,12 +131,12 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -180,13 +180,13 @@
                                          int setId, bool isMultiResolution,
                                          int64_t dynamicRangeProfile, int64_t streamUseCase,
                                          bool deviceTimeBaseIsRealtime, int timestampBase,
-                                         int mirrorMode) :
+                                         int mirrorMode, int32_t colorSpace) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
@@ -331,7 +331,7 @@
     status_t res =
             gbLocker.lockAsync(
                     GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
-                    &mapped, fenceFd.get());
+                    &mapped, fenceFd.release());
     if (res != OK) {
         ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
         return res;
@@ -513,10 +513,6 @@
         mStreamUnpreparable = true;
     }
 
-    if (res != OK) {
-        close(anwReleaseFence);
-    }
-
     *releaseFenceOut = releaseFence;
 
     return res;
@@ -713,7 +709,8 @@
             mTotalBufferCount += mMaxCachedBufferCount;
             res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
             if (res != OK) {
-                ALOGE("%s: Unable to start preview spacer", __FUNCTION__);
+                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
+                        strerror(-res), res);
                 return res;
             }
         }
@@ -1327,7 +1324,7 @@
     void* mapped = nullptr;
     base::unique_fd fenceFd(dup(fence));
     status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
-            fenceFd.get());
+            fenceFd.release());
     if (res != OK) {
         ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
         return;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index db988a0..0d758bc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,8 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -113,7 +114,8 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +131,8 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
 
     virtual ~Camera3OutputStream();
 
@@ -273,7 +276,8 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
 
     /**
      * Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 6569395..a441638 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -128,12 +128,71 @@
     return res;
 }
 
+status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
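+    // If the HAL omitted ANDROID_CONTROL_AUTOFRAMING or ANDROID_CONTROL_AUTOFRAMING_STATE from
+    // this capture result, fill in the OFF / INACTIVE defaults so clients always see consistent
+    // autoframing metadata.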
+    status_t res = OK;
+    camera_metadata_entry autoframingEntry =
+            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count == 0) {
+        const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
+        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
+                  __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    camera_metadata_entry autoframingStateEntry =
+            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
+    if (autoframingStateEntry.count == 0) {
+        const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
+        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
+                                    &defaultAutoframingStateEntry, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
+                  __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    return res;
+}
+
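+// Metering regions are stored as (left, top, right, bottom, weight) 5-tuples. Clamp any
+// inverted region reported by the HAL so that right >= left and bottom >= top before the
+// result is handed to clients.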
+void correctMeteringRegions(camera_metadata_t *meta) {
+    if (meta == nullptr) return;
+
+    uint32_t meteringRegionKeys[] = {
+            ANDROID_CONTROL_AE_REGIONS,
+            ANDROID_CONTROL_AWB_REGIONS,
+            ANDROID_CONTROL_AF_REGIONS };
+
+    for (uint32_t key : meteringRegionKeys) {
+        camera_metadata_entry_t entry;
+        int res = find_camera_metadata_entry(meta, key, &entry);
+        if (res != OK) continue;
+
+        for (size_t i = 0; i + 4 < entry.count; i += 5) {
+            if (entry.data.i32[i] > entry.data.i32[i + 2]) {
+                ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
+                        __FUNCTION__, key, entry.data.i32[i], entry.data.i32[i + 2]);
+                entry.data.i32[i + 2] = entry.data.i32[i];
+            }
+            if (entry.data.i32[i + 1] > entry.data.i32[i + 3]) {
+                ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
+                        __FUNCTION__, key, entry.data.i32[i + 1], entry.data.i32[i + 3]);
+                entry.data.i32[i + 3] = entry.data.i32[i + 1];
+            }
+        }
+    }
+}
+
 void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
     if (result == nullptr) return;
 
     camera_metadata_t *meta = const_cast<camera_metadata_t *>(
             result->mMetadata.getAndLock());
     set_camera_metadata_vendor_id(meta, states.vendorTagId);
+    correctMeteringRegions(meta);
     result->mMetadata.unlock(meta);
 
     if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
@@ -152,6 +211,7 @@
         camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
                 physicalMetadata.mPhysicalCameraMetadata.getAndLock());
         set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+        correctMeteringRegions(pmeta);
         physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
     }
 
@@ -324,6 +384,22 @@
         }
     }
 
+    // Fix up autoframing metadata
+    res = fixupAutoframingTags(captureResult.mMetadata);
+    if (res != OK) {
+        SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
+                strerror(-res), res);
+        return;
+    }
+    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+        res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
+        if (res != OK) {
+            SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
+                    strerror(-res), res);
+            return;
+        }
+    }
+
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
         String8 cameraId8(physicalMetadata.mPhysicalCameraId);
         auto mapper = states.distortionMappers.find(cameraId8.c_str());
@@ -464,6 +540,32 @@
     return found;
 }
 
+const std::set<std::string>& getCameraIdsWithZoomLocked(
+        const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
+        const std::set<std::string>& cameraIdsWithZoom) {
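+    // If the HAL applied the zoom settings override from an earlier (overriding) frame to this
+    // result, return that frame's set of zoomed camera ids from the in-flight map so the result
+    // metadata reflects the settings actually used; otherwise keep the request's own set.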
+    camera_metadata_ro_entry overrideEntry =
+            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+    camera_metadata_ro_entry frameNumberEntry =
+            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
+    if (overrideEntry.count != 1
+            || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
+            || frameNumberEntry.count != 1) {
+        // No valid overriding frame number, skip
+        return cameraIdsWithZoom;
+    }
+
+    uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
+    ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
+    if (idx < 0) {
+        ALOGE("%s: Failed to find pending request #%d in inflight map",
+                __FUNCTION__, overridingFrameNumber);
+        return cameraIdsWithZoom;
+    }
+
+    const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
+    return r.cameraIdsWithZoom;
+}
+
 void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
     ATRACE_CALL();
 
@@ -652,10 +754,12 @@
             } else if (request.hasCallback) {
                 CameraMetadata metadata;
                 metadata = result->result;
+                auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+                        states.inflightMap, metadata, request.cameraIdsWithZoom);
                 sendCaptureResult(states, metadata, request.resultExtras,
                     collectedPartialResult, frameNumber,
                     hasInputBufferInRequest, request.zslCapture && request.stillCapture,
-                    request.rotateAndCropAuto, request.cameraIdsWithZoom,
+                    request.rotateAndCropAuto, cameraIdsWithZoom,
                     request.physicalMetadatas);
             }
         }
@@ -882,11 +986,13 @@
                     states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                 }
                 // send pending result and buffers
+                const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+                        inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                 sendCaptureResult(states,
                     r.pendingMetadata, r.resultExtras,
                     r.collectedPartialResult, msg.frame_number,
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
-                    r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
+                    r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
             }
             returnAndRemovePendingOutputBuffers(
                     states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 9215f23..da45227 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,12 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool useHalBufManager, int64_t dynamicProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             transport, consumerUsage, timestampOffset, setId,
                             /*isMultiResolution*/false, dynamicProfile, streamUseCase,
-                            deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+                            deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
     if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index aac3c2a..5167225 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,8 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
 
     virtual ~Camera3SharedOutputStream();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 88be9ff..4d8495f 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,8 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int32_t colorSpace) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -95,6 +96,7 @@
     camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
     camera_stream::dynamic_range_profile = dynamicRangeProfile;
     camera_stream::use_case = streamUseCase;
+    camera_stream::color_space = colorSpace;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -135,6 +137,10 @@
     return camera_stream::data_space;
 }
 
+int32_t Camera3Stream::getColorSpace() const {
+    return camera_stream::color_space;
+}
+
 uint64_t Camera3Stream::getUsage() const {
     return mUsage;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 214618a..f32053b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,7 @@
     uint32_t          getHeight() const;
     int               getFormat() const;
     android_dataspace getDataSpace() const;
+    int32_t           getColorSpace() const;
     uint64_t          getUsage() const;
     void              setUsage(uint64_t usage);
     void              setFormatOverride(bool formatOverriden);
@@ -509,7 +510,8 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+            int32_t colorSpace);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6812e89..823be2e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
     std::unordered_set<int32_t> sensor_pixel_modes_used;
     int64_t dynamic_range_profile;
     int64_t use_case;
+    int32_t color_space;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
         int64_t streamUseCase;
         int timestampBase;
         int mirrorMode;
+        int32_t colorSpace;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0),
             dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
             streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
             timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
-            mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+            mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+            colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
                 uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
-                int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+                int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+                int32_t _colorSpace) :
             width(_width), height(_height), format(_format),
             dataSpace(_dataSpace), consumerUsage(_consumerUsage),
             sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
-            streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+            streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+            colorSpace(_colorSpace) {}
 };
 
 // Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
     virtual int      getFormat() const = 0;
     virtual int64_t  getDynamicRangeProfile() const = 0;
     virtual android_dataspace getDataSpace() const = 0;
+    virtual int32_t getColorSpace() const = 0;
     virtual void setFormatOverride(bool formatOverriden) = 0;
     virtual bool isFormatOverridden() const = 0;
     virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 444445b..870825a 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -182,6 +182,9 @@
     // Indicates that ROTATE_AND_CROP was set to AUTO
     bool rotateAndCropAuto;
 
+    // Indicates that AUTOFRAMING was set to AUTO
+    bool autoframingAuto;
+
     // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
     std::set<std::string> cameraIdsWithZoom;
 
@@ -214,6 +217,7 @@
             stillCapture(false),
             zslCapture(false),
             rotateAndCropAuto(false),
+            autoframingAuto(false),
             requestTimeNs(0),
             transform(-1) {
     }
@@ -221,8 +225,9 @@
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
             const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
-            bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
-            nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
+            bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
+            const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+            const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
             sensorTimestamp(0),
             requestStatus(OK),
@@ -240,6 +245,7 @@
             stillCapture(isStillCapture),
             zslCapture(isZslCapture),
             rotateAndCropAuto(rotateAndCropAuto),
+            autoframingAuto(autoframingAuto),
             cameraIdsWithZoom(idsWithZoom),
             requestTimeNs(requestNs),
             outputSurfaces(outSurfaces),
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 27b00c9..515259e 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -354,17 +354,8 @@
             if (weight == 0) {
                 continue;
             }
-            // Top left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+            scaleRegion(entry.data.i32 + j, zoomRatio, arrayWidth,
                     arrayHeight);
-            // Bottom right (exclusive): Use adjacent inclusive pixel to
-            // calculate.
-            entry.data.i32[j+2] -= 1;
-            entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
-                    arrayHeight);
-            entry.data.i32[j+2] += 1;
-            entry.data.i32[j+3] += 1;
         }
     }
 
@@ -401,17 +392,8 @@
             if (weight == 0) {
                 continue;
             }
-            // Top-left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+            scaleRegion(entry.data.i32 + j, 1.0 / zoomRatio, arrayWidth,
                     arrayHeight);
-            // Bottom-right (exclusive): Use adjacent inclusive pixel to
-            // calculate.
-            entry.data.i32[j+2] -= 1;
-            entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
-                    arrayHeight);
-            entry.data.i32[j+2] += 1;
-            entry.data.i32[j+3] += 1;
         }
     }
     for (auto rect : kRectsToCorrect) {
@@ -470,6 +452,24 @@
     }
 }
 
+void ZoomRatioMapper::scaleRegion(int32_t* region, float scaleRatio,
+        int32_t arrayWidth, int32_t arrayHeight) {
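+    // Scale one metering region (left, top, right, bottom) in place; the bottom-right corner is
+    // exclusive, so it is mapped to its inclusive neighbor before scaling and restored after, and
+    // the result is clamped so the bottom-right corner never precedes the top-left one.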
+    // Top-left (inclusive)
+    scaleCoordinates(region, 1, scaleRatio, true /*clamp*/, arrayWidth,
+            arrayHeight);
+    // Bottom-right (exclusive): Use adjacent inclusive pixel to
+    // calculate.
+    region[2] -= 1;
+    region[3] -= 1;
+    scaleCoordinates(region + 2, 1, scaleRatio, true /*clamp*/, arrayWidth,
+            arrayHeight);
+    region[2] += 1;
+    region[3] += 1;
+    // Make sure bottom-right >= top-left
+    region[2] = std::max(region[0], region[2]);
+    region[3] = std::max(region[1], region[3]);
+}
+
 void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
         float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
     for (int i = 0; i < rectCount * 4; i += 4) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index b7a9e41..1aa8e78 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -69,6 +69,8 @@
   public: // Visible for testing. Do not use concurently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
             float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
+    void scaleRegion(int32_t* region, float scaleRatio,
+            int32_t arrayWidth, int32_t arrayHeight);
 
     bool isValid() { return mIsValid; }
   private:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 1e103f2..7eba57f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -162,9 +162,12 @@
     return (uint64_t)usage;
 }
 
-AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-        bool overrideToPortrait, bool legacyClient) :
-        Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+AidlCamera3Device::AidlCamera3Device(
+        std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+        legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -914,6 +917,7 @@
                     cam3stream->getOriginalFormat() : src->format);
         dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
                     cam3stream->getOriginalDataSpace() : src->data_space);
+        dst.colorSpace = src->color_space;
 
         dst.bufferSize = bufferSizes[i];
         if (src->physical_camera_id != nullptr) {
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 630985f..e61f8f7 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,9 @@
     using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
     class AidlCameraDeviceCallbacks;
     friend class AidlCameraDeviceCallbacks;
-    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+    explicit AidlCamera3Device(
+            std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 2bd4660..205a899 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,9 +31,11 @@
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
-          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
-          legacyClient) { }
+   explicit HidlCamera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient = false) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+                legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index ae4d5dd..9098fe8 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -74,6 +74,17 @@
           ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
           ANDROID_SENSOR_READOUT_TIMESTAMP,
         } },
+      {34, {
+          ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+          ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+        } },
 };
 
 /**
@@ -90,4 +101,12 @@
           ANDROID_SENSOR_PIXEL_MODE,
           ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
         }  },
+      {34, {
+          ANDROID_CONTROL_AUTOFRAMING,
+          ANDROID_CONTROL_AUTOFRAMING_STATE,
+          ANDROID_CONTROL_SETTINGS_OVERRIDE,
+          ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+          ANDROID_EXTENSION_CURRENT_TYPE,
+          ANDROID_EXTENSION_STRENGTH,
+        }  },
 };
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index e43b91f..b1bf41e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,7 +52,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
@@ -64,6 +64,7 @@
     fuzz_config: {
         cc: [
             "android-media-fuzzing-reports@google.com",
+            "android-camera-fwk-eng@google.com",
         ],
         componentid: 155276,
         libfuzzer_options: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 09f8eb6..072fcfb 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -111,12 +111,15 @@
     size_t mPreviewBufferCount = 0;
     bool mAutoFocusMessage = false;
     bool mSnapshotNotification = false;
+    bool mRecordingNotification = false;
     mutable Mutex mPreviewLock;
     mutable Condition mPreviewCondition;
     mutable Mutex mAutoFocusLock;
     mutable Condition mAutoFocusCondition;
     mutable Mutex mSnapshotLock;
     mutable Condition mSnapshotCondition;
+    mutable Mutex mRecordingLock;
+    mutable Condition mRecordingCondition;
 
     void getNumCameras();
     void getCameraInformation(int32_t cameraId);
@@ -125,6 +128,7 @@
     void invokeDump();
     void invokeShellCommand();
     void invokeNotifyCalls();
+    void invokeTorchAPIs(int32_t cameraId);
 
     // CameraClient interface
     void notifyCallback(int32_t msgType, int32_t, int32_t) override;
@@ -152,6 +156,8 @@
             Mutex::Autolock l(mPreviewLock);
             ++mPreviewBufferCount;
             mPreviewCondition.broadcast();
+            mRecordingNotification = true;
+            mRecordingCondition.broadcast();
             break;
         }
         case CAMERA_MSG_COMPRESSED_IMAGE: {
@@ -311,115 +317,154 @@
     mCameraService->notifySystemEvent(eventId, args);
 }
 
+void CameraFuzzer::invokeTorchAPIs(int32_t cameraId) {
+    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    sp<IBinder> binder = new BBinder;
+
+    mCameraService->setTorchMode(cameraIdStr, true, binder);
+    ALOGV("Turned torch on.");
+    int32_t torchStrength = rand() % 5 + 1;
+    ALOGV("Changing torch strength level to %d", torchStrength);
+    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
+    mCameraService->setTorchMode(cameraIdStr, false, binder);
+    ALOGV("Turned torch off.");
+}
+
 void CameraFuzzer::invokeCameraAPIs() {
-    for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
-        getCameraInformation(cameraId);
+    /** In order to avoid the timeout issue caused due to multiple iteration of loops, the 'for'
+     * loops are removed and the 'cameraId', 'pictureSize' and 'videoSize' are derived using the
+     * FuzzedDataProvider from the available cameras and vectors of 'pictureSizes' and 'videoSizes'
+     */
+    int32_t cameraId = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(0, mNumCameras - 1);
+    getCameraInformation(cameraId);
+    invokeTorchAPIs(cameraId);
 
-        ::android::binder::Status rc;
-        sp<ICamera> cameraDevice;
+    ::android::binder::Status rc;
+    sp<ICamera> cameraDevice;
 
-        rc = mCameraService->connect(this, cameraId, String16(),
-                android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
-                &cameraDevice);
-        if (!rc.isOk()) {
-            // camera not connected
-            return;
+    rc = mCameraService->connect(this, cameraId, String16(),
+                                 android::CameraService::USE_CALLING_UID,
+                                 android::CameraService::USE_CALLING_PID,
+                                 /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                                 /*overrideToPortrait*/true, &cameraDevice);
+    if (!rc.isOk()) {
+        // camera not connected
+        return;
+    }
+    if (cameraDevice) {
+        sp<Surface> previewSurface;
+        sp<SurfaceControl> surfaceControl;
+        CameraParameters params(cameraDevice->getParameters());
+        String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+        bool isAFSupported = false;
+        const char* focusMode = nullptr;
+
+        if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+            isAFSupported = true;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_MACRO;
         }
-        if (cameraDevice) {
-            sp<Surface> previewSurface;
-            sp<SurfaceControl> surfaceControl;
-            CameraParameters params(cameraDevice->getParameters());
-            String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
-            bool isAFSupported = false;
-            const char *focusMode = nullptr;
+        if (nullptr != focusMode) {
+            params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+            cameraDevice->setParameters(params.flatten());
+        }
+        int previewWidth, previewHeight;
+        params.getPreviewSize(&previewWidth, &previewHeight);
 
-            if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
-                isAFSupported = true;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_MACRO;
-            }
-            if (nullptr != focusMode) {
-                params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
-                cameraDevice->setParameters(params.flatten());
-            }
-            int previewWidth, previewHeight;
-            params.getPreviewSize(&previewWidth, &previewHeight);
+        mComposerClient = new SurfaceComposerClient;
+        mComposerClient->initCheck();
 
-            mComposerClient = new SurfaceComposerClient;
-            mComposerClient->initCheck();
-
-            bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
-            int layerMetaData;
-            if (shouldPassInvalidLayerMetaData) {
-                layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
-            } else {
-                layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+        bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+        int layerMetaData;
+        if (shouldPassInvalidLayerMetaData) {
+            layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+        } else {
+            layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
                     0, kNumLayerMetaData - 1)];
-            }
-            surfaceControl = mComposerClient->createSurface(
+        }
+        surfaceControl = mComposerClient->createSurface(
                 String8("Test Surface"), previewWidth, previewHeight,
                 CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
 
-            if (surfaceControl.get() != nullptr) {
-                SurfaceComposerClient::Transaction{}
+        if (surfaceControl.get()) {
+            SurfaceComposerClient::Transaction{}
                     .setLayer(surfaceControl, 0x7fffffff)
                     .show(surfaceControl)
                     .apply();
 
-                previewSurface = surfaceControl->getSurface();
+            previewSurface = surfaceControl->getSurface();
+            if (previewSurface.get()) {
                 cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
             }
-            cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+        }
+        cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
 
-            Vector<Size> pictureSizes;
-            params.getSupportedPictureSizes(pictureSizes);
+        Vector<Size> pictureSizes;
+        params.getSupportedPictureSizes(pictureSizes);
 
-            for (size_t i = 0; i < pictureSizes.size(); ++i) {
-                params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
-                cameraDevice->setParameters(params.flatten());
-                cameraDevice->startPreview();
-                waitForPreviewStart();
-                cameraDevice->autoFocus();
-                waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
-                bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
-                int msgType;
-                if (shouldPassInvalidCameraMsg) {
-                    msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
-                } else {
-                    msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+        if (pictureSizes.size()) {
+            Size pictureSize = pictureSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, pictureSizes.size() - 1)];
+            params.setPictureSize(pictureSize.width, pictureSize.height);
+            cameraDevice->setParameters(params.flatten());
+            cameraDevice->startPreview();
+            waitForPreviewStart();
+            cameraDevice->autoFocus();
+            waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+            bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+            int msgType;
+            if (shouldPassInvalidCameraMsg) {
+                msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+            } else {
+                msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
                         0, kNumCameraMsg - 1)];
-                }
-                cameraDevice->takePicture(msgType);
-
-                waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
             }
+            cameraDevice->takePicture(msgType);
 
-            Vector<Size> videoSizes;
-            params.getSupportedVideoSizes(videoSizes);
+            waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+            cameraDevice->stopPreview();
+        }
 
-            for (size_t i = 0; i < videoSizes.size(); ++i) {
-                params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+        Vector<Size> videoSizes;
+        params.getSupportedVideoSizes(videoSizes);
 
-                cameraDevice->setParameters(params.flatten());
-                cameraDevice->startPreview();
-                waitForPreviewStart();
-                cameraDevice->setVideoBufferMode(
+        if (videoSizes.size()) {
+            Size videoSize = videoSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, videoSizes.size() - 1)];
+            params.setVideoSize(videoSize.width, videoSize.height);
+
+            cameraDevice->setParameters(params.flatten());
+            cameraDevice->startPreview();
+            waitForPreviewStart();
+            cameraDevice->setVideoBufferMode(
                     android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
-                cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
-                cameraDevice->startRecording();
-                cameraDevice->stopRecording();
+            sp<SurfaceControl> surfaceControlVideo = mComposerClient->createSurface(
+                    String8("Test Surface Video"), previewWidth, previewHeight,
+                    CameraParameters::previewFormatToEnum(params.getPreviewFormat()),
+                    layerMetaData);
+            if (surfaceControlVideo.get()) {
+                SurfaceComposerClient::Transaction{}
+                        .setLayer(surfaceControlVideo, 0x7fffffff)
+                        .show(surfaceControlVideo)
+                        .apply();
+                sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
+                if (previewSurfaceVideo.get()) {
+                    cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+                }
             }
             cameraDevice->stopPreview();
-            cameraDevice->disconnect();
+            cameraDevice->startRecording();
+            waitForEvent(mRecordingLock, mRecordingCondition, mRecordingNotification);
+            cameraDevice->stopRecording();
         }
+        cameraDevice->disconnect();
     }
 }
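Editor's note: the rewritten fuzzer above draws a single cameraId, picture size, and video size per run instead of looping over every combination. A minimal, self-contained sketch of that selection pattern, assuming libFuzzer's FuzzedDataProvider header; the Size struct and the candidate list are illustrative stand-ins, not camera code:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative stand-in for the camera size type used in the fuzzer above.
struct Size { int width; int height; };

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    const std::vector<Size> pictureSizes = {{640, 480}, {1280, 720}, {1920, 1080}};
    if (!pictureSizes.empty()) {
        // Pick exactly one candidate per input instead of looping over all of them,
        // which keeps each fuzz iteration short and avoids timeouts.
        const Size picked = pictureSizes[
                fdp.ConsumeIntegralInRange<size_t>(0, pictureSizes.size() - 1)];
        (void)picked;
    }
    return 0;
}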
 
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 3616572..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -27,8 +27,13 @@
         "external/dynamic_depth/internal",
     ],
 
+    header_libs: [
+        "libmedia_headers",
+    ],
+
     shared_libs: [
         "libbase",
+        "libbinder",
         "libcutils",
         "libcameraservice",
         "libhidlbase",
@@ -44,7 +49,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -57,6 +62,7 @@
     ],
 
     srcs: [
+        "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
         "ClientManagerTest.cpp",
         "DepthProcessorTest.cpp",
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
new file mode 100644
index 0000000..731eebf
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/BnCameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraService.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "../CameraService.h"
+#include "../utils/CameraServiceProxyWrapper.h"
+
+#include <gtest/gtest.h>
+
+#include <memory>
+#include <vector>
+
+using namespace android;
+using namespace android::hardware::camera;
+
+// Empty service listener.
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+            int32_t /*torchStrength*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+// Empty device callback.
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+// Override isCameraDisabled from the CameraServiceProxy with a flag.
+class CameraServiceProxyOverride : public ::android::hardware::BnCameraServiceProxy {
+public:
+    CameraServiceProxyOverride() :
+            mCameraServiceProxy(CameraServiceProxyWrapper::getDefaultCameraServiceProxy()),
+            mCameraDisabled(false), mOverrideCameraDisabled(false)
+    { }
+
+    virtual binder::Status getRotateAndCropOverride(const String16& packageName, int lensFacing,
+            int userId, int *ret) override {
+        return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
+                userId, ret);
+    }
+
+    virtual binder::Status getAutoframingOverride(const String16& packageName, int *ret) override {
+        return mCameraServiceProxy->getAutoframingOverride(packageName, ret);
+    }
+
+    virtual binder::Status pingForUserUpdate() override {
+        return mCameraServiceProxy->pingForUserUpdate();
+    }
+
+    virtual binder::Status notifyCameraState(
+            const hardware::CameraSessionStats& cameraSessionStats) override {
+        return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
+    }
+
+    virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
+        if (mOverrideCameraDisabled) {
+            *ret = mCameraDisabled;
+            return binder::Status::ok();
+        }
+        return mCameraServiceProxy->isCameraDisabled(userId, ret);
+    }
+
+    void setCameraDisabled(bool cameraDisabled) {
+        mCameraDisabled = cameraDisabled;
+    }
+
+    void setOverrideCameraDisabled(bool overrideCameraDisabled) {
+        mOverrideCameraDisabled = overrideCameraDisabled;
+    }
+
+protected:
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
+    bool mCameraDisabled;
+    bool mOverrideCameraDisabled;
+};
+
+class AutoDisconnectDevice {
+public:
+    AutoDisconnectDevice(sp<hardware::camera2::ICameraDeviceUser> device) :
+            mDevice(device)
+    { }
+
+    ~AutoDisconnectDevice() {
+        if (mDevice != nullptr) {
+            mDevice->disconnect();
+        }
+    }
+
+private:
+    sp<hardware::camera2::ICameraDeviceUser> mDevice;
+};
+
+class CameraPermissionsTest : public ::testing::Test {
+protected:
+    static sp<CameraService> sCameraService;
+    static sp<CameraServiceProxyOverride> sCameraServiceProxy;
+    static std::shared_ptr<CameraServiceProxyWrapper> sCameraServiceProxyWrapper;
+    static uid_t sOldUid;
+
+    static void SetUpTestSuite() {
+        sOldUid = getuid();
+        setuid(AID_CAMERASERVER);
+        sCameraServiceProxy = new CameraServiceProxyOverride();
+        sCameraServiceProxyWrapper =
+            std::make_shared<CameraServiceProxyWrapper>(sCameraServiceProxy);
+        sCameraService = new CameraService(sCameraServiceProxyWrapper);
+        sCameraService->clearCachedVariables();
+    }
+
+    static void TearDownTestSuite() {
+        sCameraServiceProxyWrapper = nullptr;
+        sCameraServiceProxy = nullptr;
+        sCameraService = nullptr;
+        setuid(sOldUid);
+    }
+};
+
+sp<CameraService> CameraPermissionsTest::sCameraService = nullptr;
+sp<CameraServiceProxyOverride> CameraPermissionsTest::sCameraServiceProxy = nullptr;
+std::shared_ptr<CameraServiceProxyWrapper>
+CameraPermissionsTest::sCameraServiceProxyWrapper = nullptr;
+uid_t CameraPermissionsTest::sOldUid = 0;
+
+// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
+// policy, and succeed when it isn't.
+TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(true);
+
+    sCameraServiceProxy->setCameraDisabled(true);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
+        ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
+                << "connectDevice returned exception code " << status.exceptionCode();
+    }
+
+    sCameraServiceProxy->setCameraDisabled(false);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(status.isOk());
+    }
+}
+
+// Test that consecutive camera connections succeed.
+TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
+
+// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
+// in the second call.
+TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
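Editor's note: CameraServiceProxyOverride above forwards every call to the real proxy but lets the test force the isCameraDisabled answer. A stripped-down sketch of that delegate-with-override pattern; the Backend and Override names are illustrative and not part of the camera sources:

#include <memory>
#include <utility>

struct Backend {
    virtual ~Backend() = default;
    virtual bool isCameraDisabled(int /*userId*/) { return false; }
};

struct Override : Backend {
    explicit Override(std::shared_ptr<Backend> real) : mReal(std::move(real)) {}

    bool isCameraDisabled(int userId) override {
        // When the override flag is set, answer from the test-controlled value;
        // otherwise fall through to the real backend.
        return mOverrideEnabled ? mDisabled : mReal->isCameraDisabled(userId);
    }

    void setCameraDisabled(bool disabled) { mDisabled = disabled; }
    void setOverrideCameraDisabled(bool enabled) { mOverrideEnabled = enabled; }

private:
    std::shared_ptr<Backend> mReal;
    bool mOverrideEnabled = false;
    bool mDisabled = false;
};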
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index ff7aafd..badd47a 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -160,11 +160,9 @@
             false/*hasZoomRatioRange*/, zoomRatioRange,
             usePreCorrectArray));
 
-    size_t index = 0;
     int32_t width = testActiveArraySize[2];
     int32_t height = testActiveArraySize[3];
     if (usePreCorrectArray) {
-        index = 1;
         width = testPreCorrActiveArraySize[2];
         height = testPreCorrActiveArraySize[3];
     }
@@ -254,6 +252,19 @@
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
     }
+
+    // Verify region zoom scaling doesn't generate invalid metering region
+    // (width < 0, or height < 0)
+    std::array<float, 3> scaleRatios = {10.0f, 1.0f, 0.1f};
+    for (float scaleRatio : scaleRatios) {
+        for (size_t i = 0; i < originalCoords.size(); i+= 2) {
+            int32_t coordinates[] = {originalCoords[i], originalCoords[i+1],
+                    originalCoords[i], originalCoords[i+1]};
+            mapper.scaleRegion(coordinates, scaleRatio, width, height);
+            EXPECT_LE(coordinates[0], coordinates[2]);
+            EXPECT_LE(coordinates[1], coordinates[3]);
+        }
+    }
 }
 
 TEST(ZoomRatioTest, scaleCoordinatesTest) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index dae5eea..bed576f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -29,29 +29,29 @@
 using hardware::ICameraServiceProxy;
 using hardware::CameraSessionStats;
 
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<String8, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
-        CameraServiceProxyWrapper::mSessionStatsMap;
-
 /**
  * CameraSessionStatsWrapper functions
  */
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
-    Mutex::Autolock l(mLock);
-
-    updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    if (proxyBinder == nullptr) return;
+    proxyBinder->notifyCameraState(mSessionStats);
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    Mutex::Autolock l(mLock);
+    updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
     mSessionStats.mLatencyMs = latencyMs;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -66,12 +66,13 @@
     }
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
     mSessionStats.mMaxPreviewFps = maxPreviewFps;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
     // after configuration, and all other sessions after configuration.
@@ -79,6 +80,7 @@
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+        sp<hardware::ICameraServiceProxy>& proxyBinder,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -91,7 +93,7 @@
     mSessionStats.mUserTag = String16(userTag.c_str());
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
-    updateProxyDeviceState(mSessionStats);
+    updateProxyDeviceState(proxyBinder);
 
     mSessionStats.mInternalReconfigure = 0;
     mSessionStats.mStreamStats.clear();
@@ -103,19 +105,26 @@
 
 sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
 #ifndef __BRILLO__
-    Mutex::Autolock al(sProxyMutex);
-    if (sCameraServiceProxy == nullptr) {
-        sp<IServiceManager> sm = defaultServiceManager();
-        // Use checkService because cameraserver normally starts before the
-        // system server and the proxy service. So the long timeout that getService
-        // has before giving up is inappropriate.
-        sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
-        if (binder != nullptr) {
-            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
-        }
+    Mutex::Autolock al(mProxyMutex);
+    if (mCameraServiceProxy == nullptr) {
+        mCameraServiceProxy = getDefaultCameraServiceProxy();
     }
 #endif
-    return sCameraServiceProxy;
+    return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+    sp<IServiceManager> sm = defaultServiceManager();
+    // Use checkService because cameraserver normally starts before the
+    // system server and the proxy service. So the long timeout that getService
+    // has before giving up is inappropriate.
+    sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+    if (binder != nullptr) {
+        return interface_cast<ICameraServiceProxy>(binder);
+    }
+#endif
+    return nullptr;
 }
 
 void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -138,10 +147,19 @@
     return ret;
 }
 
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const String16& packageName) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
-    if (proxyBinder == nullptr) return;
-    proxyBinder->notifyCameraState(sessionStats);
+    if (proxyBinder == nullptr) {
+        return ANDROID_CONTROL_AUTOFRAMING_OFF;
+    }
+    int ret = 0;
+    auto status = proxyBinder->getAutoframingOverride(packageName, &ret);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed during autoframing override query: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+
+    return ret;
 }
 
 void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
@@ -175,7 +193,8 @@
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
-    sessionStats->onActive(maxPreviewFps);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onActive(proxyBinder, maxPreviewFps);
 }
 
 void CameraServiceProxyWrapper::logIdle(const String8& id,
@@ -205,7 +224,8 @@
                 streamStats[i].mStartLatencyMs);
     }
 
-    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
             videoStabilizationMode, streamStats);
 }
 
@@ -235,7 +255,8 @@
 
     ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
             __FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
-    sessionStats->onOpen();
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onOpen(proxyBinder);
 }
 
 void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs) {
@@ -259,7 +280,8 @@
     }
 
     ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
-    sessionStats->onClose(latencyMs);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onClose(proxyBinder, latencyMs);
 }
 
 bool CameraServiceProxyWrapper::isCameraDisabled(int userId) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index eb818d1..0f77fc9 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -32,72 +32,83 @@
 class CameraServiceProxyWrapper {
 private:
     // Guard mCameraServiceProxy
-    static Mutex sProxyMutex;
+    Mutex mProxyMutex;
     // Cached interface to the camera service proxy in system service
-    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
 
-    struct CameraSessionStatsWrapper {
+    class CameraSessionStatsWrapper {
+    private:
         hardware::CameraSessionStats mSessionStats;
         Mutex mLock; // lock for per camera session stats
 
+        /**
+         * Update the session stats of a given camera device (open/close/active/idle) with
+         * the camera proxy service in the system service
+         */
+        void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
+
+    public:
         CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
                 const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
             mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
-            {}
+            { }
 
-        void onOpen();
-        void onClose(int32_t latencyMs);
+        void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+        void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs);
         void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
-        void onActive(float maxPreviewFps);
-        void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+        void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+                int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
     };
 
     // Lock for camera session stats map
-    static Mutex mLock;
+    Mutex mLock;
     // Map from camera id to the camera's session statistics
-    static std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+    std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
 
-    /**
-     * Update the session stats of a given camera device (open/close/active/idle) with
-     * the camera proxy service in the system service
-     */
-    static void updateProxyDeviceState(
-            const hardware::CameraSessionStats& sessionStats);
-
-    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+    sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
 
 public:
+    CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+            mCameraServiceProxy(serviceProxy)
+    { }
+
+    static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
     // Open
-    static void logOpen(const String8& id, int facing,
+    void logOpen(const String8& id, int facing,
             const String16& clientPackageName, int apiLevel, bool isNdk,
             int32_t latencyMs);
 
     // Close
-    static void logClose(const String8& id, int32_t latencyMs);
+    void logClose(const String8& id, int32_t latencyMs);
 
     // Stream configuration
-    static void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+    void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
             int32_t latencyMs);
 
     // Session state becomes active
-    static void logActive(const String8& id, float maxPreviewFps);
+    void logActive(const String8& id, float maxPreviewFps);
 
     // Session state becomes idle
-    static void logIdle(const String8& id,
+    void logIdle(const String8& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
     // Ping camera service proxy for user update
-    static void pingCameraServiceProxy();
+    void pingCameraServiceProxy();
 
     // Return the current top activity rotate and crop override.
-    static int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+    int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+
+    // Return the current top activity autoframing override.
+    int getAutoframingOverride(const String16& packageName);
 
     // Detect if the camera is disabled by device policy.
-    static bool isCameraDisabled(int userId);
+    bool isCameraDisabled(int userId);
 };
 
 } // android
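Editor's note: with the wrapper converted from static members to a regular instance, callers can inject a fake proxy (as CameraPermissionsTest does) or default-construct it and let getCameraServiceProxy() fall back to getDefaultCameraServiceProxy(). A hypothetical usage sketch against the signatures declared above; the include path, fakeProxy argument, and parameter values are assumptions for illustration:

#include <memory>

#include "CameraServiceProxyWrapper.h"  // include path is illustrative

using android::CameraServiceProxyWrapper;
using android::sp;
using android::String16;
using android::String8;

void exampleUsage(const sp<android::hardware::ICameraServiceProxy>& fakeProxy) {
    // Test path: inject a fake proxy so notifyCameraState() and friends stay local.
    auto testWrapper = std::make_shared<CameraServiceProxyWrapper>(fakeProxy);

    // Production path: default-construct; the real proxy is looked up lazily via
    // getDefaultCameraServiceProxy() on first use.
    auto wrapper = std::make_shared<CameraServiceProxyWrapper>();

    wrapper->logOpen(String8("0"), /*facing*/ 0, String16("com.example.app"),
            /*apiLevel*/ 2, /*isNdk*/ false, /*latencyMs*/ 15);
    wrapper->logClose(String8("0"), /*latencyMs*/ 10);
    (void)testWrapper;
}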
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 7dde268..f786b79 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
 #include "SessionConfigurationUtils.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -26,6 +28,7 @@
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "system/graphics-base-v1.1.h"
+#include <ui/PublicFormat.h>
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
@@ -127,7 +130,7 @@
 
 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
         camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
-    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+    return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
             (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
 }
 
@@ -209,11 +212,18 @@
 }
 
 //check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
     switch(format) {
         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
         case HAL_PIXEL_FORMAT_YCBCR_P010:
             return true;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (dataSpace == static_cast<android_dataspace_t>(
+                        ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                return true;
+            }
+
+            return false;
         default:
             return false;
     }
@@ -282,6 +292,65 @@
     }
 }
 
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    for (size_t i = 0; i < entry.count; ++i) {
+        uint8_t capability = entry.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+        int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+    int64_t colorSpace64 = colorSpace;
+    int64_t format64 = format;
+
+    // Translate HAL format + data space to public format
+    if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+        format64 = 0x100; // JPEG
+    } else if (format == HAL_PIXEL_FORMAT_BLOB
+            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+        format64 = 0x48454946; // HEIC
+    } else if (format == HAL_PIXEL_FORMAT_BLOB
+            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+        format64 = 0x69656963; // DEPTH_JPEG
+    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // DEPTH_POINT_CLOUD, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // DEPTH16, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // RAW_DEPTH, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // RAW_DEPTH10, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+            static_cast<android_dataspace>(
+                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+        format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+    }
+
+    camera_metadata_ro_entry_t entry =
+            staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+    for (size_t i = 0; i < entry.count; i += 3) {
+        bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+        bool isDynamicProfileCompatible =
+                (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+        if (colorSpace64 == entry.data.i64[i]
+                && isFormatCompatible
+                && isDynamicProfileCompatible) {
+            return true;
+        }
+    }
+
+    ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+            " combination not found", colorSpace, format64, dynamicRangeProfile);
+    return false;
+}
+
 bool isPublicFormat(int32_t format)
 {
     switch(format) {
@@ -336,7 +405,8 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
-        int64_t streamUseCase, int timestampBase, int mirrorMode) {
+        int64_t streamUseCase, int timestampBase, int mirrorMode,
+        int32_t colorSpace) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -443,13 +513,23 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
-            !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
         String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
                 "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                 logicalCameraId.string(), format, dynamicRangeProfile);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
+    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+            SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+            !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+                    dynamicRangeProfile, physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+                "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+                logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
     if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
             physicalCameraMetadata)) {
         String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
@@ -483,6 +563,7 @@
         streamInfo.streamUseCase = streamUseCase;
         streamInfo.timestampBase = timestampBase;
         streamInfo.mirrorMode = mirrorMode;
+        streamInfo.colorSpace = colorSpace;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
@@ -538,6 +619,7 @@
     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
     stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
     stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+    stream->colorSpace = streamInfo.colorSpace;
     stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
     stream->id = -1; // Invalid stream id
     stream->physicalCameraId = std::string(physicalId.string());
@@ -635,6 +717,7 @@
         String8 physicalCameraId = String8(it.getPhysicalCameraId());
 
         int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+        int32_t colorSpace = it.getColorSpace();
         std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
         const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                 overrideForPerfClass);
@@ -693,7 +776,7 @@
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                     logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                    streamUseCase, timestampBase, mirrorMode);
+                    streamUseCase, timestampBase, mirrorMode, colorSpace);
 
             if (!res.isOk())
                 return res;
@@ -703,7 +786,9 @@
                         camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                 bool isHeicCompositeStream =
                         camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                if (isDepthCompositeStream || isHeicCompositeStream) {
+                bool isJpegRCompositeStream =
+                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface);
+                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
                     // We need to take in to account that composite streams can have
                     // additional internal camera streams.
                     std::vector<OutputStreamInfo> compositeStreams;
@@ -711,10 +796,14 @@
                       // TODO: Take care of composite streams.
                         ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                 deviceInfo, &compositeStreams);
-                    } else {
+                    } else if (isHeicCompositeStream) {
                         ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                             deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                            deviceInfo, &compositeStreams);
                     }
+
                     if (ret != OK) {
                         String8 msg = String8::format(
                                 "Camera %s: Failed adding composite streams: %s (%d)",
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index a127c7b..b5654ac 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -98,10 +98,11 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
-        int64_t streamUseCase, int timestampBase, int mirrorMode);
+        int64_t streamUseCase, int timestampBase, int mirrorMode,
+        int32_t colorSpace);
 
 //check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
 
 // check if the dynamic range requires 10-bit output
 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
@@ -109,6 +110,11 @@
 // Check if the device supports a given dynamicRangeProfile
 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
 
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+        int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
 bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..28a22e1 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,12 +49,22 @@
             return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
             return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
             return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
         case ANDROID_LENS_INTRINSIC_CALIBRATION:
             return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
         case ANDROID_LENS_DISTORTION:
             return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+            return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+            return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
         default:
             ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
                     defaultTag);
@@ -93,4 +103,4 @@
 
 } // namespace SessionConfigurationUtils
 } // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 11534bb..e8d3f6e 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -169,7 +169,7 @@
         "libmemunreachable",
         "libprotobuf-cpp-lite",
         "libstagefright_foundation",
-        "libstatslog",
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
         "libutils",
@@ -177,6 +177,7 @@
     ],
 
     export_shared_lib_headers: [
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
     ],
@@ -200,3 +201,33 @@
         "libaudioutils_headers",
     ],
 }
+
+cc_library {
+    name: "libstats_media_metrics",
+    generated_sources: ["stats_media_metrics.cpp"],
+    generated_headers: ["stats_media_metrics.h"],
+    export_generated_headers: ["stats_media_metrics.h"],
+    shared_libs: [
+        "libcutils",
+        "libstatspull",
+        "libstatssocket",
+    ],
+}
+
+genrule {
+    name: "stats_media_metrics.h",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --header $(genDir)/stats_media_metrics.h --module media_metrics --namespace android,stats,media_metrics",
+    out: [
+        "stats_media_metrics.h",
+    ],
+}
+
+genrule {
+    name: "stats_media_metrics.cpp",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --cpp $(genDir)/stats_media_metrics.cpp --module media_metrics --namespace android,stats,media_metrics --importHeader stats_media_metrics.h",
+    out: [
+        "stats_media_metrics.cpp",
+    ],
+}
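Editor's note: libstats_media_metrics above is generated by stats-log-api-gen, and the --namespace android,stats,media_metrics flag is why the AudioAnalytics.cpp hunks that follow switch from android::util::stats_write to stats::media_metrics::stats_write. A minimal sketch of the compile-time-gated forwarding those call sites use; the stats_write below is a local stand-in, not the generated signature:

#include <utility>

namespace stats::media_metrics {
// Stand-in for the generated logging entry point; the real one is produced per atom
// by stats-log-api-gen from the media_metrics module.
template <typename... Args>
int stats_write(Args&&... /*args*/) { return 0; }
}  // namespace stats::media_metrics

// Forward arbitrary arguments to the generated writer only when statsd logging is
// compiled in, mirroring the statsdLoggingCall/sendToStatsd pattern in AudioAnalytics.
template <typename... Args>
int logToStatsd([[maybe_unused]] Args&&... args) {
    int result = 0;
#ifdef STATSD_ENABLE
    result = stats::media_metrics::stats_write(std::forward<Args>(args)...);
#endif
    return result;
}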
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index b03e418..7af6c41 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -24,7 +24,7 @@
 #include <aaudio/AAudio.h>        // error codes
 #include <audio_utils/clock.h>    // clock conversions
 #include <cutils/properties.h>
-#include <statslog.h>             // statsd
+#include <stats_media_metrics.h>             // statsd
 #include <system/audio.h>
 
 #include "AudioTypes.h"           // string to int conversions
@@ -269,6 +269,24 @@
     "enabled",
 };
 
+static constexpr const char * const MidiDeviceCloseFields[] {
+    "mediametrics_midi_device_close_reported",
+    "uid",
+    "midi_device_id",
+    "input_port_count",
+    "output_port_count",
+    "device_type",
+    "is_shared",
+    "supports_ump",
+    "using_alsa",
+    "duration_ns",
+    "opened_count",
+    "closed_count",
+    "device_disconnected",
+    "total_input_bytes",
+    "total_output_bytes",
+};
+
 /**
  * printFields is a helper method that prints the fields and corresponding values
  * in a human readable style.
@@ -292,7 +310,7 @@
     int result = 0;
 
 #ifdef STATSD_ENABLE
-    result = android::util::stats_write(args...);
+    result = stats::media_metrics::stats_write(args...);
 #endif
     return result;
 }
@@ -308,7 +326,7 @@
     std::stringstream ss;
 
 #ifdef STATSD_ENABLE
-    result = android::util::stats_write(args...);
+    result = stats::media_metrics::stats_write(args...);
     ss << "result:" << result;
 #endif
     ss << " { ";
@@ -497,6 +515,15 @@
             [this](const std::shared_ptr<const android::mediametrics::Item> &item){
                 mSpatializer.onEvent(item);
             }));
+
+    // Handle MIDI
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_MIDI "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mMidiLogging.onEvent(item);
+            }));
 }
 
 AudioAnalytics::~AudioAnalytics()
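Editor's note: the MIDI handler above is registered through the same action table as the other handlers: a callback stored under a (metrics key, event value) pair and invoked when a matching item arrives. A standalone sketch of that dispatch pattern; the ActionTable type and its exact-match lookup are illustrative, not the mediametrics AnalyticsActions API:

#include <functional>
#include <map>
#include <string>
#include <utility>

class ActionTable {
public:
    using Callback = std::function<void(const std::string& itemKey)>;

    // Register a callback to run when an item with this (key, value) pair is seen.
    void addAction(std::string key, std::string value, Callback cb) {
        mActions[{std::move(key), std::move(value)}] = std::move(cb);
    }

    // Fire the matching callback, if any, for an incoming item.
    void onEvent(const std::string& key, const std::string& value) {
        auto it = mActions.find({key, value});
        if (it != mActions.end()) it->second(key);
    }

private:
    std::map<std::pair<std::string, std::string>, Callback> mActions;
};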
@@ -607,7 +634,7 @@
         const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
 
         // currently we only send create status events.
-        const int32_t event = android::util::
+        const int32_t event = stats::media_metrics::
                 MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
 
         // The following fields should all be present in a create event.
@@ -647,7 +674,7 @@
                 __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
 
         const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
                 , atom_status
                 , message.c_str()
                 , subCode
@@ -661,7 +688,7 @@
                 , sampleRate
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+        mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
         return true;
     }
     return false;
@@ -679,7 +706,7 @@
         const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
 
         // currently we only send create status events.
-        const int32_t event = android::util::
+        const int32_t event = stats::media_metrics::
                 MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__EVENT__AUDIO_TRACK_EVENT_CREATE;
 
         // The following fields should all be present in a create event.
@@ -734,7 +761,7 @@
                 __func__,
                 AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
         const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
                 , atom_status
                 , message.c_str()
                 , subCode
@@ -751,7 +778,7 @@
                 , (float)pitch
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+        mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
         return true;
     }
     return false;
@@ -860,7 +887,7 @@
         if (clientCalled  // only log if client app called AudioRecord.
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
                     , ENUM_EXTRACT(inputDeviceStatsd)
                     , inputDeviceNames.c_str()
                     , deviceTimeNs
@@ -878,7 +905,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -930,7 +957,7 @@
               << ")";
         if (mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
                 , ENUM_EXTRACT(deviceStatsd)
                 , deviceNames.c_str()
                 , deviceTimeNs
@@ -944,7 +971,7 @@
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
@@ -1050,7 +1077,7 @@
         if (clientCalled // only log if client app called AudioTracks
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
                     , ENUM_EXTRACT(outputDeviceStatsd)
                     , outputDeviceNames.c_str()
                     , deviceTimeNs
@@ -1074,7 +1101,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
@@ -1136,7 +1163,7 @@
             const long_enum_type_t inputDeviceBits{};
 
             const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
                     , mA2dpDeviceName.c_str()
@@ -1146,7 +1173,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -1190,7 +1217,7 @@
                 << " deviceName:" << mA2dpDeviceName;
         if (mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
                     , mA2dpDeviceName.c_str()
@@ -1200,7 +1227,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -1217,7 +1244,7 @@
             << " deviceName:" << mA2dpDeviceName;
     if (mAudioAnalytics.mDeliverStatistics) {
         const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                 , ENUM_EXTRACT(inputDeviceBits)
                 , ENUM_EXTRACT(outputDeviceBits)
                 , mA2dpDeviceName.c_str()
@@ -1227,7 +1254,7 @@
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
         mAudioAnalytics.mStatsdLog->log(
-                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
     }
 }
 
@@ -1355,10 +1382,10 @@
                     << "(" << sharingModeRequestedStr << ")";
 
     if (mAudioAnalytics.mDeliverStatistics) {
-        android::util::BytesField bf_serialized(
+        const stats::media_metrics::BytesField bf_serialized(
             serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
         const auto result = sendToStatsd(
-                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
                 , path
                 , direction
                 , framesPerBurst
@@ -1381,7 +1408,7 @@
         std::stringstream ss;
         ss << "result:" << result;
         const auto fieldsStr = printFields(AAudioStreamFields,
-                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
                 , path
                 , direction
                 , framesPerBurst
@@ -1404,7 +1431,7 @@
         ss << " " << fieldsStr;
         std::string str = ss.str();
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
+        mAudioAnalytics.mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
     }
 }
 
@@ -1544,12 +1571,12 @@
 // Classifies the setting event for statsd (use generated statsd enums.proto constants).
 static int32_t classifySettingEvent(bool isSetAlready, bool withinBoot) {
     if (isSetAlready) {
-        return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
+        return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
     }
     if (withinBoot) {
-        return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
+        return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
     }
-    return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
+    return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
 }
 
 void AudioAnalytics::Spatializer::onEvent(
@@ -1598,7 +1625,7 @@
                 types::channelMaskVectorFromString(channelMasks);
 
         const auto [ result, str ] = sendToStatsd(SpatializerCapabilitiesFields,
-                CONDITION(android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
                 , headTrackingModesVector
                 , levelsVector
                 , modesVector
@@ -1606,7 +1633,7 @@
                 );
 
         mAudioAnalytics.mStatsdLog->log(
-                android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
+                stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
 
         std::lock_guard lg(mLock);
         if (mFirstCreateTimeNs == 0) {
@@ -1655,13 +1682,13 @@
                 deviceState.enabled = enabled;
                 const bool enabledStatsd = enabled == "true";
                 const auto [ result, str ] = sendToStatsd(SpatializerDeviceEnabledFields,
-                        CONDITION(android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
                         , deviceTypeStatsd
                         , settingEventStatsd
                         , enabledStatsd
                         );
                 mAudioAnalytics.mStatsdLog->log(
-                        android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
+                        stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
             }
         }
         if (!hasHeadTracker.empty()) {
@@ -1671,13 +1698,13 @@
                 deviceState.hasHeadTracker = hasHeadTracker;
                 const bool supportedStatsd = hasHeadTracker == "true";
                 const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceSupportedFields,
-                        CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
                         , deviceTypeStatsd
                         , settingEventStatsd
                         , supportedStatsd
                         );
                 mAudioAnalytics.mStatsdLog->log(
-                        android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
+                        stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
             }
         }
         if (!headTrackerEnabled.empty()) {
@@ -1687,13 +1714,13 @@
                 deviceState.headTrackerEnabled = headTrackerEnabled;
                 const bool enabledStatsd = headTrackerEnabled == "true";
                 const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceEnabledFields,
-                        CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
                         , deviceTypeStatsd
                         , settingEventStatsd
                         , enabledStatsd
                         );
                 mAudioAnalytics.mStatsdLog->log(
-                        android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
+                        stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
             }
         }
         mSimpleLog.log("%s deviceKey: %s item: %s",
@@ -1710,6 +1737,127 @@
     return { s, n };
 }
 
+void AudioAnalytics::MidiLogging::onEvent(
+        const std::shared_ptr<const android::mediametrics::Item> &item) const {
+    const std::string& key = item->getKey();
+
+    const auto uid = item->getUid();
+
+    int32_t deviceId = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DEVICEID, &deviceId);
+
+    int32_t inputPortCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_INPUTPORTCOUNT, &inputPortCount);
+
+    int32_t outputPortCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, &outputPortCount);
+
+    int32_t hardwareType = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_HARDWARETYPE, &hardwareType);
+
+    std::string isSharedString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ISSHARED, &isSharedString);
+    const bool isShared = (isSharedString == "true");
+
+    std::string supportsMidiUmpString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SUPPORTSMIDIUMP, &supportsMidiUmpString);
+    const bool supportsMidiUmp = (supportsMidiUmpString == "true");
+
+    std::string usingAlsaString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_USINGALSA, &usingAlsaString);
+    const bool usingAlsa = (usingAlsaString == "true");
+
+    int64_t durationNs = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DURATIONNS, &durationNs);
+
+    int32_t openedCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_OPENEDCOUNT, &openedCount);
+
+    int32_t closedCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CLOSEDCOUNT, &closedCount);
+
+    std::string deviceDisconnectedString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DEVICEDISCONNECTED, &deviceDisconnectedString);
+    const bool deviceDisconnected = (deviceDisconnectedString == "true");
+
+    int32_t totalInputBytes = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_TOTALINPUTBYTES, &totalInputBytes);
+
+    int32_t totalOutputBytes = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES, &totalOutputBytes);
+
+    LOG(LOG_LEVEL) << "key:" << key
+            << " uid:" << uid
+            << " id:" << deviceId
+            << " input_port_count:" << inputPortCount
+            << " output_port_count:" << outputPortCount
+            << " device_type:" << hardwareType
+            << " is_shared:" << isSharedString
+            << " supports_ump:" << supportsMidiUmpString
+            << " using_alsa:" << usingAlsaString
+            << " duration_opened_ms:" << durationNs
+            << " opened_count:" << openedCount
+            << " closed_count:" << closedCount
+            << " device_disconnected:" << deviceDisconnectedString
+            << " total_input_bytes:" << totalInputBytes
+            << " total_output_bytes:" << totalOutputBytes;
+
+    if (mAudioAnalytics.mDeliverStatistics) {
+        const auto result = sendToStatsd(
+                CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+                , uid
+                , deviceId
+                , inputPortCount
+                , outputPortCount
+                , hardwareType
+                , isShared
+                , supportsMidiUmp
+                , usingAlsa
+                , durationNs
+                , openedCount
+                , closedCount
+                , deviceDisconnected
+                , totalInputBytes
+                , totalOutputBytes);
+        std::stringstream ss;
+        ss << "result:" << result;
+        const auto fieldsStr = printFields(MidiDeviceCloseFields,
+                CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+                , uid
+                , deviceId
+                , inputPortCount
+                , outputPortCount
+                , hardwareType
+                , isShared
+                , supportsMidiUmp
+                , usingAlsa
+                , durationNs
+                , openedCount
+                , closedCount
+                , deviceDisconnected
+                , totalInputBytes
+                , totalOutputBytes);
+        ss << " " << fieldsStr;
+        std::string str = ss.str();
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED, str);
+    }
+}
+
 // This method currently suppresses the name.
 std::string AudioAnalytics::getDeviceNamesFromOutputDevices(std::string_view devices) const {
     std::string deviceNames;
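
Note on the MidiLogging handler added above: it fires for AMEDIAMETRICS_KEY_AUDIO_MIDI items
whose AMEDIAMETRICS_PROP_EVENT equals AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED, reads the
listed AMEDIAMETRICS_PROP_* values back out of the time machine, and writes the new
MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED atom. The producer-side sketch below is illustrative
only: the mediametrics::Item create/set/selfrecord calls and header names are assumptions
based on the libmediametrics C++ API, not part of this change.

    // Illustrative sketch (not in this patch). Assumes <memory>,
    // <media/MediaMetricsItem.h> and the AMEDIAMETRICS_* constants used by
    // MidiLogging::onEvent() above; deviceId/durationNs are hypothetical inputs.
    static void reportMidiDeviceClosed(int32_t deviceId, int64_t durationNs) {
        std::unique_ptr<mediametrics::Item> item(
                mediametrics::Item::create(AMEDIAMETRICS_KEY_AUDIO_MIDI));
        item->setInt32(AMEDIAMETRICS_PROP_DEVICEID, deviceId);
        item->setInt32(AMEDIAMETRICS_PROP_INPUTPORTCOUNT, 1);
        item->setInt32(AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, 1);
        item->setCString(AMEDIAMETRICS_PROP_ISSHARED, "false");
        item->setCString(AMEDIAMETRICS_PROP_USINGALSA, "true");
        item->setInt64(AMEDIAMETRICS_PROP_DURATIONNS, durationNs);
        item->setInt32(AMEDIAMETRICS_PROP_OPENEDCOUNT, 1);
        item->setInt32(AMEDIAMETRICS_PROP_CLOSEDCOUNT, 1);
        item->setCString(AMEDIAMETRICS_PROP_DEVICEDISCONNECTED, "false");
        // The event value below is what routes the item to MidiLogging::onEvent().
        item->setCString(AMEDIAMETRICS_PROP_EVENT,
                AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED);
        item->selfrecord();
    }
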
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 5787e9e..630a436 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -26,7 +26,7 @@
 #include <string>
 #include <audio_utils/clock.h>
 #include <cutils/properties.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
 #include <sys/timerfd.h>
 #include <system/audio.h>
 
@@ -164,7 +164,7 @@
     const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
     const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
     const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
-    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+    const int result = stats::media_metrics::stats_write(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
                                          audio_device,
                                          duration_secs,
                                          (float)volume,
@@ -177,7 +177,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audio_power_usage_data_reported:"
-            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED
             << " audio_device:" << audio_device
             << " duration_secs:" << duration_secs
             << " average_volume:" << (float)volume
@@ -187,7 +187,7 @@
             << " max_volume_duration_secs:" << max_volume_duration_secs
             << " max_volume:" << (float)max_volume
             << " }";
-    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
+    mStatsdLog->log(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 void AudioPowerUsage::updateMinMaxVolumeAndDuration(
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index d2b4aab..353ae12 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -18,7 +18,7 @@
 #include "MediaMetricsConstants.h"
 #include "StringUtils.h"
 #include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
-#include <statslog.h>            // statsd
+#include <stats_media_metrics.h>            // statsd
 
 namespace android::mediametrics::types {
 
@@ -184,41 +184,41 @@
 const std::unordered_map<std::string, int32_t>& getAudioDeviceInfoTypeMap() {
     // DO NOT MODIFY VALUES (OK to add new ones).
     static std::unordered_map<std::string, int32_t> map{
-        {"unknown", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
-        {"earpiece", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
-        {"speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
-        {"headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
-        {"headphone", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
-        {"bt_sco", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
-        {"bt_sco_hs", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
-        {"bt_sco_carkit", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
-        {"bt_a2dp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
-        {"bt_a2dp_hp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
-        {"bt_a2dp_spk", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
-        {"aux_digital", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
-        {"hdmi", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
-        {"analog_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
-        {"digital_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
-        {"usb_accessory", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
-        {"usb_device", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
-        {"usb_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
-        {"remote_submix", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
-        {"telephony_tx", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
-        {"line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
-        {"hdmi_arc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
-        {"hdmi_earc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
-        {"spdif", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
-        {"fm_transmitter", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
-        {"aux_line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
-        {"speaker_safe", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
-        {"ip", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
-        {"bus", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
-        {"proxy", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
-        {"hearing_aid_out", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
-        {"echo_canceller", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
-        {"ble_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
-        {"ble_speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
-        {"ble_broadcast", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
+        {"unknown", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
+        {"earpiece", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
+        {"speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
+        {"headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
+        {"headphone", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
+        {"bt_sco", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_sco_hs", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_sco_carkit", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_a2dp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"bt_a2dp_hp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"bt_a2dp_spk", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"aux_digital", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+        {"hdmi", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+        {"analog_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+        {"digital_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+        {"usb_accessory", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
+        {"usb_device", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
+        {"usb_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
+        {"remote_submix", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
+        {"telephony_tx", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
+        {"line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
+        {"hdmi_arc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
+        {"hdmi_earc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
+        {"spdif", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
+        {"fm_transmitter", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
+        {"aux_line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
+        {"speaker_safe", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
+        {"ip", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
+        {"bus", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
+        {"proxy", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
+        {"hearing_aid_out", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
+        {"echo_canceller", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
+        {"ble_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
+        {"ble_speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
+        {"ble_broadcast", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
     };
     return map;
 }
@@ -324,23 +324,23 @@
     // DO NOT MODIFY VALUES(OK to add new ones).
     static std::unordered_map<std::string, int32_t> map {
         {"",
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_OK,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_ARGUMENT,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_IO,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_MEMORY,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_SECURITY,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_STATE,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_TIMEOUT,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_UNKNOWN,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
     };
     return map;
 }
@@ -664,7 +664,7 @@
     auto& map = getStatusMap();
     auto it = map.find(status);
     if (it == map.end()) {
-        return util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
+        return stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
     }
     return it->second;
 }
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index ceb3e6a..b4de4f4 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -33,7 +33,7 @@
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <memunreachable/memunreachable.h>
 #include <private/android_filesystem_config.h> // UID
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include <set>
 
@@ -546,7 +546,7 @@
     if (mStatsdRegistered.test_and_set()) {
         return;
     }
-    auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+    auto tag = stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO;
     auto cb = MediaMetricsService::pullAtomCallback;
     AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
 }
@@ -564,7 +564,7 @@
 std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
 {
     switch (atomTag) {
-    case android::util::MEDIA_DRM_ACTIVITY_INFO:
+    case stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO:
         return "mediadrm";
     }
     return {};
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 84d494e..8b33f10 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -51,7 +51,7 @@
         "libprotobuf-cpp-lite",
         "libstagefright",
         "libstagefright_foundation",
-        "libstatslog",
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
         "libutils",
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 776f878..8a48ce5 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -37,8 +37,6 @@
 #include "MediaMetricsService.h"
 #include "iface_statsd.h"
 
-#include <statslog.h>
-
 namespace android {
 
 // set of routines that crack a mediametrics::Item
@@ -89,6 +87,9 @@
         { "nuplayer", statsd_nuplayer },
         { "nuplayer2", statsd_nuplayer },
         { "recorder", statsd_recorder },
+        { "media_drm_created", statsd_media_drm_created },
+        { "media_drm_session_opened", statsd_media_drm_session_opened },
+        { "media_drm_errored", statsd_media_drm_errored },
     };
     return dump2StatsdInternal(statsd_pushers, item, statsdLog);
 }
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 82e928e..f0a4ac8 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -363,6 +363,20 @@
         SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
     } mSpatializer{*this};
 
+    // MidiLogging collects info whenever a MIDI device is closed.
+    class MidiLogging {
+    public:
+        explicit MidiLogging(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        void onEvent(
+                const std::shared_ptr<const android::mediametrics::Item> &item) const;
+
+    private:
+
+        AudioAnalytics &mAudioAnalytics;
+    } mMidiLogging{*this};
+
     AudioPowerUsage mAudioPowerUsage;
 };
 
diff --git a/services/mediametrics/include/mediametricsservice/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
index c2a8b3c..a97a386 100644
--- a/services/mediametrics/include/mediametricsservice/iface_statsd.h
+++ b/services/mediametrics/include/mediametricsservice/iface_statsd.h
@@ -32,6 +32,9 @@
 extern statsd_pusher statsd_codec;
 extern statsd_pusher statsd_extractor;
 extern statsd_pusher statsd_mediaparser;
+extern statsd_pusher statsd_media_drm_created;
+extern statsd_pusher statsd_media_drm_session_opened;
+extern statsd_pusher statsd_media_drm_errored;
 
 extern statsd_pusher statsd_nuplayer;
 extern statsd_pusher statsd_recorder;
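
Note: the statsd_pusher type itself is outside this hunk; from the pusher definitions added
in statsd_drm.cpp below, it is presumably a function type of the following shape (an
inference for readability, not a quote from the header):

    // Inferred shape of statsd_pusher (see the statsd_media_drm_* definitions
    // added in statsd_drm.cpp in this change).
    using statsd_pusher = bool(const std::shared_ptr<const mediametrics::Item>& item,
                               const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
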
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 3d9376e..9a9bc1d 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -107,15 +107,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiopolicy_reported:"
-            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -131,7 +132,7 @@
             << " active_session:" << active_session
             << " active_device:" << active_device
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 01adf7f..63c61ec 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -147,8 +147,9 @@
     (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized,
@@ -156,7 +157,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiorecord_reported:"
-            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -181,7 +182,7 @@
 
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index e9b6dd6..3056605 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -188,15 +188,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiothread_reported:"
-            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -231,7 +232,7 @@
             << " latency_mean_millis:" << latency_mean_millis
             << " latency_stddev_millis:" << latency_stddev_millis
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 67514e9..1fc7fb4 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -134,8 +134,9 @@
     (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                bf_serialized,
@@ -143,7 +144,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiotrack_reported:"
-            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -164,7 +165,7 @@
 
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index a737ba0..c5957e9 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 #include <stats_event.h>
 
 #include "cleaner.h"
@@ -46,7 +46,7 @@
     if (item == nullptr) return false;
 
     AStatsEvent* event = AStatsEvent_obtain();
-    AStatsEvent_setAtomId(event, android::util::MEDIA_CODEC_REPORTED);
+    AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
 
     const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
     AStatsEvent_writeInt64(event, timestamp_nanos);
@@ -455,8 +455,8 @@
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                bf_serialized);
@@ -464,7 +464,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_codec_reported:"
-            << android::util::MEDIAMETRICS_CODEC_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -525,7 +525,7 @@
             << " original_qp_b_min:" << qpBMinOri
             << " original_qp_b_max:" << qpBMaxOri
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
 
 
     return true;
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index e06a605..1008531 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -35,7 +35,7 @@
 #include "StringUtils.h"
 #include "iface_statsd.h"
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include <array>
 #include <string>
@@ -69,8 +69,9 @@
     // This field is left here for backward compatibility.
     // This field is not used anymore.
     const std::string  kUnusedField("");
-    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         vendor.c_str(),
@@ -80,7 +81,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_mediadrm_reported:"
-            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -90,7 +91,7 @@
             << " description:" << description
             // omitting serialized
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
@@ -122,7 +123,8 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                plugin_id.c_str(), description.c_str(),
@@ -136,7 +138,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_drmmanager_reported:"
-            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -151,7 +153,7 @@
         log << " method_" << i << ":" << methodCounts[i];
     }
     log << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
     return true;
 }
 
@@ -207,7 +209,7 @@
 
     // Memory for |event| is internally managed by statsd.
     AStatsEvent* event = AStatsEventList_addStatsEvent(out);
-    AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+    AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO);
     AStatsEvent_writeString(event, item->getPkgName().c_str());
     AStatsEvent_writeInt64(event, item->getPkgVersionCode());
     AStatsEvent_writeString(event, vendor.c_str());
@@ -219,7 +221,7 @@
     std::stringstream log;
     log << "pulled:" << " {"
             << " media_drm_activity_info:"
-            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO
             << " package_name:" << item->getPkgName()
             << " package_version_code:" << item->getPkgVersionCode()
             << " vendor:" << vendor
@@ -227,7 +229,115 @@
             << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
             << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
             << " }";
-    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO, log.str());
+    return true;
+}
+
+bool statsd_media_drm_created(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t uuid_lsb = -1;
+    if (!item->getInt64("uuid_lsb", &uuid_lsb)) return false;
+    int64_t uuid_msb = -1;
+    if (!item->getInt64("uuid_msb", &uuid_msb)) return false;
+    int64_t object_nonce_lsb = -1;
+    if (!item->getInt64("object_nonce_lsb", &object_nonce_lsb)) return false;
+    int64_t object_nonce_msb = -1;
+    if (!item->getInt64("object_nonce_msb", &object_nonce_msb)) return false;
+    int64_t apex_version = -1;
+    item->getInt64("apex_version", &apex_version);
+    const int result = stats::media_metrics::stats_write(
+                stats::media_metrics::MEDIA_DRM_CREATED,
+                uuid_lsb, uuid_msb, object_nonce_lsb,
+                object_nonce_msb, apex_version);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_created:"
+            << stats::media_metrics::MEDIA_DRM_CREATED
+            << " uuid_lsb:" << uuid_lsb
+            << " uuid_msb:" << uuid_msb
+            << " object_nonce_lsb:" << object_nonce_lsb
+            << " object_nonce_msb:" << object_nonce_msb
+            << " apex_version:" << apex_version
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_CREATED, log.str());
+    return true;
+}
+
+bool statsd_media_drm_session_opened(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t object_nonce_lsb = -1;
+    if (!item->getInt64("object_nonce_lsb", &object_nonce_lsb)) return false;
+    int64_t object_nonce_msb = -1;
+    if (!item->getInt64("object_nonce_msb", &object_nonce_msb)) return false;
+    int64_t session_nonce_lsb = -1;
+    if (!item->getInt64("session_nonce_lsb", &session_nonce_lsb)) return false;
+    int64_t session_nonce_msb = -1;
+    if (!item->getInt64("session_nonce_msb", &session_nonce_msb)) return false;
+    int32_t requested_security_level = -1;
+    if (!item->getInt32("requested_security_level", &requested_security_level)) return false;
+    int32_t opened_security_level = -1;
+    if (!item->getInt32("opened_security_level", &opened_security_level)) return false;
+    const int result = stats::media_metrics::stats_write(
+                stats::media_metrics::MEDIA_DRM_SESSION_OPENED, object_nonce_lsb,
+                object_nonce_msb, session_nonce_lsb, session_nonce_msb,
+                requested_security_level, opened_security_level);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_session_opened:"
+            << stats::media_metrics::MEDIA_DRM_SESSION_OPENED
+            << " object_nonce_lsb:" << object_nonce_lsb
+            << " object_nonce_msb:" << object_nonce_msb
+            << " session_nonce_lsb:" << session_nonce_lsb
+            << " session_nonce_msb:" << session_nonce_msb
+            << " requested_security_level:" << requested_security_level
+            << " opened_security_level:" << opened_security_level
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_SESSION_OPENED, log.str());
+    return true;
+}
+
+bool statsd_media_drm_errored(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t object_nonce_lsb = -1;
+    if (!item->getInt64("object_nonce_lsb", &object_nonce_lsb)) return false;
+    int64_t object_nonce_msb = -1;
+    if (!item->getInt64("object_nonce_msb", &object_nonce_msb)) return false;
+    int64_t session_nonce_lsb = 0;
+    item->getInt64("session_nonce_lsb", &session_nonce_lsb);
+    int64_t session_nonce_msb = 0;
+    item->getInt64("session_nonce_msb", &session_nonce_msb);
+    int32_t api = -1;
+    if (!item->getInt32("api", &api)) return false;
+    int32_t error_code = -1;
+    if (!item->getInt32("error_code", &error_code)) return false;
+    int32_t cdm_err = 0;
+    item->getInt32("cdm_err", &cdm_err);
+    int32_t oem_err = 0;
+    item->getInt32("oem_err", &oem_err);
+    const int result = stats::media_metrics::stats_write(
+                stats::media_metrics::MEDIA_DRM_ERRORED, object_nonce_lsb,
+                object_nonce_msb, session_nonce_lsb, session_nonce_msb,
+                api, error_code, cdm_err, oem_err);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_errored:"
+            << stats::media_metrics::MEDIA_DRM_ERRORED
+            << " object_nonce_lsb:" << object_nonce_lsb
+            << " object_nonce_msb:" << object_nonce_msb
+            << " session_nonce_lsb:" << session_nonce_lsb
+            << " session_nonce_msb:" << session_nonce_msb
+            << " api:" << api
+            << " error_code:" << error_code
+            << " cdm_err:" << cdm_err
+            << " oem_err:" << oem_err
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_ERRORED, log.str());
     return true;
 }
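
Note on the three pushers added above: they are selected by item key through the
statsd_pushers table extended in iface_statsd.cpp ("media_drm_created",
"media_drm_session_opened", "media_drm_errored"). As the parsing shows,
statsd_media_drm_created() requires the uuid_lsb/uuid_msb and
object_nonce_lsb/object_nonce_msb int64 properties and treats apex_version as optional. A
minimal item sketch follows; the mediametrics::Item calls are assumptions based on the
libmediametrics API and the local variables are hypothetical.

    // Illustrative only (not in this patch); property names come from the
    // statsd_media_drm_created() parser above.
    std::unique_ptr<mediametrics::Item> item(
            mediametrics::Item::create("media_drm_created"));
    item->setInt64("uuid_lsb", uuidLsb);                 // required
    item->setInt64("uuid_msb", uuidMsb);                 // required
    item->setInt64("object_nonce_lsb", objectNonceLsb);  // required
    item->setInt64("object_nonce_msb", objectNonceMsb);  // required
    item->setInt64("apex_version", apexVersion);         // optional, defaults to -1
    item->selfrecord();
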
 
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index a8bfeaa..9345df6 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -96,15 +96,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_extractor_reported:"
-            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -116,7 +117,7 @@
             << " entry_point:" << entry_point_string << "(" << entry_point << ")"
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 67ca874b..458bd32 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -28,7 +28,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -83,7 +83,8 @@
     item->getString("android.media.mediaparser.logSessionId", &logSessionId);
     logSessionId = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED,
                                timestamp_nanos,
                                package_name.c_str(),
                                package_version_code,
@@ -103,7 +104,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_mediaparser_reported:"
-            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -120,7 +121,7 @@
             << " video_height:" << videoHeight
             << " log_session_id:" << logSessionId
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index bdee1f2..fd545f4 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -153,8 +153,9 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
@@ -162,7 +163,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_nuplayer_reported:"
-            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -193,7 +194,7 @@
             // TODO NuPlayer - add log_session_id
             // << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 5f54a68..efa284b 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -179,15 +179,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_recorder_reported:"
-            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -218,7 +219,7 @@
             << " iframe_interval:" << iframe_interval
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 4d18876..adf0a5e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -26,8 +26,9 @@
 #include <cutils/sched_policy.h>
 #include <dirent.h>
 #include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfo.h>
+#include <media/stagefright/foundation/ABase.h>
 #include <mediautils/BatteryNotifier.h>
+#include <mediautils/ProcessInfo.h>
 #include <mediautils/SchedulingPolicyService.h>
 #include <string.h>
 #include <sys/types.h>
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 81c85e5..08ac90e 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -40,6 +40,7 @@
         "libbinder",
         "libbinder_ndk",
         "libmedia",
+        "libmediautils",
         "libutils",
     ],
     fuzz_config: {
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index e4aaea0..7003dcb 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -22,8 +22,8 @@
 #include <aidl/android/media/BnResourceManagerClient.h>
 #include <media/MediaResource.h>
 #include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfoInterface.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <mediautils/ProcessInfoInterface.h>
 #include "ResourceManagerService.h"
 #include "fuzzer/FuzzedDataProvider.h"
 
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 618626f..0366d9b 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -18,6 +18,7 @@
         "libbinder_ndk",
         "liblog",
         "libmedia",
+        "libmediautils",
         "libutils",
     ],
     include_dirs: [
@@ -63,6 +64,7 @@
         "libbinder_ndk",
         "liblog",
         "libmedia",
+        "libmediautils",
         "libutils",
     ],
     include_dirs: [
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 5bf44ce..7bd9484 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -21,7 +21,7 @@
 #include <media/MediaResource.h>
 #include <media/MediaResourcePolicy.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
 
 namespace android {
 
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b55b601..fd546f6 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -69,6 +69,10 @@
     result << "    Reference Count:      " << mOpenCount << "\n";
     result << "    Session Id:           " << getSessionId() << "\n";
     result << "    Privacy Sensitive:    " << isPrivacySensitive() << "\n";
+    result << "    Hardware Channel Count:" << getHardwareSamplesPerFrame() << "\n";
+    result << "    Hardware Format:      " << getHardwareFormat() << " ("
+                                           << audio_format_to_string(getHardwareFormat()) << ")\n";
+    result << "    Hardware Sample Rate: " << getHardwareSampleRate() << "\n";
     result << "    Connected:            " << mConnected.load() << "\n";
     result << "    Registered Streams:" << "\n";
     result << AAudioServiceStreamShared::dumpHeader() << "\n";
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index ea817ab..3d237b3 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -22,6 +22,7 @@
 #include <assert.h>
 #include <map>
 #include <mutex>
+#include <set>
 #include <sstream>
 #include <thread>
 #include <utils/Singleton.h>
@@ -68,6 +69,24 @@
     return result.str();
 }
 
+namespace {
+
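+// Preferred fallback order when opening an MMAP stream fails with AAUDIO_ERROR_UNAVAILABLE:
+// FLOAT -> I32 -> I24_PACKED -> I16.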
+const static std::map<audio_format_t, audio_format_t> NEXT_FORMAT_TO_TRY = {
+        {AUDIO_FORMAT_PCM_FLOAT,         AUDIO_FORMAT_PCM_32_BIT},
+        {AUDIO_FORMAT_PCM_32_BIT,        AUDIO_FORMAT_PCM_24_BIT_PACKED},
+        {AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
+};
+
+audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
+    if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
+        return returnedFromAPM;
+    }
+    const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
+    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+}
+
+}  // namespace
+
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAUDIO_OK;
     copyFrom(request.getConstantConfiguration());
@@ -81,36 +100,38 @@
         legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     audio_format_t audioFormat = getFormat();
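+    // Track the formats already attempted so that a repeated suggestion from APM cannot loop forever.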
+    std::set<audio_format_t> formatsTried;
+    while (true) {
+        if (formatsTried.find(audioFormat) != formatsTried.end()) {
+            // APM suggested a format that has already been tried and failed.
+            ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+            break;
+        }
+        formatsTried.insert(audioFormat);
 
-    result = openWithFormat(audioFormat);
-    if (result == AAUDIO_OK) return result;
+        audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
+        result = openWithFormat(audioFormat, &nextFormatToTry);
+        if (result == AAUDIO_OK || result != AAUDIO_ERROR_UNAVAILABLE) {
+            // Stop if the open succeeded, or if it failed with an error other than
+            // AAUDIO_ERROR_UNAVAILABLE.
+            ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+            break;
+        }
 
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        ALOGD("%s() FLOAT failed, perhaps due to format. Try again with 32_BIT", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_32_BIT;
-        result = openWithFormat(audioFormat);
-    }
-    if (result == AAUDIO_OK) return result;
-
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_32_BIT) {
-        ALOGD("%s() 32_BIT failed, perhaps due to format. Try again with 24_BIT_PACKED", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
-        result = openWithFormat(audioFormat);
-    }
-    if (result == AAUDIO_OK) return result;
-
-    // TODO The HAL and AudioFlinger should be recommending a format if the open fails.
-    //      But that recommendation is not propagating back from the HAL.
-    //      So for now just try something very likely to work.
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_24_BIT_PACKED) {
-        ALOGD("%s() 24_BIT failed, perhaps due to format. Try again with 16_BIT", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_16_BIT;
-        result = openWithFormat(audioFormat);
+        nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
+        ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
+              __func__, audioFormat, nextFormatToTry);
+        audioFormat = nextFormatToTry;
+        if (audioFormat == AUDIO_FORMAT_DEFAULT) {
+            // Nothing else to try
+            break;
+        }
     }
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(audio_format_t audioFormat) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
+        audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
     aaudio_result_t result = AAUDIO_OK;
     audio_config_base_t config;
     audio_port_handle_t deviceId;
@@ -171,7 +192,11 @@
     if (status != OK) {
         // This can happen if the resource is busy or the config does
         // not match the hardware.
-        ALOGD("%s() - openMmapStream() returned status %d",  __func__, status);
+        ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
+              "channel_mask=%#x",
+              __func__, status, config.format, config.sample_rate, config.channel_mask);
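+        // If openMmapStream() suggested a different format, propagate it so open() retries with it.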
+        *nextFormatToTry = config.format != audioFormat ? config.format
+                                                        : *nextFormatToTry;
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
@@ -206,6 +231,9 @@
 
     setFormat(config.format);
     setSampleRate(config.sample_rate);
+    setHardwareSampleRate(getSampleRate());
+    setHardwareFormat(getFormat());
+    setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
 
     // If the position is not updated while the timestamp is updated for more than a certain amount,
     // the timestamp reported from the HAL may not be accurate. Here, a timestamp grace period is
@@ -237,9 +265,6 @@
     if (mMmapStream != nullptr) {
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
-        // Apparently the above close is asynchronous. An attempt to open a new device
-        // right after a close can fail. Also some callbacks may still be in flight!
-        // FIXME Make closing synchronous.
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }
@@ -371,12 +396,8 @@
     asyncTask.detach();
 }
 
-void AAudioServiceEndpointMMAP::onVolumeChanged(audio_channel_mask_t channels,
-                                              android::Vector<float> values) {
-    // TODO Do we really need a different volume for each channel?
-    // We get called with an array filled with a single value!
-    float volume = values[0];
-    ALOGD("%s() volume[0] = %f", __func__, volume);
+void AAudioServiceEndpointMMAP::onVolumeChanged(float volume) {
+    ALOGD("%s() volume = %f", __func__, volume);
     std::lock_guard<std::mutex> lock(mLockStreams);
     for(const auto& stream : mRegisteredStreams) {
         stream->onVolumeChanged(volume);
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3e7f2c7..73e0f61 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -77,8 +77,7 @@
     // -------------- Callback functions for MmapStreamCallback ---------------------
     void onTearDown(audio_port_handle_t portHandle) override;
 
-    void onVolumeChanged(audio_channel_mask_t channels,
-                         android::Vector<float> values) override;
+    void onVolumeChanged(float volume) override;
 
     void onRoutingChanged(audio_port_handle_t portHandle) override;
     // ------------------------------------------------------------------------------
@@ -93,7 +92,7 @@
 
 private:
 
-    aaudio_result_t openWithFormat(audio_format_t audioFormat);
+    aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
 
     aaudio_result_t createMmapBuffer(android::base::unique_fd* fileDescriptor);
 
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index dd421fe..02202d8 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -82,6 +82,9 @@
     setDeviceId(mStreamInternal->getDeviceId());
     setSessionId(mStreamInternal->getSessionId());
     setFormat(AUDIO_FORMAT_PCM_FLOAT); // force for mixer
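+    // A shared endpoint reports the hardware values of its underlying internal MMAP stream.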
+    setHardwareSampleRate(mStreamInternal->getHardwareSampleRate());
+    setHardwareFormat(mStreamInternal->getHardwareFormat());
+    setHardwareSamplesPerFrame(mStreamInternal->getHardwareSamplesPerFrame());
     mFramesPerBurst = mStreamInternal->getFramesPerBurst();
 
     return result;
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index f4ee84f..35d712c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -84,7 +84,8 @@
 
 std::string AAudioServiceStreamBase::dumpHeader() {
     return std::string(
-            "    T   Handle   UId   Port Run State Format Burst Chan Mask     Capacity");
+            "    T   Handle   UId   Port Run State Format Burst Chan Mask     Capacity"
+            " HwFormat HwChan HwRate");
 }
 
 std::string AAudioServiceStreamBase::dump() const {
@@ -101,6 +102,9 @@
     result << std::setw(5) << getSamplesPerFrame();
     result << std::setw(8) << std::hex << getChannelMask() << std::dec;
     result << std::setw(9) << getBufferCapacity();
+    result << std::setw(9) << getHardwareFormat();
+    result << std::setw(7) << getHardwareSamplesPerFrame();
+    result << std::setw(7) << getHardwareSampleRate();
 
     return result.str();
 }
@@ -278,7 +282,7 @@
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
-    sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
+    sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED, static_cast<int64_t>(mClientHandle));
     setState(AAUDIO_STREAM_STATE_STARTED);
 
     return result;
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 605ac01..91ae511 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -29,6 +29,9 @@
 
 cc_fuzz {
     name: "oboeservice_fuzzer",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     srcs: [
         "oboeservice_fuzzer.cpp",
     ],
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
index ae7af3eb..617822f 100644
--- a/services/oboeservice/fuzzer/README.md
+++ b/services/oboeservice/fuzzer/README.md
@@ -23,21 +23,24 @@
 12. InputPreset
 13. BufferCapacity
 
-| Parameter| Valid Input Values| Configured Value|
-|------------- |-------------| ----- |
-| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `UserId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
-| `ProcessId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
-| `InService`   | `bool` | Value obtained from FuzzedDataProvider |
-| `DeviceId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `SampleRate`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `ChannelMask` | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
-| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| Parameter                 | Valid Input Values| Configured Value|
+|---------------------------|-------------| ----- |
+| `Format`                  | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId`                  | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId`               | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService`               | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId`                | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate`              | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `ChannelMask`             | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
+| `Direction`               | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode`             | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage`                   | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType`             | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset`             | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity`          | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSampleRate`      | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareFormat`          | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
 
 This also ensures that the plugin is always deterministic for any given input.
 
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index 5e48955..6dc6eff 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -34,6 +34,9 @@
     AAUDIO_FORMAT_UNSPECIFIED,
     AAUDIO_FORMAT_PCM_I16,
     AAUDIO_FORMAT_PCM_FLOAT,
+    AAUDIO_FORMAT_PCM_I24_PACKED,
+    AAUDIO_FORMAT_PCM_I32,
+    AAUDIO_FORMAT_IEC61937
 };
 
 aaudio_usage_t kAAudioUsages[] = {
@@ -400,6 +403,13 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
+    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
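+    // About half of the runs feed an arbitrary int32 as the hardware format to also cover invalid values.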
+    request.getConfiguration().setHardwareFormat((audio_format_t)(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
     aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
     if (stream < 0) {
         // invalid request, stream not opened.
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index 5c1dda1..0649061 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,9 +15,8 @@
     imports: [
         "android.hardware.common-V2",
         "android.hardware.common.fmq-V1",
-        "android.hardware.tv.tuner-V1",
+        "android.hardware.tv.tuner-V2",
     ],
-
     backend: {
         java: {
             enabled: false,
@@ -42,7 +41,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V1-ndk",
+        "android.hardware.tv.tuner-V2-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
@@ -85,7 +84,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V1-ndk",
+        "android.hardware.tv.tuner-V2-ndk",
         "libbase",
         "libbinder",
         "libfmq",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index a6f3a2c..92fa970 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -26,6 +26,7 @@
 #include <aidl/android/hardware/tv/tuner/Result.h>
 
 #include "TunerDvr.h"
+#include "TunerService.h"
 #include "TunerTimeFilter.h"
 
 using ::aidl::android::hardware::tv::tuner::IDvr;
@@ -41,23 +42,21 @@
 namespace tv {
 namespace tuner {
 
-TunerDemux::TunerDemux(shared_ptr<IDemux> demux, int id) {
+TunerDemux::TunerDemux(const shared_ptr<IDemux> demux, const int id,
+                       const shared_ptr<TunerService> tuner) {
     mDemux = demux;
     mDemuxId = id;
+    mTunerService = tuner;
 }
 
 TunerDemux::~TunerDemux() {
+    close();
     mDemux = nullptr;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDemux::setFrontendDataSource(
         const shared_ptr<ITunerFrontend>& in_frontend) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int frontendId;
     in_frontend->getFrontendId(&frontendId);
 
@@ -65,43 +64,26 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::setFrontendDataSourceById(int frontendId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->setFrontendDataSource(frontendId);
 }
 
 ::ndk::ScopedAStatus TunerDemux::openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
                                             const shared_ptr<ITunerFilterCallback>& in_cb,
                                             shared_ptr<ITunerFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> filter;
     shared_ptr<TunerFilter::FilterCallback> filterCb =
             ::ndk::SharedRefBase::make<TunerFilter::FilterCallback>(in_cb);
     shared_ptr<IFilterCallback> cb = filterCb;
     auto status = mDemux->openFilter(in_type, in_bufferSize, cb, &filter);
     if (status.isOk()) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type);
+        *_aidl_return =
+                ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type, mTunerService);
     }
 
     return status;
 }
 
 ::ndk::ScopedAStatus TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<ITimeFilter> filter;
     auto status = mDemux->openTimeFilter(&filter);
     if (status.isOk()) {
@@ -113,35 +95,17 @@
 
 ::ndk::ScopedAStatus TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
                                                int32_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(tunerFilter.get()))->getHalFilter();
     return mDemux->getAvSyncHwId(halFilter, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->getAvSyncTime(avSyncHwId, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
                                          const shared_ptr<ITunerDvrCallback>& in_cb,
                                          shared_ptr<ITunerDvr>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IDvrCallback> callback = ::ndk::SharedRefBase::make<TunerDvr::DvrCallback>(in_cb);
     shared_ptr<IDvr> halDvr;
     auto res = mDemux->openDvr(in_dvbType, in_bufferSize, callback, &halDvr);
@@ -153,36 +117,15 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::connectCiCam(int32_t ciCamId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->connectCiCam(ciCamId);
 }
 
 ::ndk::ScopedAStatus TunerDemux::disconnectCiCam() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->disconnectCiCam();
 }
 
 ::ndk::ScopedAStatus TunerDemux::close() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mDemux->close();
-    mDemux = nullptr;
-
-    return res;
+    return mDemux->close();
 }
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index cdb3aa0..0c71987 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -32,10 +32,13 @@
 namespace tv {
 namespace tuner {
 
+class TunerService;
+
 class TunerDemux : public BnTunerDemux {
 
 public:
-    TunerDemux(shared_ptr<IDemux> demux, int demuxId);
+    TunerDemux(const shared_ptr<IDemux> demux, const int demuxId,
+               const shared_ptr<TunerService> tuner);
     virtual ~TunerDemux();
 
     ::ndk::ScopedAStatus setFrontendDataSource(
@@ -60,6 +63,7 @@
 private:
     shared_ptr<IDemux> mDemux;
     int mDemuxId;
+    shared_ptr<TunerService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index 70aee20..ffe0be9 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,38 +41,21 @@
 }
 
 TunerDescrambler::~TunerDescrambler() {
+    close();
     mDescrambler = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::setDemuxSource(
         const shared_ptr<ITunerDemux>& in_tunerDemux) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDescrambler->setDemuxSource((static_cast<TunerDemux*>(in_tunerDemux.get()))->getId());
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDescrambler->setKeyToken(in_keyToken);
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::addPid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -83,12 +66,6 @@
 
 ::ndk::ScopedAStatus TunerDescrambler::removePid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -98,16 +75,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::close() {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mDescrambler->close();
-    mDescrambler = nullptr;
-
-    return res;
+    return mDescrambler->close();
 }
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index 8776f7e..fcee966 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,36 +37,19 @@
 }
 
 TunerDvr::~TunerDvr() {
+    close();
     mDvr = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->getQueueDesc(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDvr::configure(const DvrSettings& in_settings) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->configure(in_settings);
 }
 
 ::ndk::ScopedAStatus TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -82,12 +65,6 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -103,46 +80,34 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::start() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->start();
 }
 
 ::ndk::ScopedAStatus TunerDvr::stop() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->stop();
 }
 
 ::ndk::ScopedAStatus TunerDvr::flush() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->flush();
 }
 
 ::ndk::ScopedAStatus TunerDvr::close() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
+    return mDvr->close();
+}
+
+::ndk::ScopedAStatus TunerDvr::setStatusCheckIntervalHint(const int64_t milliseconds) {
+    if (milliseconds < 0L) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    ::ndk::ScopedAStatus s = mDvr->setStatusCheckIntervalHint(milliseconds);
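+    // STATUS_UNKNOWN_TRANSACTION indicates the connected tuner HAL does not implement this
+    // method; report it as UNAVAILABLE.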
+    if (s.getStatus() == STATUS_UNKNOWN_TRANSACTION) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    auto status = mDvr->close();
-    mDvr = nullptr;
-
-    return status;
+    return s;
 }
 
 /////////////// IDvrCallback ///////////////////////
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 1854d08..2330e7b 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -61,6 +61,7 @@
     ::ndk::ScopedAStatus stop() override;
     ::ndk::ScopedAStatus flush() override;
     ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
 
     struct DvrCallback : public BnDvrCallback {
         DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index e8c7767..478e7ea 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -36,28 +36,28 @@
 
 using namespace std;
 
-TunerFilter::TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb,
-                         DemuxFilterType type)
+TunerFilter::TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+                         const DemuxFilterType type, const shared_ptr<TunerService> tuner)
       : mFilter(filter),
         mType(type),
         mStarted(false),
         mShared(false),
         mClientPid(-1),
-        mFilterCallback(cb) {}
+        mFilterCallback(cb),
+        mTunerService(tuner) {}
 
 TunerFilter::~TunerFilter() {
-    Mutex::Autolock _l(mLock);
-    mFilter = nullptr;
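+    // Close the filter and release any shared-filter token before dropping the HAL and service references.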
+    close();
+    freeSharedFilterToken("");
+    {
+        Mutex::Autolock _l(mLock);
+        mFilter = nullptr;
+        mTunerService = nullptr;
+    }
 }
 
 ::ndk::ScopedAStatus TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -73,12 +73,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::getId(int32_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -94,12 +88,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::getId64Bit(int64_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -115,12 +103,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configure(const DemuxFilterSettings& in_settings) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -132,12 +114,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureMonitorEvent(int32_t monitorEventType) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -149,12 +125,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureIpFilterContextId(int32_t cid) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -166,12 +136,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureAvStreamType(const AvStreamType& in_avStreamType) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -183,12 +147,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -207,12 +165,6 @@
 ::ndk::ScopedAStatus TunerFilter::getAvSharedHandle(NativeHandle* out_avMemory,
                                                     int64_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -225,12 +177,6 @@
 ::ndk::ScopedAStatus TunerFilter::releaseAvHandle(const NativeHandle& in_handle,
                                                   int64_t in_avDataId) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -242,12 +188,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::start() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -267,12 +207,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::stop() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -291,12 +225,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::flush() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -312,12 +240,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::close() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -326,7 +248,7 @@
                 mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
                 mFilterCallback->detachSharedFilterCallback();
             }
-            TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+            mTunerService->removeSharedFilter(this->ref<TunerFilter>());
         } else {
             // Calling from shared process, do not really close this filter.
             if (mFilterCallback != nullptr) {
@@ -341,7 +263,6 @@
         mFilterCallback->detachCallbacks();
     }
     auto res = mFilter->close();
-    mFilter = nullptr;
     mStarted = false;
     mShared = false;
     mClientPid = -1;
@@ -351,12 +272,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::acquireSharedFilterToken(string* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared || mStarted) {
         ALOGD("create SharedFilter in wrong state");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -365,7 +280,7 @@
 
     IPCThreadState* ipc = IPCThreadState::self();
     mClientPid = ipc->getCallingPid();
-    string token = TunerService::getTunerService()->addFilterToShared(this->ref<TunerFilter>());
+    string token = mTunerService->addFilterToShared(this->ref<TunerFilter>());
     _aidl_return->assign(token);
     mShared = true;
 
@@ -374,12 +289,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!mShared) {
         // The filter is not shared or the shared filter has been closed.
         return ::ndk::ScopedAStatus::ok();
@@ -390,7 +299,7 @@
         mFilterCallback->detachSharedFilterCallback();
     }
 
-    TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+    mTunerService->removeSharedFilter(this->ref<TunerFilter>());
     mShared = false;
 
     return ::ndk::ScopedAStatus::ok();
@@ -398,24 +307,12 @@
 
 ::ndk::ScopedAStatus TunerFilter::getFilterType(DemuxFilterType* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     *_aidl_return = mType;
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerFilter::setDelayHint(const FilterDelayHint& in_hint) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFilter->setDelayHint(in_hint);
 }
 
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index 93d8898..f6178c4 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -53,8 +53,9 @@
 
 using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
 
-class TunerFilter : public BnTunerFilter {
+class TunerService;
 
+class TunerFilter : public BnTunerFilter {
 public:
     class FilterCallback : public BnFilterCallback {
     public:
@@ -75,7 +76,8 @@
         Mutex mCallbackLock;
     };
 
-    TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb, DemuxFilterType type);
+    TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+                const DemuxFilterType type, const shared_ptr<TunerService> tuner);
     virtual ~TunerFilter();
 
     ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -113,6 +115,7 @@
     int32_t mClientPid;
     shared_ptr<FilterCallback> mFilterCallback;
     Mutex mLock;
+    shared_ptr<TunerService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 5116305..1e93d95 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,18 +37,13 @@
 }
 
 TunerFrontend::~TunerFrontend() {
+    close();
     mFrontend = nullptr;
     mId = -1;
 }
 
 ::ndk::ScopedAStatus TunerFrontend::setCallback(
         const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (tunerFrontendCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -60,53 +55,23 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::tune(const FrontendSettings& settings) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->tune(settings);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::stopTune() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->stopTune();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::scan(const FrontendSettings& settings,
                                          FrontendScanType frontendScanType) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->scan(settings, frontendScanType);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::stopScan() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->stopScan();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (lnb == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,46 +81,19 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::linkCiCamToFrontend(int32_t ciCamId, int32_t* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->linkCiCam(ciCamId, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::unlinkCiCamToFrontend(int32_t ciCamId) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->unlinkCiCam(ciCamId);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::close() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mFrontend->close();
-    mFrontend = nullptr;
-
-    return res;
+    return mFrontend->close();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
                                               vector<FrontendStatus>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getStatus(in_statusTypes, _aidl_return);
 }
 
@@ -165,34 +103,16 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getHardwareInfo(std::string* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getHardwareInfo(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::removeOutputPid(int32_t in_pid) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->removeOutputPid(in_pid);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getFrontendStatusReadiness(
         const std::vector<FrontendStatusType>& in_statusTypes,
         std::vector<FrontendStatusReadiness>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getFrontendStatusReadiness(in_statusTypes, _aidl_return);
 }
 
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 1e143c3..2fb6135 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,18 +36,13 @@
 }
 
 TunerLnb::~TunerLnb() {
+    close();
     mLnb = nullptr;
     mId = -1;
 }
 
 ::ndk::ScopedAStatus TunerLnb::setCallback(
         const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_tunerLnbCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -59,56 +54,23 @@
 }
 
 ::ndk::ScopedAStatus TunerLnb::setVoltage(LnbVoltage in_voltage) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setVoltage(in_voltage);
 }
 
 ::ndk::ScopedAStatus TunerLnb::setTone(LnbTone in_tone) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setTone(in_tone);
 }
 
 ::ndk::ScopedAStatus TunerLnb::setSatellitePosition(LnbPosition in_position) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setSatellitePosition(in_position);
 }
 
 ::ndk::ScopedAStatus TunerLnb::sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->sendDiseqcMessage(in_diseqcMessage);
 }
 
 ::ndk::ScopedAStatus TunerLnb::close() {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mLnb->close();
-    mLnb = nullptr;
-
-    return res;
+    return mLnb->close();
 }
 
 /////////////// ILnbCallback ///////////////////////
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 4833aaf..d59d95f 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -27,6 +27,7 @@
 #include <android/binder_manager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/PermissionCache.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 #include <string>
@@ -51,65 +52,45 @@
 namespace tv {
 namespace tuner {
 
-shared_ptr<TunerService> TunerService::sTunerService = nullptr;
-
 TunerService::TunerService() {
-    if (!TunerHelper::checkTunerFeature()) {
-        ALOGD("Device doesn't have tuner hardware.");
-        return;
+    const string statsServiceName = string() + ITuner::descriptor + "/default";
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
+    mTuner = ITuner::fromBinder(binder);
+    ALOGE_IF(mTuner == nullptr, "Failed to get Tuner HAL Service");
+
+    mTunerVersion = TUNER_HAL_VERSION_2_0;
+    if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
+        // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
+        mTunerVersion = (mTunerVersion + 1) << 16;
     }
 
+    // Register the tuner resources to TRM.
     updateTunerResources();
 }
 
-TunerService::~TunerService() {}
+TunerService::~TunerService() {
+    mTuner = nullptr;
+}
 
 binder_status_t TunerService::instantiate() {
-    sTunerService = ::ndk::SharedRefBase::make<TunerService>();
-    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerService> TunerService::getTunerService() {
-    return sTunerService;
-}
-
-bool TunerService::hasITuner() {
-    ALOGV("hasITuner");
-    if (mTuner != nullptr) {
-        return true;
+    shared_ptr<TunerService> tunerService = ::ndk::SharedRefBase::make<TunerService>();
+    bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+    if (lazyHal) {
+        return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+                                                   getServiceName());
     }
-    const string statsServiceName = string() + ITuner::descriptor + "/default";
-    if (AServiceManager_isDeclared(statsServiceName.c_str())) {
-        ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
-        mTuner = ITuner::fromBinder(binder);
-    } else {
-        mTuner = nullptr;
-        ALOGE("Failed to get Tuner HAL Service");
-        return false;
-    }
-
-    mTunerVersion = TUNER_HAL_VERSION_2_0;
-    // TODO: Enable this after Tuner HAL is frozen.
-    // if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
-    //  // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
-    //  mTunerVersion = (mTunerVersion + 1) << 16;
-    //}
-
-    return true;
+    return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
 ::ndk::ScopedAStatus TunerService::openDemux(int32_t /* in_demuxHandle */,
                                              shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
     vector<int32_t> id;
     shared_ptr<IDemux> demux;
     auto status = mTuner->openDemux(&id, &demux);
     if (status.isOk()) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, id[0]);
+        *_aidl_return =
+                ::ndk::SharedRefBase::make<TunerDemux>(demux, id[0], this->ref<TunerService>());
     }
 
     return status;
@@ -117,41 +98,19 @@
 
 ::ndk::ScopedAStatus TunerService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
     ALOGV("getDemuxCaps");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getDemuxCaps(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerService::getFrontendIds(vector<int32_t>* ids) {
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getFrontendIds(ids);
 }
 
 ::ndk::ScopedAStatus TunerService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getFrontendInfo(id, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
                                                 shared_ptr<ITunerFrontend>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
     shared_ptr<IFrontend> frontend;
     auto status = mTuner->openFrontendById(id, &frontend);
@@ -163,12 +122,6 @@
 }
 
 ::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<ILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
     auto status = mTuner->openLnbById(id, &lnb);
@@ -181,12 +134,6 @@
 
 ::ndk::ScopedAStatus TunerService::openLnbByName(const string& lnbName,
                                                  shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     vector<int32_t> id;
     shared_ptr<ILnb> lnb;
     auto status = mTuner->openLnbByName(lnbName, &id, &lnb);
@@ -199,12 +146,6 @@
 
 ::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
                                                    shared_ptr<ITunerDescrambler>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IDescrambler> descrambler;
     // int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
     auto status = mTuner->openDescrambler(&descrambler);
@@ -216,7 +157,6 @@
 }
 
 ::ndk::ScopedAStatus TunerService::getTunerHalVersion(int* _aidl_return) {
-    hasITuner();
     *_aidl_return = mTunerVersion;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -224,12 +164,6 @@
 ::ndk::ScopedAStatus TunerService::openSharedFilter(const string& in_filterToken,
                                                     const shared_ptr<ITunerFilterCallback>& in_cb,
                                                     shared_ptr<ITunerFilter>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!PermissionCache::checkCallingPermission(sSharedFilterPermission)) {
         ALOGE("Request requires android.permission.ACCESS_TV_SHARED_FILTER");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -260,35 +194,22 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
+::ndk::ScopedAStatus TunerService::isLnaSupported(bool* _aidl_return) {
+    ALOGV("isLnaSupported");
+    return mTuner->isLnaSupported(_aidl_return);
+}
 
+::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
     return mTuner->setLna(bEnable);
 }
 
 ::ndk::ScopedAStatus TunerService::setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                            int32_t in_maxNumber) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->setMaxNumberOfFrontends(in_frontendType, in_maxNumber);
 }
 
 ::ndk::ScopedAStatus TunerService::getMaxNumberOfFrontends(FrontendType in_frontendType,
                                                            int32_t* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getMaxNumberOfFrontends(in_frontendType, _aidl_return);
 }
 
@@ -309,11 +230,6 @@
 }
 
 void TunerService::updateTunerResources() {
-    if (!hasITuner()) {
-        ALOGE("Failed to updateTunerResources");
-        return;
-    }
-
     TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
 }
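With the lazy getter gone, the constructor above fetches the Tuner AIDL HAL once and derives the framework-level version with (mTunerVersion + 1) << 16, while instantiate() switches to AServiceManager_registerLazyService when ro.tuner.lazyhal is set. A minimal standalone sketch of the version arithmetic, assuming the TUNER_HAL_VERSION_* constants use a (major << 16) | minor encoding (the constant values themselves are not shown in this diff):

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t aidlVersion = 1;                       // Tuner AIDL HAL interface version 1
        int32_t halVersion = (aidlVersion + 1) << 16;  // 0x00020000, i.e. Tuner HAL 2.0 under the assumed encoding
        std::printf("AIDL v%d -> Tuner HAL 0x%08x\n", aidlVersion, halVersion);
        return 0;
    }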
 
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index 7fc2aa4..517df4a 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -77,6 +77,7 @@
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
                                           const shared_ptr<ITunerFilterCallback>& in_cb,
                                           shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
     ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
     ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                  int32_t in_maxNumber) override;
@@ -86,10 +87,7 @@
     string addFilterToShared(const shared_ptr<TunerFilter>& sharedFilter);
     void removeSharedFilter(const shared_ptr<TunerFilter>& sharedFilter);
 
-    static shared_ptr<TunerService> getTunerService();
-
 private:
-    bool hasITuner();
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
     vector<int32_t> getTRMLnbHandles();
@@ -98,8 +96,6 @@
     int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
     Mutex mSharedFiltersLock;
     map<string, shared_ptr<TunerFilter>> mSharedFilters;
-
-    static shared_ptr<TunerService> sTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 73cd6b4..385a063 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,37 +35,19 @@
 }
 
 TunerTimeFilter::~TunerTimeFilter() {
+    close();
     mTimeFilter = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTimeFilter->setTimeStamp(timeStamp);
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::clearTimeStamp() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTimeFilter->clearTimeStamp();
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     auto status = mTimeFilter->getSourceTime(_aidl_return);
     if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -74,13 +56,6 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     auto status = mTimeFilter->getTimeStamp(_aidl_return);
     if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -89,16 +64,7 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::close() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto status = mTimeFilter->close();
-    mTimeFilter = nullptr;
-
-    return status;
+    return mTimeFilter->close();
 }
 
 }  // namespace tuner
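getSourceTime() and getTimeStamp() still report INVALID_PRESENTATION_TIME_STAMP through the out parameter whenever the HAL call fails, so callers can rely on the sentinel as well as on the returned status. A minimal caller-side sketch, with a hypothetical kInvalidPts standing in for Constant64Bit::INVALID_PRESENTATION_TIME_STAMP (its value is not shown in this diff):

    #include <cstdint>
    #include <cstdio>

    // Hypothetical sentinel standing in for Constant64Bit::INVALID_PRESENTATION_TIME_STAMP.
    constexpr int64_t kInvalidPts = -1;

    // Stand-in for ITunerTimeFilter::getSourceTime(): on failure the out
    // parameter is still set to the invalid-PTS sentinel, as in the code above.
    bool getSourceTime(int64_t* out) {
        *out = kInvalidPts;  // failure path
        return false;
    }

    int main() {
        int64_t pts = 0;
        if (!getSourceTime(&pts) || pts == kInvalidPts) {
            std::printf("no valid source time available\n");
        }
        return 0;
    }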
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
index 2c01c4e..cafe075 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -66,4 +66,9 @@
      * close the DVR instance to release resource for DVR.
      */
     void close();
+
+    /**
+     * Set a hint for the DVR status check time interval, in milliseconds.
+     */
+    void setStatusCheckIntervalHint(in long milliseconds);
 }
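setStatusCheckIntervalHint() is only a hint; backends that cannot honor it (the HIDL-backed TunerHidlDvr later in this change reports UNAVAILABLE) should not break callers. A minimal caller-side sketch with stand-ins for the generated ITunerDvr proxy and the service-specific error code, neither of which is part of this diff:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in for the service-specific UNAVAILABLE error; the real
    // value comes from the Tuner Result enum and is not reproduced here.
    constexpr int32_t kUnavailable = -1;

    // Stand-in for ITunerDvr::setStatusCheckIntervalHint() on a backend that
    // cannot honor the hint.
    int32_t setStatusCheckIntervalHint(int64_t /* milliseconds */) {
        return kUnavailable;
    }

    int main() {
        // Treat the hint as best-effort: keep going when it is not supported.
        if (setStatusCheckIntervalHint(500) == kUnavailable) {
            std::printf("status check interval hint not supported; using defaults\n");
        }
        return 0;
    }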
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index b8084ab..8d285e3 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -107,6 +107,13 @@
     ITunerFilter openSharedFilter(in String filterToken, in ITunerFilterCallback cb);
 
     /**
+     * Checks whether the Tuner supports a Low Noise Amplifier (LNA).
+     *
+     * @return {@code true} if supported, otherwise {@code false}.
+     */
+    boolean isLnaSupported();
+
+    /**
      * Enable or Disable Low Noise Amplifier (LNA).
      *
      * @param bEnable enable Lna or not.
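isLnaSupported() lets clients query the capability before toggling it with setLna(). A minimal sketch of that check-then-set pattern, with local stand-ins for the generated ITunerService proxy methods (the real calls return ScopedAStatus and use an out parameter, as in the service implementation above):

    #include <cstdio>

    // Stand-ins for ITunerService::isLnaSupported()/setLna(); the generated NDK
    // proxies are not part of this diff.
    bool isLnaSupported() { return true; }
    void setLna(bool enable) { std::printf("LNA %s\n", enable ? "enabled" : "disabled"); }

    int main() {
        // Query the capability first and only toggle the LNA when it exists.
        if (isLnaSupported()) {
            setLna(true);
        }
        return 0;
    }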
diff --git a/services/tuner/hidl/TunerHidlDemux.cpp b/services/tuner/hidl/TunerHidlDemux.cpp
index a8151d2..bbb7782 100644
--- a/services/tuner/hidl/TunerHidlDemux.cpp
+++ b/services/tuner/hidl/TunerHidlDemux.cpp
@@ -20,6 +20,7 @@
 
 #include "TunerHidlDvr.h"
 #include "TunerHidlFilter.h"
+#include "TunerHidlService.h"
 #include "TunerHidlTimeFilter.h"
 
 using ::aidl::android::hardware::tv::tuner::DemuxFilterSubType;
@@ -42,23 +43,20 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlDemux::TunerHidlDemux(sp<IDemux> demux, int id) {
+TunerHidlDemux::TunerHidlDemux(const sp<IDemux> demux, const int id,
+                               const shared_ptr<TunerHidlService> tuner) {
     mDemux = demux;
     mDemuxId = id;
+    mTunerService = tuner;
 }
 
 TunerHidlDemux::~TunerHidlDemux() {
     mDemux = nullptr;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSource(
         const shared_ptr<ITunerFrontend>& in_frontend) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     int frontendId;
     in_frontend->getFrontendId(&frontendId);
     HidlResult res = mDemux->setFrontendDataSource(frontendId);
@@ -69,12 +67,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSourceById(int frontendId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->setFrontendDataSource(frontendId);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -86,12 +78,6 @@
                                                 int32_t in_bufferSize,
                                                 const shared_ptr<ITunerFilterCallback>& in_cb,
                                                 shared_ptr<ITunerFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlDemuxFilterMainType mainType = static_cast<HidlDemuxFilterMainType>(in_type.mainType);
     HidlDemuxFilterType filterType{
             .mainType = mainType,
@@ -132,17 +118,12 @@
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type);
+    *_aidl_return =
+            ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type, mTunerService);
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlITimeFilter> filterSp;
     mDemux->openTimeFilter([&](HidlResult r, const sp<HidlITimeFilter>& filter) {
@@ -159,12 +140,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
                                                    int32_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     uint32_t avSyncHwId;
     HidlResult res;
     sp<HidlIFilter> halFilter = static_cast<TunerHidlFilter*>(tunerFilter.get())->getHalFilter();
@@ -181,12 +156,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     uint64_t time;
     HidlResult res;
     mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId), [&](HidlResult r, uint64_t ts) {
@@ -204,12 +173,6 @@
 ::ndk::ScopedAStatus TunerHidlDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
                                              const shared_ptr<ITunerDvrCallback>& in_cb,
                                              shared_ptr<ITunerDvr>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res;
     sp<HidlIDvrCallback> callback = new TunerHidlDvr::DvrCallback(in_cb);
     sp<HidlIDvr> hidlDvr;
@@ -228,12 +191,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::connectCiCam(int32_t ciCamId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -242,12 +199,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::disconnectCiCam() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->disconnectCiCam();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -256,15 +207,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::close() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->close();
-    mDemux = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/hidl/TunerHidlDemux.h b/services/tuner/hidl/TunerHidlDemux.h
index d535da6..94a715e 100644
--- a/services/tuner/hidl/TunerHidlDemux.h
+++ b/services/tuner/hidl/TunerHidlDemux.h
@@ -37,9 +37,12 @@
 namespace tv {
 namespace tuner {
 
+class TunerHidlService;
+
 class TunerHidlDemux : public BnTunerDemux {
 public:
-    TunerHidlDemux(sp<HidlIDemux> demux, int demuxId);
+    TunerHidlDemux(const sp<HidlIDemux> demux, const int demuxId,
+                   const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlDemux();
 
     ::ndk::ScopedAStatus setFrontendDataSource(
@@ -64,6 +67,7 @@
 private:
     sp<HidlIDemux> mDemux;
     int mDemuxId;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlDescrambler.cpp b/services/tuner/hidl/TunerHidlDescrambler.cpp
index dd8cd9c..51b7ede 100644
--- a/services/tuner/hidl/TunerHidlDescrambler.cpp
+++ b/services/tuner/hidl/TunerHidlDescrambler.cpp
@@ -45,12 +45,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::setDemuxSource(
         const shared_ptr<ITunerDemux>& in_tunerDemux) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->setDemuxSource(
             static_cast<TunerHidlDemux*>(in_tunerDemux.get())->getId());
     if (res != HidlResult::SUCCESS) {
@@ -60,12 +54,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->setKeyToken(in_keyToken);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -75,12 +63,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::addPid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     sp<HidlIFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -94,12 +76,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::removePid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     sp<HidlIFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -112,15 +88,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::close() {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->close();
-    mDescrambler = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 1a619d5..8083a6e 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -54,12 +54,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     MQDesc dvrMQDesc;
     HidlResult res;
     mDvr->getQueueDesc([&](HidlResult r, const MQDesc& desc) {
@@ -77,12 +71,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::configure(const DvrSettings& in_settings) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->configure(getHidlDvrSettings(in_settings));
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -91,12 +79,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,12 +98,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -141,12 +117,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::start() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->start();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -155,12 +125,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::stop() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->stop();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -169,12 +133,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::flush() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->flush();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -183,21 +141,18 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::close() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->close();
-    mDvr = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
     return ::ndk::ScopedAStatus::ok();
 }
 
+::ndk::ScopedAStatus TunerHidlDvr::setStatusCheckIntervalHint(int64_t /* in_milliseconds */) {
+    HidlResult res = HidlResult::UNAVAILABLE;
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
 HidlDvrSettings TunerHidlDvr::getHidlDvrSettings(const DvrSettings& settings) {
     HidlDvrSettings s;
     switch (mType) {
diff --git a/services/tuner/hidl/TunerHidlDvr.h b/services/tuner/hidl/TunerHidlDvr.h
index a280ff7..aa86b14 100644
--- a/services/tuner/hidl/TunerHidlDvr.h
+++ b/services/tuner/hidl/TunerHidlDvr.h
@@ -63,6 +63,7 @@
     ::ndk::ScopedAStatus stop() override;
     ::ndk::ScopedAStatus flush() override;
     ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
 
     struct DvrCallback : public HidlIDvrCallback {
         DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index fe74a5c..d6a0cae 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -92,31 +92,32 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlFilter::TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb,
-                                 DemuxFilterType type)
+TunerHidlFilter::TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+                                 const DemuxFilterType type,
+                                 const shared_ptr<TunerHidlService> tuner)
       : mFilter(filter),
         mType(type),
         mStarted(false),
         mShared(false),
         mClientPid(-1),
-        mFilterCallback(cb) {
+        mFilterCallback(cb),
+        mTunerService(tuner) {
     mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
 }
 
 TunerHidlFilter::~TunerHidlFilter() {
-    Mutex::Autolock _l(mLock);
-    mFilter = nullptr;
-    mFilter_1_1 = nullptr;
+    freeSharedFilterToken("");
+
+    {
+        Mutex::Autolock _l(mLock);
+        mFilter = nullptr;
+        mFilter_1_1 = nullptr;
+        mTunerService = nullptr;
+    }
 }
 
 ::ndk::ScopedAStatus TunerHidlFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -146,12 +147,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::getId(int32_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -200,12 +195,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::configure(const DemuxFilterSettings& in_settings) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -318,12 +307,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -378,12 +361,6 @@
 ::ndk::ScopedAStatus TunerHidlFilter::releaseAvHandle(const NativeHandle& in_handle,
                                                       int64_t in_avDataId) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -407,12 +384,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::start() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -434,12 +405,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::stop() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -461,12 +426,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::flush() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -487,12 +446,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::close() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -501,7 +454,7 @@
                 mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
                 mFilterCallback->detachSharedFilterCallback();
             }
-            TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+            mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
         } else {
             // Calling from shared process, do not really close this filter.
             if (mFilterCallback != nullptr) {
@@ -516,8 +469,6 @@
         mFilterCallback->detachCallbacks();
     }
     HidlResult res = mFilter->close();
-    mFilter = nullptr;
-    mFilter_1_1 = nullptr;
     mStarted = false;
     mShared = false;
     mClientPid = -1;
@@ -531,12 +482,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::acquireSharedFilterToken(string* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared || mStarted) {
         ALOGD("create SharedFilter in wrong state");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -545,8 +490,7 @@
 
     IPCThreadState* ipc = IPCThreadState::self();
     mClientPid = ipc->getCallingPid();
-    string token =
-            TunerHidlService::getTunerService()->addFilterToShared(this->ref<TunerHidlFilter>());
+    string token = mTunerService->addFilterToShared(this->ref<TunerHidlFilter>());
     _aidl_return->assign(token);
     mShared = true;
 
@@ -555,12 +499,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!mShared) {
         // The filter is not shared or the shared filter has been closed.
         return ::ndk::ScopedAStatus::ok();
@@ -571,19 +509,13 @@
         mFilterCallback->detachSharedFilterCallback();
     }
 
-    TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+    mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
     mShared = false;
 
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlFilter::getFilterType(DemuxFilterType* _aidl_return) {
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     *_aidl_return = mType;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -908,6 +840,10 @@
                 static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scHevc>()));
         break;
     }
+    case DemuxFilterScIndexMask::scVvc: {
+        // scVvc has no HIDL counterpart, so this case should never be reached.
+        break;
+    }
     }
     return record;
 }
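The new TunerHidlFilter destructor calls freeSharedFilterToken("") before taking mLock in a nested scope; freeSharedFilterToken() locks mLock itself, so holding the lock across that call would self-deadlock on a non-recursive mutex. A minimal standalone sketch of that lock scoping, using std::mutex as a stand-in for the android Mutex (assumed here to be non-recursive):

    #include <mutex>

    class Holder {
    public:
        ~Holder() {
            release();  // takes the lock internally, so it must run before we lock below
            {
                std::lock_guard<std::mutex> l(mLock);  // then lock only for member cleanup
                mResource = 0;
            }
        }

        void release() {
            std::lock_guard<std::mutex> l(mLock);
            mResource = 0;
        }

    private:
        std::mutex mLock;
        int mResource = 1;
    };

    int main() {
        Holder h;  // destructor exercises the release-then-lock ordering
        return 0;
    }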
diff --git a/services/tuner/hidl/TunerHidlFilter.h b/services/tuner/hidl/TunerHidlFilter.h
index 63c7a1b..a58eeca 100644
--- a/services/tuner/hidl/TunerHidlFilter.h
+++ b/services/tuner/hidl/TunerHidlFilter.h
@@ -114,6 +114,8 @@
 const static int IP_V4_LENGTH = 4;
 const static int IP_V6_LENGTH = 16;
 
+class TunerHidlService;
+
 class TunerHidlFilter : public BnTunerFilter {
 public:
     class FilterCallback : public HidlIFilterCallback {
@@ -165,7 +167,8 @@
         Mutex mCallbackLock;
     };
 
-    TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb, DemuxFilterType type);
+    TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+                    const DemuxFilterType type, const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlFilter();
 
     ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -230,6 +233,7 @@
     int32_t mClientPid;
     sp<FilterCallback> mFilterCallback;
     Mutex mLock;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlFrontend.cpp b/services/tuner/hidl/TunerHidlFrontend.cpp
index 03957f3..7ffb2a4 100644
--- a/services/tuner/hidl/TunerHidlFrontend.cpp
+++ b/services/tuner/hidl/TunerHidlFrontend.cpp
@@ -176,26 +176,23 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlFrontend::TunerHidlFrontend(sp<HidlIFrontend> frontend, int id) {
+TunerHidlFrontend::TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+                                     const shared_ptr<TunerHidlService> tuner) {
     mFrontend = frontend;
     mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
     mId = id;
+    mTunerService = tuner;
 }
 
 TunerHidlFrontend::~TunerHidlFrontend() {
     mFrontend = nullptr;
     mFrontend_1_1 = nullptr;
     mId = -1;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::setCallback(
         const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (tunerFrontendCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -211,12 +208,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::tune(const FrontendSettings& settings) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     HidlFrontendSettings frontendSettings;
     HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -234,12 +225,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::stopTune() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mFrontend->stopTune();
     if (status == HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::ok();
@@ -250,12 +235,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFrontend::scan(const FrontendSettings& settings,
                                              FrontendScanType frontendScanType) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     HidlFrontendSettings frontendSettings;
     HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -276,12 +255,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::stopScan() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mFrontend->stopScan();
     if (status == HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::ok();
@@ -291,12 +264,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (lnb == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -349,18 +316,8 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::close() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    TunerHidlService::getTunerService()->removeFrontend(this->ref<TunerHidlFrontend>());
-
+    mTunerService->removeFrontend(this->ref<TunerHidlFrontend>());
     HidlResult status = mFrontend->close();
-    mFrontend = nullptr;
-    mFrontend_1_1 = nullptr;
-
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
@@ -370,12 +327,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
                                                   vector<FrontendStatus>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     vector<HidlFrontendStatus> status;
     vector<HidlFrontendStatusExt1_1> statusExt;
@@ -442,11 +393,6 @@
 }
 
 void TunerHidlFrontend::setLna(bool bEnable) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return;
-    }
-
     mFrontend->setLna(bEnable);
 }
 
diff --git a/services/tuner/hidl/TunerHidlFrontend.h b/services/tuner/hidl/TunerHidlFrontend.h
index f698655..f54127b 100644
--- a/services/tuner/hidl/TunerHidlFrontend.h
+++ b/services/tuner/hidl/TunerHidlFrontend.h
@@ -64,9 +64,12 @@
 namespace tv {
 namespace tuner {
 
+class TunerHidlService;
+
 class TunerHidlFrontend : public BnTunerFrontend {
 public:
-    TunerHidlFrontend(sp<HidlIFrontend> frontend, int id);
+    TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+                      const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlFrontend();
 
     ::ndk::ScopedAStatus setCallback(
@@ -118,6 +121,7 @@
     int mId;
     sp<HidlIFrontend> mFrontend;
     sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
index 6f55f1e..52005c2 100644
--- a/services/tuner/hidl/TunerHidlService.cpp
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -24,6 +24,7 @@
 #include <android/binder_manager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/PermissionCache.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 #include "TunerHelper.h"
@@ -46,7 +47,6 @@
 using ::aidl::android::hardware::tv::tuner::FrontendIsdbtTimeInterleaveMode;
 using ::aidl::android::hardware::tv::tuner::FrontendType;
 using ::aidl::android::hardware::tv::tuner::Result;
-using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
 using ::android::IPCThreadState;
 using ::android::PermissionCache;
 using ::android::hardware::hidl_vec;
@@ -63,47 +63,9 @@
 namespace tv {
 namespace tuner {
 
-shared_ptr<TunerHidlService> TunerHidlService::sTunerService = nullptr;
-
 TunerHidlService::TunerHidlService() {
-    if (!TunerHelper::checkTunerFeature()) {
-        ALOGD("Device doesn't have tuner hardware.");
-        return;
-    }
-
-    updateTunerResources();
-}
-
-TunerHidlService::~TunerHidlService() {
-    mOpenedFrontends.clear();
-    mLnaStatus = -1;
-}
-
-binder_status_t TunerHidlService::instantiate() {
-    if (HidlITuner::getService() == nullptr) {
-        ALOGD("Failed to get ITuner HIDL HAL");
-        return STATUS_NAME_NOT_FOUND;
-    }
-
-    sTunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
-    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerHidlService> TunerHidlService::getTunerService() {
-    return sTunerService;
-}
-
-bool TunerHidlService::hasITuner() {
-    ALOGV("hasITuner");
-    if (mTuner != nullptr) {
-        return true;
-    }
-
     mTuner = HidlITuner::getService();
-    if (mTuner == nullptr) {
-        ALOGE("Failed to get ITuner service");
-        return false;
-    }
+    ALOGE_IF(mTuner == nullptr, "Failed to get ITuner service");
     mTunerVersion = TUNER_HAL_VERSION_1_0;
 
     mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
@@ -113,23 +75,35 @@
         ALOGD("Failed to get ITuner_1_1 service");
     }
 
-    return true;
+    // Register tuner resources to TRM.
+    updateTunerResources();
 }
 
-bool TunerHidlService::hasITuner_1_1() {
-    ALOGV("hasITuner_1_1");
-    hasITuner();
-    return (mTunerVersion == TUNER_HAL_VERSION_1_1);
+TunerHidlService::~TunerHidlService() {
+    mOpenedFrontends.clear();
+    mLnaStatus = -1;
+    mTuner = nullptr;
+    mTuner_1_1 = nullptr;
+}
+
+binder_status_t TunerHidlService::instantiate() {
+    if (HidlITuner::getService() == nullptr) {
+        ALOGD("Failed to get ITuner HIDL HAL");
+        return STATUS_NAME_NOT_FOUND;
+    }
+
+    shared_ptr<TunerHidlService> tunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
+    bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+    if (lazyHal) {
+        return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+                                                   getServiceName());
+    }
+    return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
 ::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
                                                  shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     uint32_t id;
     sp<IDemux> demuxSp = nullptr;
@@ -140,7 +114,8 @@
         ALOGD("open demux, id = %d", demuxId);
     });
     if (res == HidlResult::SUCCESS) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id);
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id,
+                                                                   this->ref<TunerHidlService>());
         return ::ndk::ScopedAStatus::ok();
     }
 
@@ -150,11 +125,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
     ALOGV("getDemuxCaps");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     HidlDemuxCapabilities caps;
     mTuner->getDemuxCaps([&](HidlResult r, const HidlDemuxCapabilities& demuxCaps) {
@@ -171,11 +141,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getFrontendIds(vector<int32_t>* ids) {
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     hidl_vec<HidlFrontendId> feIds;
     HidlResult res = getHidlFrontendIds(feIds);
     if (res != HidlResult::SUCCESS) {
@@ -188,12 +153,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlFrontendInfo info;
     HidlResult res = getHidlFrontendInfo(id, info);
     if (res != HidlResult::SUCCESS) {
@@ -202,7 +161,7 @@
 
     HidlFrontendDtmbCapabilities dtmbCaps;
     if (static_cast<HidlFrontendType>(info.type) == HidlFrontendType::DTMB) {
-        if (!hasITuner_1_1()) {
+        if (mTuner_1_1 == nullptr) {
             ALOGE("ITuner_1_1 service is not init.");
             return ::ndk::ScopedAStatus::fromServiceSpecificError(
                     static_cast<int32_t>(Result::UNAVAILABLE));
@@ -224,12 +183,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
                                                     shared_ptr<ITunerFrontend>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlIFrontend> frontend;
     int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
@@ -241,8 +194,8 @@
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
 
-    shared_ptr<TunerHidlFrontend> tunerFrontend =
-            ::ndk::SharedRefBase::make<TunerHidlFrontend>(frontend, id);
+    shared_ptr<TunerHidlFrontend> tunerFrontend = ::ndk::SharedRefBase::make<TunerHidlFrontend>(
+            frontend, id, this->ref<TunerHidlService>());
     if (mLnaStatus != -1) {
         tunerFrontend->setLna(mLnaStatus == 1);
     }
@@ -255,12 +208,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
@@ -278,12 +225,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openLnbByName(const string& lnbName,
                                                      shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int lnbId;
     HidlResult status;
     sp<HidlILnb> lnb;
@@ -302,12 +243,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openDescrambler(
         int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlIDescrambler> descrambler;
     //int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -324,7 +259,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getTunerHalVersion(int* _aidl_return) {
-    hasITuner();
     *_aidl_return = mTunerVersion;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -332,7 +266,7 @@
 ::ndk::ScopedAStatus TunerHidlService::openSharedFilter(
         const string& in_filterToken, const shared_ptr<ITunerFilterCallback>& in_cb,
         shared_ptr<ITunerFilter>* _aidl_return) {
-    if (!hasITuner()) {
+    if (mTuner == nullptr) {
         ALOGE("get ITuner failed");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
@@ -368,8 +302,13 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
+::ndk::ScopedAStatus TunerHidlService::isLnaSupported(bool* /* _aidl_return */) {
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
 ::ndk::ScopedAStatus TunerHidlService::setLna(bool bEnable) {
-    if (!hasITuner()) {
+    if (mTuner == nullptr) {
         ALOGE("get ITuner failed");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
@@ -428,11 +367,6 @@
 }
 
 void TunerHidlService::updateTunerResources() {
-    if (!hasITuner()) {
-        ALOGE("Failed to updateTunerResources");
-        return;
-    }
-
     TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
 }
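Note on the TunerHidlService.cpp hunks above: the per-call hasITuner()/hasITuner_1_1() lazy-initialization guards are gone. The constructor now fetches the HIDL ITuner (and ITuner_1_1, when available) up front and registers resources with TRM, while instantiate() refuses to register the AIDL service at all when no HIDL HAL is present and registers it lazily when ro.tuner.lazyhal is true. Below is a minimal sketch of that registration decision, using only the NDK binder and libcutils calls that already appear in the diff; the service and property names are placeholders, not the real ones.

    #include <android/binder_manager.h>
    #include <cutils/properties.h>

    // Minimal sketch, not the real TunerHidlService code: choose between lazy
    // and eager registration of an AIDL service the way instantiate() above
    // does. "my.service.name" and "ro.example.lazyhal" are placeholder names.
    binder_status_t registerTunerLikeService(AIBinder* binder) {
        if (property_get_bool("ro.example.lazyhal", false /* default */)) {
            // Lazy: servicemanager may start and stop the hosting process on demand.
            return AServiceManager_registerLazyService(binder, "my.service.name");
        }
        // Eager: the service stays registered for the lifetime of the process.
        return AServiceManager_addService(binder, "my.service.name");
    }

The lazy path appears to pair with the interface/oneshot/disabled entries added to mediatuner.rc further down, which let the service be started on demand rather than at boot.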
 
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
index 2252d35..872aefc 100644
--- a/services/tuner/hidl/TunerHidlService.h
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -89,6 +89,7 @@
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
                                           const shared_ptr<ITunerFilterCallback>& in_cb,
                                           shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
     ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
     ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                  int32_t in_maxNumber) override;
@@ -99,11 +100,7 @@
     void removeSharedFilter(const shared_ptr<TunerHidlFilter>& sharedFilter);
     void removeFrontend(const shared_ptr<TunerHidlFrontend>& frontend);
 
-    static shared_ptr<TunerHidlService> getTunerService();
-
 private:
-    bool hasITuner();
-    bool hasITuner_1_1();
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
     vector<int32_t> getTRMLnbHandles();
@@ -121,8 +118,6 @@
     Mutex mOpenedFrontendsLock;
     unordered_set<shared_ptr<TunerHidlFrontend>> mOpenedFrontends;
     int mLnaStatus = -1;
-
-    static shared_ptr<TunerHidlService> sTunerService;
 };
 
 }  // namespace tuner
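The header diff above also removes the static sTunerService singleton and getTunerService(); in the .cpp hunks the service instead hands this->ref<TunerHidlService>() to the objects it creates (TunerHidlDemux, TunerHidlFrontend), so each child keeps the service alive through a shared_ptr rather than a global. Here is a hedged sketch of that ownership pattern with ndk::SharedRefBase; the class names are placeholders, not the real Tuner classes.

    #include <android/binder_interface_utils.h>  // ::ndk::SharedRefBase
    #include <memory>
    #include <utility>

    class Parent;  // forward declaration so Child can hold a shared_ptr to it

    class Child {
      public:
        explicit Child(std::shared_ptr<Parent> parent) : mParent(std::move(parent)) {}
      private:
        std::shared_ptr<Parent> mParent;  // strong reference back to the creating service
    };

    class Parent : public ::ndk::SharedRefBase {
      public:
        std::shared_ptr<Child> makeChild() {
            // ref<Parent>() shares ownership with the SharedRefBase control block,
            // analogous to this->ref<TunerHidlService>() in the diff above.
            return std::make_shared<Child>(ref<Parent>());
        }
    };

    // Usage sketch: the parent must be created via SharedRefBase::make so that
    // ref() has a control block to share.
    //   std::shared_ptr<Parent> parent = ::ndk::SharedRefBase::make<Parent>();
    //   std::shared_ptr<Child> child = parent->makeChild();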
diff --git a/services/tuner/hidl/TunerHidlTimeFilter.cpp b/services/tuner/hidl/TunerHidlTimeFilter.cpp
index d0606d6..06a71d0 100644
--- a/services/tuner/hidl/TunerHidlTimeFilter.cpp
+++ b/services/tuner/hidl/TunerHidlTimeFilter.cpp
@@ -43,12 +43,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::setTimeStamp(int64_t timeStamp) {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mTimeFilter->setTimeStamp(static_cast<uint64_t>(timeStamp));
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -57,12 +51,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::clearTimeStamp() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mTimeFilter->clearTimeStamp();
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -71,13 +59,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::getSourceTime(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     mTimeFilter->getSourceTime([&](HidlResult r, uint64_t t) {
         status = r;
@@ -91,13 +72,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::getTimeStamp(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     mTimeFilter->getTimeStamp([&](HidlResult r, uint64_t t) {
         status = r;
@@ -111,15 +85,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::close() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mTimeFilter->close();
-    mTimeFilter = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
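For the TunerHidlTimeFilter methods above, the removed null checks mean each call now goes straight to the HIDL proxy, which reports its result through a synchronous callback. The shape of that pattern is sketched below with plain C++ stand-ins rather than the generated HIDL types, so it compiles on its own.

    #include <cstdint>
    #include <functional>

    // Sketch only: the synchronous-callback shape used by HIDL methods such as
    // ITimeFilter::getTimeStamp above. The callee reports (status, value) by
    // invoking a lambda; the caller captures both by reference.
    enum class Status { SUCCESS, UNAVAILABLE };

    // Stand-in for a generated HIDL proxy method.
    void getTimeStamp(const std::function<void(Status, uint64_t)>& cb) {
        cb(Status::SUCCESS, 42 /* placeholder timestamp */);
    }

    // Mirrors the control flow of TunerHidlTimeFilter::getTimeStamp(): forward
    // the value on success, signal an error otherwise (here, a negative return).
    int64_t readTimeStamp() {
        Status status = Status::UNAVAILABLE;
        uint64_t value = 0;
        getTimeStamp([&](Status r, uint64_t t) {
            status = r;
            value = t;
        });
        return (status == Status::SUCCESS) ? static_cast<int64_t>(value) : -1;
    }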
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index a014dea..f8311ff 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -32,6 +32,7 @@
 
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
+    ProcessState::self()->setThreadPoolMaxThreadCount(8);
 
     // Check legacy HIDL HAL first. If it doesn't exist, use the AIDL HAL.
     binder_status_t status = TunerHidlService::instantiate();
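The main_tunerservice.cpp hunk above caps the binder thread pool at 8 threads before the service is instantiated. Below is a hedged sketch of the usual libbinder thread-pool setup around a service's registration step; the registration itself is elided.

    #include <binder/IPCThreadState.h>
    #include <binder/ProcessState.h>

    using ::android::IPCThreadState;
    using ::android::ProcessState;

    int main() {
        // Set the cap before any binder threads are spawned; changing it later
        // does not affect threads that already exist.
        ProcessState::self()->setThreadPoolMaxThreadCount(8);

        // ... instantiate and register the service with servicemanager here ...

        ProcessState::self()->startThreadPool();   // spawn the pooled binder threads
        IPCThreadState::self()->joinThreadPool();  // let the main thread serve binder calls too
        return 0;
    }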
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
index 6a3e199..90a0090 100644
--- a/services/tuner/mediatuner.rc
+++ b/services/tuner/mediatuner.rc
@@ -1,8 +1,14 @@
+# The media.tuner service is not started by default; it starts only when TRM
+# (Tuner Resource Manager) sets tuner.server.enable to "true". TRM checks
+# ro.tuner.lazyhal and, if that is not true, sets tuner.server.enable to "true".
 service media.tuner /system/bin/mediatuner
     class main
     group media
     ioprio rt 4
-    onrestart restart vendor.tuner-hal-1-0
-    onrestart restart vendor.tuner-hal-1-1
-    onrestart restart vendor.tuner-default
     task_profiles ProcessCapacityHigh HighPerformance
+    interface aidl media.tuner
+    oneshot
+    disabled
+
+on property:tuner.server.enable=true
+    enable media.tuner
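The new comment in mediatuner.rc above describes a property handshake: the service is left disabled and oneshot, and init enables it only when tuner.server.enable flips to "true", which TRM does whenever ro.tuner.lazyhal is not set. An illustrative sketch of that handshake from the TRM side using libcutils property calls follows; it is not the actual Tuner Resource Manager code.

    #include <cutils/properties.h>

    // Illustrative only -- not the real TRM implementation. If lazy-HAL mode is
    // off, flip tuner.server.enable so the "on property:tuner.server.enable=true"
    // trigger above starts media.tuner.
    void maybeEnableTunerServer() {
        if (!property_get_bool("ro.tuner.lazyhal", false /* default */)) {
            property_set("tuner.server.enable", "true");
        }
    }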