Merge "Sync with FrameEvent.aidl change"
diff --git a/METADATA b/METADATA
index aabda36..146bfcb 100644
--- a/METADATA
+++ b/METADATA
@@ -2,22 +2,22 @@
 #     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
 #     DEPENDING ON IT IN YOUR PROJECT. ***
 third_party {
-  # would be NOTICE save for Widevine Master License Agreement in:
-  #   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
-  #   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
-  #   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
-  # and patent disclaimers in:
-  #   media/codec2/components/aac/patent_disclaimer.txt
-  #   media/codec2/components/amr_nb_wb/patent_disclaimer.txt
-  #   media/codec2/components/mp3/patent_disclaimer.txt
-  #   media/codec2/components/mpeg4_h263/patent_disclaimer.txt
-  #   media/codecs/amrnb/patent_disclaimer.txt
-  #   media/codecs/amrwb/dec/patent_disclaimer.txt
-  #   media/codecs/amrwb/enc/patent_disclaimer.txt
-  #   media/codecs/m4v_h263/patent_disclaimer.txt
-  #   media/codecs/mp3dec/patent_disclaimer.txt
-  #   media/libstagefright/codecs/aacenc/patent_disclaimer.txt
+  license_note: "would be NOTICE save for Widevine Master License Agreement in:\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h\n"
+  " and patent disclaimers in:\n"
+  "   media/codec2/components/aac/patent_disclaimer.txt\n"
+  "   media/codec2/components/amr_nb_wb/patent_disclaimer.txt\n"
+  "   media/codec2/components/mp3/patent_disclaimer.txt\n"
+  "   media/codec2/components/mpeg4_h263/patent_disclaimer.txt\n"
+  "   media/codecs/amrnb/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/dec/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/enc/patent_disclaimer.txt\n"
+  "   media/codecs/m4v_h263/patent_disclaimer.txt\n"
+  "   media/codecs/mp3dec/patent_disclaimer.txt\n"
+  "   media/libstagefright/codecs/aacenc/patent_disclaimer.txt"
   license_type: BY_EXCEPTION_ONLY
 }
diff --git a/apex/Android.bp b/apex/Android.bp
index b9abd12..570ca01 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -23,7 +23,6 @@
 
 apex_defaults {
     name: "com.android.media-defaults",
-    updatable: true,
     bootclasspath_fragments: ["com.android.media-bootclasspath-fragment"],
     systemserverclasspath_fragments: ["com.android.media-systemserverclasspath-fragment"],
     multilib: {
@@ -57,6 +56,7 @@
     prebuilts: [
         "code_coverage.policy",
         "com.android.media-mediatranscoding.rc",
+        "com.android.media-mediatranscoding.32rc",
         "crash_dump.policy",
         "mediaextractor.policy",
         "media-linker-config",
@@ -67,14 +67,13 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package is compatible with Android 10 in two ways:
     // - build the APEX package compatible with Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
@@ -126,6 +125,26 @@
     // modified by the Soong or platform compat team.
     hidden_api: {
         max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+        // The following packages contain classes from other modules on the
+        // bootclasspath. That means that the hidden API flags for this module
+        // have to explicitly list every single class this module provides in
+        // that package to differentiate them from the classes provided by other
+        // modules. That can include private classes that are not part of the
+        // API.
+        split_packages: [
+            "android.media",
+        ],
+
+        // The following packages and all their subpackages currently only
+        // contain classes from this bootclasspath_fragment. Listing a package
+        // here won't prevent other bootclasspath modules from adding classes in
+        // any of those packages but it will prevent them from adding those
+        // classes into an API surface, e.g. public, system, etc. Doing so will
+        // result in a build failure due to inconsistent flags.
+        package_prefixes: [
+            "android.media.internal",
+        ],
     },
 }
 
@@ -148,7 +167,6 @@
 
 apex_defaults {
     name: "com.android.media.swcodec-defaults",
-    updatable: true,
     binaries: [
         "mediaswcodec",
     ],
@@ -160,6 +178,7 @@
     ],
     prebuilts: [
         "com.android.media.swcodec-mediaswcodec.rc",
+        "com.android.media.swcodec-mediaswcodec.32rc",
         "com.android.media.swcodec-ld.config.txt",
         "mediaswcodec.policy",
         "code_coverage.policy",
@@ -172,30 +191,46 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media.swcodec-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package is compatible with Android 10 in two ways:
     // - build the APEX package compatible with Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
 }
 
+// install as mediatranscoding.* and mediaswcodec.* instead of init.*
+// so we are ready for the day we have more than one *rc file within the apex.
+
 prebuilt_etc {
     name: "com.android.media-mediatranscoding.rc",
     src: "mediatranscoding.rc",
-    filename: "init.rc",
+    filename: "mediatranscoding.rc",
+    installable: false,
+}
+
+prebuilt_etc {
+    name: "com.android.media-mediatranscoding.32rc",
+    src: "mediatranscoding.32rc",
+    filename: "mediatranscoding.32rc",
     installable: false,
 }
 
 prebuilt_etc {
     name: "com.android.media.swcodec-mediaswcodec.rc",
     src: "mediaswcodec.rc",
-    filename: "init.rc",
+    filename: "mediaswcodec.rc",
+    installable: false,
+}
+
+prebuilt_etc {
+    name: "com.android.media.swcodec-mediaswcodec.32rc",
+    src: "mediaswcodec.32rc",
+    filename: "mediaswcodec.32rc",
     installable: false,
 }
 
diff --git a/apex/manifest.json b/apex/manifest.json
index 2cf7296..752c2b5 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
 {
   "name": "com.android.media",
-  "version": 339999900,
+  "version": 330100000,
   "requireNativeLibs": [
     "libandroid.so",
     "libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 82463a2..3732a76 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 339999900,
+  "version": 330100000,
   "requireNativeLibs": [
     ":sphal"
   ]
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
index 79aef36..f40d172 100644
--- a/apex/mediaswcodec.32rc
+++ b/apex/mediaswcodec.32rc
@@ -1,3 +1,5 @@
+##  for SDK releases >= 32
+##
 service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
     class main
     user mediacodec
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index 0c9b8c8..46799c7 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -1,3 +1,6 @@
+##  for SDK releases 29..31
+##  where writepid has not yet been replaced by task_profiles
+##
 service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
     class main
     user mediacodec
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
index 5169462..edba9b9 100644
--- a/apex/mediatranscoding.32rc
+++ b/apex/mediatranscoding.32rc
@@ -1,3 +1,6 @@
+##  for SDK releases >= 32
+##
+#
 # media.transcoding service is defined on com.android.media apex which goes back
 # to API29, but we only want it started on API31+ devices. So we declare it as
 # "disabled" and start it explicitly on boot.
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index ae9f8ba..6e453be 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -1,3 +1,7 @@
+##  for SDK releases 29..31
+##  where writepid has not yet been replaced by task_profiles
+##
+#
 # media.transcoding service is defined on com.android.media apex which goes back
 # to API29, but we only want it started on API31+ devices. So we declare it as
 # "disabled" and start it explicitly on boot.
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 2a07ffc..05341bf 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -52,6 +52,12 @@
         return err;
     }
 
+    float maxPreviewFps = 0;
+    if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+        return err;
+    }
+
     int dataSpace = 0;
     if ((err = parcel->readInt32(&dataSpace)) != OK) {
         ALOGE("%s: Failed to read dataSpace from parcel", __FUNCTION__);
@@ -112,14 +118,14 @@
         return err;
     }
 
-    int dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
-    if ((err = parcel->readInt32(&dynamicRangeProfile)) != OK) {
+    int64_t dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    if ((err = parcel->readInt64(&dynamicRangeProfile)) != OK) {
         ALOGE("%s: Failed to read dynamic range profile type from parcel", __FUNCTION__);
         return err;
     }
 
-    int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
-    if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+    int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt64(&streamUseCase)) != OK) {
         ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
         return err;
     }
@@ -127,6 +133,7 @@
     mWidth = width;
     mHeight = height;
     mFormat = format;
+    mMaxPreviewFps = maxPreviewFps;
     mDataSpace = dataSpace;
     mUsage = usage;
     mRequestCount = requestCount;
@@ -166,6 +173,11 @@
         return err;
     }
 
+    if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to write stream maxPreviewFps!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeInt32(mDataSpace)) != OK) {
         ALOGE("%s: Failed to write stream dataSpace!", __FUNCTION__);
         return err;
@@ -216,12 +228,12 @@
         return err;
     }
 
-    if ((err = parcel->writeInt32(mDynamicRangeProfile)) != OK) {
+    if ((err = parcel->writeInt64(mDynamicRangeProfile)) != OK) {
         ALOGE("%s: Failed to write dynamic range profile type", __FUNCTION__);
         return err;
     }
 
-    if ((err = parcel->writeInt32(mStreamUseCase)) != OK) {
+    if ((err = parcel->writeInt64(mStreamUseCase)) != OK) {
         ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
         return err;
     }
@@ -247,6 +259,7 @@
         mApiLevel(0),
         mIsNdk(false),
         mLatencyMs(-1),
+        mMaxPreviewFps(0),
         mSessionType(0),
         mInternalReconfigure(0),
         mRequestCount(0),
@@ -263,6 +276,7 @@
                 mApiLevel(apiLevel),
                 mIsNdk(isNdk),
                 mLatencyMs(latencyMs),
+                mMaxPreviewFps(0),
                 mSessionType(0),
                 mInternalReconfigure(0),
                 mRequestCount(0),
@@ -319,6 +333,12 @@
         return err;
     }
 
+    float maxPreviewFps;
+    if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+        return err;
+    }
+
     int32_t sessionType;
     if ((err = parcel->readInt32(&sessionType)) != OK) {
         ALOGE("%s: Failed to read session type from parcel", __FUNCTION__);
@@ -362,6 +382,7 @@
     mApiLevel = apiLevel;
     mIsNdk = isNdk;
     mLatencyMs = latencyMs;
+    mMaxPreviewFps = maxPreviewFps;
     mSessionType = sessionType;
     mInternalReconfigure = internalReconfigure;
     mRequestCount = requestCount;
@@ -415,6 +436,11 @@
         return err;
     }
 
+    if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeInt32(mSessionType)) != OK) {
         ALOGE("%s: Failed to write session type!", __FUNCTION__);
         return err;
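The new mMaxPreviewFps field is read and written at matching positions on both sides because Parcel serialization is purely positional. A minimal, self-contained sketch of that constraint (illustrative only, with made-up type and field names, not the real CameraStreamStats):

#include <binder/Parcel.h>

using android::OK;
using android::Parcel;
using android::status_t;

struct StreamStatsSketch {          // made-up type for illustration
    int32_t width = 0;
    int32_t height = 0;
    int32_t format = 0;
    float maxPreviewFps = 0.0f;     // new field, written right after format
    int32_t dataSpace = 0;

    status_t writeToParcel(Parcel* p) const {
        status_t err;
        if ((err = p->writeInt32(width)) != OK) return err;
        if ((err = p->writeInt32(height)) != OK) return err;
        if ((err = p->writeInt32(format)) != OK) return err;
        if ((err = p->writeFloat(maxPreviewFps)) != OK) return err;  // same slot on both sides
        return p->writeInt32(dataSpace);
    }

    status_t readFromParcel(const Parcel* p) {
        status_t err;
        if ((err = p->readInt32(&width)) != OK) return err;
        if ((err = p->readInt32(&height)) != OK) return err;
        if ((err = p->readInt32(&format)) != OK) return err;
        if ((err = p->readFloat(&maxPreviewFps)) != OK) return err;  // mirrors the write order
        return p->readInt32(&dataSpace);
    }
};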
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 5b8da34..11d4960 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -81,7 +81,7 @@
     return mDynamicRangeProfile;
 }
 
-int OutputConfiguration::getStreamUseCase() const {
+int64_t OutputConfiguration::getStreamUseCase() const {
     return mStreamUseCase;
 }
 
@@ -192,8 +192,8 @@
         return err;
     }
 
-    int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
-    if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+    int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt64(&streamUseCase)) != OK) {
         ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
         return err;
     }
@@ -232,8 +232,8 @@
     mDynamicRangeProfile = dynamicProfile;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
-          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d, timestampBase = %d,"
-          " mirrorMode = %d",
+          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
+          ", timestampBase = %d, mirrorMode = %d",
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
           String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
           mMirrorMode);
@@ -317,7 +317,7 @@
     err = parcel->writeInt64(mDynamicRangeProfile);
     if (err != OK) return err;
 
-    err = parcel->writeInt32(mStreamUseCase);
+    err = parcel->writeInt64(mStreamUseCase);
     if (err != OK) return err;
 
     err = parcel->writeInt32(mTimestampBase);
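Widening mStreamUseCase from int to int64_t also forces the ALOGV format string above to switch from %d to the PRId64 macro. A tiny standalone illustration (the value is a placeholder):

#include <cinttypes>
#include <cstdio>

int main() {
    int64_t streamUseCase = 0;  // placeholder; real ids come from camera metadata
    // "%d" would be undefined behaviour for an int64_t argument; PRId64 expands
    // to the correct conversion specifier for the platform (e.g. "lld" or "ld").
    std::printf("streamUseCase = %" PRId64 "\n", streamUseCase);
    return 0;
}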
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 26dc70c..15f5622 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -37,6 +37,7 @@
     int mWidth;
     int mHeight;
     int mFormat;
+    float mMaxPreviewFps;
     int mDataSpace;
     int64_t mUsage;
 
@@ -65,20 +66,20 @@
     // Dynamic range profile
     int64_t mDynamicRangeProfile;
     // Stream use case
-    int mStreamUseCase;
+    int64_t mStreamUseCase;
 
     CameraStreamStats() :
-            mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
+            mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
             mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
             mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
             mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
-    CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
-            int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
+    CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
+            int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
             int streamUseCase)
-            : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
-              mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
-              mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
+            : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
+              mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
+              mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
               mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
               mDynamicRangeProfile(dynamicRangeProfile),
               mStreamUseCase(streamUseCase) {}
@@ -123,6 +124,7 @@
     bool mIsNdk;
     // latency in ms for camera open, close, or session creation.
     int mLatencyMs;
+    float mMaxPreviewFps;
 
     // Session info and statistics
     int mSessionType;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 6b0f333..b842885 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -63,7 +63,7 @@
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
     bool                       isMultiResolution() const;
-    int                        getStreamUseCase() const;
+    int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
     int                        getMirrorMode() const;
 
@@ -185,7 +185,7 @@
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int64_t                    mDynamicRangeProfile;
-    int                        mStreamUseCase;
+    int64_t                    mStreamUseCase;
     int                        mTimestampBase;
     int                        mMirrorMode;
 };
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4c492f0..4891034 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4216,7 +4216,7 @@
     /**
      * <p>The stream use cases supported by this camera device.</p>
      *
-     * <p>Type: int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
+     * <p>Type: int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
      *
      * <p>This tag may appear in:
      * <ul>
@@ -4260,7 +4260,7 @@
      * reprocessable session, constrained high speed session, or RAW stream combinations, the
      * application should leave stream use cases within the session as DEFAULT.</p>
      */
-    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
             ACAMERA_SCALER_START + 25,
     ACAMERA_SCALER_END,
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 4cc1292..85ab0c2 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -330,7 +330,8 @@
                 return ACAMERA_ERROR_UNKNOWN;
     }
 
-    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfigW);
+    mConfiguredOutputs[streamId] =
+            std::move(std::make_pair(std::move(output->mWindow), std::move(outConfigW)));
 
     return ACAMERA_OK;
 }
@@ -623,7 +624,8 @@
         outConfigInsert.windowHandles[0] = anw;
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
         native_handle_ptr_wrapper wrap(anw);
-        outputSet.insert(std::make_pair(anw, outConfigInsertW));
+
+        outputSet.emplace(std::make_pair(std::move(anw), std::move(outConfigInsertW)));
     }
     std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> addSet = outputSet;
     std::vector<int32_t> deleteList;
@@ -680,7 +682,7 @@
     }
 
     // add new streams
-    for (auto outputPair : addSet) {
+    for (const auto &outputPair : addSet) {
         int streamId;
         Status status = Status::UNKNOWN_ERROR;
         auto ret = mRemote->createStream(outputPair.second,
@@ -845,12 +847,32 @@
             return;
         }
 
-        const auto& windowHandles = outputPairIt->second.second.mOutputConfiguration.windowHandles;
-        for (const auto& outHandle : windowHandles) {
-            for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
-                int32_t windowId = streamAndWindowId.windowId;
-                if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
-                    const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
+        // Get the surfaces corresponding to the error stream id, go through
+        // them and try to match the surfaces in the corresponding
+        // CaptureRequest.
+        const auto& errorWindowHandles =
+                outputPairIt->second.second.mOutputConfiguration.windowHandles;
+        for (const auto& errorWindowHandle : errorWindowHandles) {
+            for (const auto &requestStreamAndWindowId :
+                        request->mCaptureRequest.streamAndWindowIds) {
+                // Go through the surfaces in the capture request and see which
+                // ones match the surfaces in the error stream.
+                int32_t requestWindowId = requestStreamAndWindowId.windowId;
+                auto requestSurfacePairIt =
+                        mConfiguredOutputs.find(requestStreamAndWindowId.streamId);
+                if (requestSurfacePairIt == mConfiguredOutputs.end()) {
+                    ALOGE("%s: Error: request stream id %d does not exist", __FUNCTION__,
+                              requestStreamAndWindowId.streamId);
+                    setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
+                    return;
+                }
+
+                const auto &requestWindowHandles =
+                        requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
+                if (utils::isWindowNativeHandleEqual(
+                        requestWindowHandles[requestWindowId], errorWindowHandle)) {
+                    const native_handle_t* anw =
+                            requestWindowHandles[requestWindowId].getNativeHandle();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 6f5820e..62779a4 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -109,8 +109,30 @@
         mOutputConfiguration.windowGroupId = -1;
     };
 
-    OutputConfigurationWrapper(OutputConfiguration &outputConfiguration)
-            : mOutputConfiguration((outputConfiguration)) { }
+    OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
+        *this = other;
+    }
+
+    // Needed to make sure that copying the OutputConfiguration inside
+    // OutputConfigurationWrapper doesn't invoke hidl_handle's assignment
+    // operator / copy constructor, which would clone the native handle.
+    // That is not what we want for app callbacks that take the native
+    // handle as a parameter.
+    OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
+        const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
+        mOutputConfiguration.rotation = outputConfiguration.rotation;
+        mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
+        mOutputConfiguration.width = outputConfiguration.width;
+        mOutputConfiguration.height = outputConfiguration.height;
+        mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
+        mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
+        mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
+        size_t i = 0;
+        for (const auto &handle : outputConfiguration.windowHandles) {
+            mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
+        }
+        return *this;
+    }
 
     bool operator ==(const OutputConfiguration &other) const {
         const OutputConfiguration &self = mOutputConfiguration;
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index 3dc62e9..bda664a 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -353,7 +353,9 @@
 
     err = statusAidlToStatusT(statusAidl);
     std::string msgStr(statusAidl.getMessage());
-    *errorDetailMsg = toString8(msgStr);
+    if (errorDetailMsg != nullptr) {
+        *errorDetailMsg = toString8(msgStr);
+    }
     if (err != OK) {
         ALOGE("Failed on decrypt, error description:%s", statusAidl.getDescription().c_str());
         return err;
@@ -415,4 +417,4 @@
 
     return DrmUtils::GetLogMessagesAidl<ICryptoPluginAidl>(mPlugin, logs);
 }
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index cbb6ddf..a290704 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -342,7 +342,9 @@
                 [&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
                     if (status == Status_V1_2::OK) {
                         bytesWritten = hBytesWritten;
-                        *errorDetailMsg = toString8(hDetailedError);
+                        if (errorDetailMsg != nullptr) {
+                            *errorDetailMsg = toString8(hDetailedError);
+                        }
                     }
                     err = toStatusT(status);
                 });
@@ -353,7 +355,9 @@
                 [&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
                     if (status == Status::OK) {
                         bytesWritten = hBytesWritten;
-                        *errorDetailMsg = toString8(hDetailedError);
+                        if (errorDetailMsg != nullptr) {
+                            *errorDetailMsg = toString8(hDetailedError);
+                        }
                     }
                     err = toStatusT(status);
                 });
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index aa40793..c394d5a 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -286,4 +286,11 @@
     return mDrmHalHidl->getLogMessages(logs);
 }
 
+status_t DrmHal::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+    status_t statusResult;
+    statusResult = mDrmHalAidl->getSupportedSchemes(schemes);
+    if (statusResult == OK) return statusResult;
+    return mDrmHalHidl->getSupportedSchemes(schemes);
+}
+
 }  // namespace android
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 284abd5..bdd83e9 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -1189,6 +1189,25 @@
     return serializedMetrics;
 }
 
+status_t DrmHalAidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFactories.empty()) return UNKNOWN_ERROR;
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        CryptoSchemes curSchemes{};
+        auto err = mFactories[i]->getSupportedCryptoSchemes(&curSchemes);
+        if (!err.isOk()) {
+            continue;
+        }
+
+        for (auto uuidObj : curSchemes.uuids) {
+            schemes.insert(schemes.end(), uuidObj.uuid.begin(), uuidObj.uuid.end());
+        }
+    }
+
+    return OK;
+}
+
 void DrmHalAidl::cleanup() {
     closeOpenSessions();
 
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c83b52b..c38dbef 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -20,6 +20,7 @@
 #include <aidl/android/media/BnResourceManagerClient.h>
 #include <android/binder_manager.h>
 #include <android/hardware/drm/1.2/types.h>
+#include <android/hardware/drm/1.3/IDrmFactory.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <media/EventMetric.h>
@@ -1514,4 +1515,23 @@
     return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
 }
 
+status_t DrmHalHidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+    Mutex::Autolock autoLock(mLock);
+    for (auto &factory : mFactories) {
+        sp<drm::V1_3::IDrmFactory> factoryV1_3 = drm::V1_3::IDrmFactory::castFrom(factory);
+        if (factoryV1_3 == nullptr) {
+            continue;
+        }
+
+        factoryV1_3->getSupportedCryptoSchemes(
+            [&](const hardware::hidl_vec<hardware::hidl_array<uint8_t, 16>>& schemes_hidl) {
+                for (const auto &scheme : schemes_hidl) {
+                    schemes.insert(schemes.end(), scheme.data(), scheme.data() + scheme.size());
+                }
+            });
+    }
+
+    return OK;
+}
+
 }  // namespace android
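Both getSupportedSchemes() implementations above flatten the supported scheme UUIDs into a single byte vector, 16 bytes per scheme. A hedged sketch of how a caller might unpack that vector; the helper name is invented:

#include <algorithm>
#include <array>
#include <cstdint>
#include <vector>

// Hypothetical helper, not part of the patch: splits the flat byte vector
// filled by getSupportedSchemes() back into 16-byte UUIDs. A trailing
// partial chunk (which should not occur) is ignored.
std::vector<std::array<uint8_t, 16>> splitSchemes(const std::vector<uint8_t>& flat) {
    std::vector<std::array<uint8_t, 16>> uuids;
    for (size_t i = 0; i + 16 <= flat.size(); i += 16) {
        std::array<uint8_t, 16> uuid;
        std::copy(flat.begin() + i, flat.begin() + i + 16, uuid.begin());
        uuids.push_back(uuid);
    }
    return uuids;
}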
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index c06f09b..fd095b7 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -42,7 +42,7 @@
         }
         return type_names[attribute];
     }
-    
+
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index 731755b..be0cd4b 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -177,7 +177,7 @@
         [](const char* instance, void* context) {
             auto fullName = std::string(IDrmFactoryAidl::descriptor) + "/" + std::string(instance);
             auto factory = IDrmFactoryAidl::fromBinder(
-                    ::ndk::SpAIBinder(AServiceManager_getService(fullName.c_str())));
+                    ::ndk::SpAIBinder(AServiceManager_waitForService(fullName.c_str())));
             if (factory == nullptr) {
                 ALOGE("not found IDrmFactory. Instance name:[%s]", fullName.c_str());
                 return;
diff --git a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
index eabd41f..597b72d 100644
--- a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
+++ b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
@@ -20,6 +20,7 @@
 
 #include <binder/MemoryDealer.h>
 #include <hidlmemory/FrameworkUtils.h>
+#include <media/stagefright/foundation/AString.h>
 #include <mediadrm/CryptoHal.h>
 #include <mediadrm/DrmHal.h>
 #include <utils/String8.h>
@@ -401,7 +402,7 @@
         .secureMemory = nullptr};
 
     const uint64_t offset = 0;
-    AString *errorDetailMsg = nullptr;
+    AString errorDetailMsg;
     CryptoPlugin::Mode mode;
     bool shouldPassRandomCryptoMode = mFuzzedDataProvider->ConsumeBool();
     if (shouldPassRandomCryptoMode) {
@@ -411,7 +412,7 @@
             kCryptoMode[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumCryptoMode - 1)];
     }
     mCrypto->decrypt(keyId, iv, mode, pattern, sourceBuffer, offset, subSamples, numSubSamples,
-                     destBuffer, errorDetailMsg);
+                     destBuffer, &errorDetailMsg);
 
     if (heapSeqNum >= 0) {
         mCrypto->unsetHeap(heapSeqNum);
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index f5e75ac..eab597b 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -117,6 +117,7 @@
             Vector<uint8_t> const &sessionId,
             const char *playbackId);
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
 private:
     sp<IDrm> mDrmHalHidl;
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
index e35140e..0f51ce9 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
@@ -105,6 +105,7 @@
                                            bool* required) const;
     virtual status_t setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
     ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
                                  const std::vector<uint8_t>& in_sessionId,
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
index 94ef285..11f0608 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
@@ -184,6 +184,7 @@
             const char *playbackId);
 
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
     // Methods of IDrmPluginListener
     Return<void> sendEvent(EventType eventType,
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index a88784d..ee2be6a 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -165,6 +165,8 @@
 
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
 
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const = 0;
+
 protected:
     IDrm() {}
 
diff --git a/drm/mediadrm/plugins/TEST_MAPPING b/drm/mediadrm/plugins/TEST_MAPPING
index fd4ef95..9919e90 100644
--- a/drm/mediadrm/plugins/TEST_MAPPING
+++ b/drm/mediadrm/plugins/TEST_MAPPING
@@ -1,19 +1,10 @@
 {
   "presubmit": [
     {
-      "name": "CtsMediaDrmTestCases",
+      "name": "CtsMediaDrmFrameworkTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "include-filter": "android.mediadrm.cts.MediaDrmClearkeyTest"
-        },
-        {
-          "include-filter": "android.mediadrm.cts.MediaDrmMetricsTest"
-        },
-        {
-          "include-filter": "android.mediadrm.cts.NativeMediaDrmClearkeyTest"
         }
       ]
     }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 7331ded..ea51e9d 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -28,6 +28,7 @@
 #include "DrmPlugin.h"
 #include "Session.h"
 #include "Utils.h"
+#include "AidlClearKeryProperties.h"
 
 namespace {
 const std::string kKeySetIdPrefix("ckid");
@@ -81,12 +82,13 @@
 
 void DrmPlugin::initProperties() {
     mStringProperties.clear();
-    mStringProperties[kVendorKey] = kVendorValue;
-    mStringProperties[kVersionKey] = kVersionValue;
-    mStringProperties[kPluginDescriptionKey] = kPluginDescriptionValue;
-    mStringProperties[kAlgorithmsKey] = kAlgorithmsValue;
-    mStringProperties[kListenerTestSupportKey] = kListenerTestSupportValue;
-    mStringProperties[kDrmErrorTestKey] = kDrmErrorTestValue;
+    mStringProperties[kVendorKey] = kAidlVendorValue;
+    mStringProperties[kVersionKey] = kAidlVersionValue;
+    mStringProperties[kPluginDescriptionKey] = kAidlPluginDescriptionValue;
+    mStringProperties[kAlgorithmsKey] = kAidlAlgorithmsValue;
+    mStringProperties[kListenerTestSupportKey] = kAidlListenerTestSupportValue;
+    mStringProperties[kDrmErrorTestKey] = kAidlDrmErrorTestValue;
+    mStringProperties[kAidlVersionKey] = kAidlVersionValue;
 
     std::vector<uint8_t> valueVector;
     valueVector.clear();
@@ -377,6 +379,8 @@
         value = mStringProperties[kListenerTestSupportKey];
     } else if (name == kDrmErrorTestKey) {
         value = mStringProperties[kDrmErrorTestKey];
+    } else if (name == kAidlVersionKey) {
+        value = mStringProperties[kAidlVersionKey];
     } else {
         ALOGE("App requested unknown string property %s", name.c_str());
         status = Status::ERROR_DRM_CANNOT_HANDLE;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
index 019c726..c87aabc 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
@@ -1,9 +1,9 @@
-service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service.clearkey
+service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service-lazy.clearkey
+    oneshot
     disabled
     class hal
     user media
     group mediadrm drmrpc
     ioprio rt 4
     task_profiles ProcessCapacityHigh
-    interface aidl android.hardware.drm.IDrmFactory/clearkey
-    interface aidl android.hardware.drm.ICryptoFactory/clearkey
+    interface aidl android.hardware.drm.IDrmFactory/clearkey
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
new file mode 100644
index 0000000..fb2cceb
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef AIDL_CLEARKEY_PROPERTIES_H
+#define AIDL_CLEARKEY_PROPERTIES_H
+#include <string>
+
+namespace clearkeydrm {
+static const std::string kAidlVendorValue("Google");
+static const std::string kAidlVersionValue("aidl-1");
+static const std::string kAidlPluginDescriptionValue("ClearKey CDM");
+static const std::string kAidlAlgorithmsValue("");
+static const std::string kAidlListenerTestSupportValue("true");
+
+static const std::string kAidlDrmErrorTestValue("");
+static const std::string kAidlResourceContentionValue("resourceContention");
+static const std::string kAidlLostStateValue("lostState");
+static const std::string kAidlFrameTooLargeValue("frameTooLarge");
+static const std::string kAidlInvalidStateValue("invalidState");
+}  // namespace clearkeydrm
+
+#endif
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
index 9a22633..bfda388 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
@@ -34,6 +34,7 @@
 static const std::string kLostStateValue("lostState");
 static const std::string kFrameTooLargeValue("frameTooLarge");
 static const std::string kInvalidStateValue("invalidState");
+static const std::string kAidlVersionKey("aidlVersion");
 
 static const std::string kDeviceIdKey("deviceId");
 static const uint8_t kTestDeviceIdData[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 16e794a..97e0b1d 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -38,13 +38,13 @@
     VideoFrame(uint32_t width, uint32_t height,
             uint32_t displayWidth, uint32_t displayHeight,
             uint32_t tileWidth, uint32_t tileHeight,
-            uint32_t angle, uint32_t bpp, bool hasData, size_t iccSize):
+            uint32_t angle, uint32_t bpp, uint32_t bitDepth, bool hasData, size_t iccSize):
         mWidth(width), mHeight(height),
         mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
         mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
         mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
         mSize(hasData ? (bpp * width * height) : 0),
-        mIccSize(iccSize), mReserved(0) {
+        mIccSize(iccSize), mBitDepth(bitDepth) {
     }
 
     void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
@@ -84,7 +84,7 @@
     uint32_t mRowBytes;        // Number of bytes per row before rotation
     uint32_t mSize;            // Number of bytes of frame data
     uint32_t mIccSize;         // Number of bytes of ICC data
-    uint32_t mReserved;        // (padding to make mData 64-bit aligned)
+    uint32_t mBitDepth;        // number of bits per R / G / B channel
 };
 
 }; // namespace android
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index c4a6601..0bd0d88 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -7,11 +7,9 @@
     ioprio rt 4
     task_profiles ProcessCapacityHigh HighPerformance
     onrestart restart vendor.audio-hal
+    onrestart restart vendor.audio-hal-aidl
     onrestart restart vendor.audio-hal-4-0-msd
     onrestart restart audio_proxy_service
-    # Keep the original service names for backward compatibility
-    onrestart restart vendor.audio-hal-2-0
-    onrestart restart audio-hal-2-0
 
 on property:vts.native_server.on=1
     stop audioserver
@@ -20,42 +18,34 @@
 
 on property:init.svc.audioserver=stopped
     stop vendor.audio-hal
+    stop vendor.audio-hal-aidl
     stop vendor.audio-hal-4-0-msd
     stop audio_proxy_service
-    # Keep the original service names for backward compatibility
-    stop vendor.audio-hal-2-0
-    stop audio-hal-2-0
     # See b/155364397. Need to have HAL service running for VTS.
     # Can't use 'restart' because then HAL service would restart
     # audioserver bringing it back into running state.
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    # Keep the original service names for backward compatibility
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
 
 on property:init.svc.audioserver=running
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    # Keep the original service names for backward compatibility
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
 
 on property:sys.audio.restart.hal=1
     # See b/159966243. Avoid restart loop between audioserver and HAL.
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal
+    stop vendor.audio-hal-aidl
     stop vendor.audio-hal-4-0-msd
     stop audio_proxy_service
-    stop vendor.audio-hal-2-0
-    stop audio-hal-2-0
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
     # reset the property
     setprop sys.audio.restart.hal 0
 
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index bb63e1f..7afea91 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -225,7 +225,7 @@
         work->result = C2_CORRUPTED;
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -266,7 +266,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
index 6ab14db..4920b23 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
@@ -54,7 +54,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 84728ae..29b1040 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -307,7 +307,7 @@
         work->result = wView.error();
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -341,7 +341,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
index 0cc9e9f..72990c3 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
@@ -55,7 +55,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index 7f82486..a7ae85b 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -18,6 +18,8 @@
     static_libs: ["libavcdec"],
 
     srcs: ["C2SoftAvcDec.cpp"],
+
+    export_include_dirs: ["."],
 }
 
 cc_library {
@@ -32,6 +34,8 @@
 
     srcs: ["C2SoftAvcEnc.cpp"],
 
+    export_include_dirs: ["."],
+
     cflags: [
         "-Wno-unused-variable",
     ],
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index f1669fd..8c7f8db 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -9,6 +9,16 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libcodec2_soft_common_headers",
+    defaults: ["libcodec2-impl-defaults"],
+    vendor_available: true,
+
+    export_include_dirs: [
+        "include",
+    ],
+}
+
 cc_library {
     name: "libcodec2_soft_common",
     defaults: ["libcodec2-impl-defaults"],
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index b7a5686..4f5caec 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -123,7 +123,7 @@
         // matches size limits in codec library
         addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
                 .withFields({
                     C2F(mSize, width).inRange(2, 1920, 2),
                     C2F(mSize, height).inRange(2, 1088, 2),
@@ -133,7 +133,7 @@
 
         addParameter(
             DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-                .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+                .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
                 .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                 .withSetter(
                     Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
index 8924e6d..7994d32 100644
--- a/media/codec2/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -520,6 +520,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : component{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = component.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> component;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 7937664..d0972ee 100644
--- a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -142,6 +142,10 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
+
 };
 
 } // namespace utils
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index def8a18..2b9ec7d 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -212,9 +212,8 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        mSource->configure(
-                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
-        return OK;
+        return GetStatus(mSource->configure(
+                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
     }
 
     void disconnect() override {
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index dd37c4b..c15b5ca 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1896,7 +1896,9 @@
     names->clear();
     // TODO: expand to standard params
     for (const auto &[key, desc] : mVendorParams) {
-        names->push_back(key);
+        if (desc->isVisible()) {
+            names->push_back(key);
+        }
     }
     return OK;
 }
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 63bd64b..2b8a160 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -281,6 +281,11 @@
             }
         };
 
+        // The color formats are ordered by preference. The intention here is to advertise:
+        //   c2.android.* codecs: YUV420s, Surface, <the rest>
+        //   all other codecs:    Surface, YUV420s, <the rest>
+        // TODO: get this preference via Codec2 API
+
         // vendor video codecs prefer opaque format
         if (trait.name.find("android") == std::string::npos) {
             addDefaultColorFormat(COLOR_FormatSurface);
@@ -290,9 +295,8 @@
         addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
         addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
         addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
-        // framework video encoders must support surface format, though it is unclear
-        // that they will be able to map it if it is opaque
-        if (encoder && trait.name.find("android") != std::string::npos) {
+        // Android video codecs prefer CPU-readable formats
+        if (trait.name.find("android") != std::string::npos) {
             addDefaultColorFormat(COLOR_FormatSurface);
         }
         for (int32_t colorFormat : supportedColorFormats) {
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 674921e..fe63651 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -7,6 +7,17 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libsfplugin_ccodec_utils_headers",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [ "//apex_available:platform", "com.android.media.swcodec", ],
+
+    export_include_dirs: [
+        ".",
+    ],
+}
+
 cc_library {
     name: "libsfplugin_ccodec_utils",
     vendor_available: true,
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 598500d..1d8aea3 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -78,6 +78,7 @@
 
     export_shared_lib_headers: [
         "libbase",
+        "libdmabufheap",
         "android.hardware.media.bufferpool@2.0",
     ],
 
@@ -100,7 +101,6 @@
         "libdmabufheap",
         "libfmq",
         "libgralloctypes",
-        "libhardware",
         "libhidlbase",
         "libion",
         "liblog",
@@ -149,7 +149,6 @@
     shared_libs: [
         "libui",
         "libdl",
-        "libhardware",
         "libvndksupport",
         "libprocessgroup",
     ],
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index d8d6f06..bc4053d 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -261,7 +261,7 @@
     for (const ui::PlaneLayout &plane : planes) {
         layout->rootPlanes++;
         uint32_t lastOffsetInBits = 0;
-        uint32_t rootIx = 0;
+        uint32_t rootIx = layout->numPlanes;
 
         for (const PlaneLayoutComponent &component : plane.components) {
             if (!gralloc4::isStandardPlaneLayoutComponentType(component.type)) {
@@ -309,7 +309,6 @@
 
             layout->numPlanes++;
             lastOffsetInBits = component.offsetInBits + component.sizeInBits;
-            rootIx++;
         }
     }
     return C2_OK;
@@ -699,17 +698,6 @@
                 C2PlanarLayout::PLANE_V,          // rootIx
                 0,                                // offset
             };
-            // handle interleaved formats
-            intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-            if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-            } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-            }
             break;
         }
 
@@ -830,17 +818,6 @@
                     C2PlanarLayout::PLANE_V,          // rootIx
                     0,                                // offset
                 };
-                // handle interleaved formats
-                intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-                if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                    layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-                } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                    layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-                }
                 break;
             }
 
@@ -886,6 +863,29 @@
     }
     mLocked = true;
 
+    // handle interleaved formats
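+    // If the U and V planes lie within one column increment of each other, the buffer is
+    // semiplanar (e.g. NV12/NV21); fold the secondary chroma plane into the other's root plane.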
+    if (layout->type == C2PlanarLayout::TYPE_YUV && layout->rootPlanes == 3) {
+        intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
+        intptr_t uvColInc = layout->planes[C2PlanarLayout::PLANE_U].colInc;
+        if (uvOffset > 0 && uvOffset < uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
+            layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
+        } else if (uvOffset < 0 && uvOffset > -uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
+            layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
+        }
+    }
+
+    ALOGV("C2AllocationGralloc::map: layout: type=%d numPlanes=%d rootPlanes=%d",
+          layout->type, layout->numPlanes, layout->rootPlanes);
+    for (int i = 0; i < layout->numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout->planes[i];
+        ALOGV("C2AllocationGralloc::map: plane[%d]: colInc=%d rowInc=%d rootIx=%u offset=%u",
+              i, plane.colInc, plane.rowInc, plane.rootIx, plane.offset);
+    }
+
     return C2_OK;
 }
 
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index e55bdc0..2115cc3 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -228,10 +228,10 @@
     tv.tv_nsec = timeoutNs % 1000000000;
 
     int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
-    if (ret == 0 || ret == EAGAIN) {
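+    // Note: on failure the futex syscall returns -1 and reports the reason via errno,
+    // so the error codes must be compared against errno rather than the return value.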
+    if (ret == 0 || errno == EAGAIN) {
         return C2_OK;
     }
-    if (ret == EINTR || ret == ETIMEDOUT) {
+    if (errno == EINTR || errno == ETIMEDOUT) {
         return C2_TIMED_OUT;
     }
     return C2_BAD_VALUE;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index fb935b6..eccbf46 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1127,15 +1127,15 @@
                     void *data;
                     size_t size;
 
-                    if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+                    if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
                                                &data, &size)
-                        && size >= 24) {
-                        const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+                        && size >= 5) {
+                        const uint8_t *ptr = (const uint8_t *)data;
                         const uint8_t profile = ptr[2] >> 1;
-                        const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
+                        const uint8_t blCompatibilityId = (ptr[4]) >> 4;
                         bool create_two_tracks = false;
 
-                        if (bl_compatibility_id && bl_compatibility_id != 15) {
+                        if (blCompatibilityId && blCompatibilityId != 15) {
                             create_two_tracks = true;
                         }
 
@@ -1168,11 +1168,11 @@
                             mLastTrack->next = track_b;
                             track_b->next = NULL;
 
-                            // we want to remove the csd-0 key from the metadata, but
+                            // we want to remove the csd-2 key from the metadata, but
                             // don't have an AMediaFormat_* function to do so. Settle
-                            // for replacing this csd-0 with an empty csd-0.
+                            // for replacing this csd-2 with an empty csd-2.
                             uint8_t emptybuffer[8] = {};
-                            AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
+                            AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
                                                    emptybuffer, 0);
 
                             if (4 == profile || 7 == profile || 8 == profile ) {
@@ -1184,8 +1184,6 @@
                             } else if (10 == profile) {
                                 AMediaFormat_setString(track_b->meta,
                                         AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
-                                AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
-                                    data, size - 24);
                             } // Should never get to else part
 
                             mLastTrack = track_b;
@@ -2618,22 +2616,8 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            void *data = nullptr;
-            size_t size = 0;
-            if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
-                //if csd-0 is already present, then append dvcc
-                auto csd0_dvcc = heapbuffer<uint8_t>(size + chunk_data_size);
-
-                memcpy(csd0_dvcc.get(), data, size);
-                memcpy(csd0_dvcc.get() + size, buffer.get(), chunk_data_size);
-
-                AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
-                                    csd0_dvcc.get(), size + chunk_data_size);
-            } else {
-                //if not set csd-0 directly
-                AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+            AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
                                     buffer.get(), chunk_data_size);
-            }
             AMediaFormat_setString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME,
                                    MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
 
@@ -3501,7 +3485,7 @@
         }
         unsigned mask = br.getBits(8);
         for (unsigned i = 0; i < 8; i++) {
-            if (((0x1 << i) && mask) == 0)
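+            // Test bit i with a bitwise AND; a logical && would ignore the bit position.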
+            if (((0x1 << i) & mask) == 0)
                 continue;
 
             if (br.numBitsLeft() < 8) {
@@ -4511,12 +4495,12 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         void *data;
         size_t size;
-        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)
-                || size < 24) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)
+                || size != 24) {
             return NULL;
         }
 
-        const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+        const uint8_t *ptr = (const uint8_t *)data;
         // dv_major.dv_minor Should be 1.0 or 2.1
         if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
             return NULL;
@@ -4596,7 +4580,7 @@
             return ERROR_MALFORMED;
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
-        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
             return ERROR_MALFORMED;
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
@@ -5172,11 +5156,11 @@
         ALOGV("%s DolbyVision stream detected", __FUNCTION__);
         void *data;
         size_t size;
-        CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_0, &data, &size));
+        CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_2, &data, &size));
 
-        const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+        const uint8_t *ptr = (const uint8_t *)data;
 
-        CHECK(size >= 24);
+        CHECK(size == 24);
 
         // dv_major.dv_minor Should be 1.0 or 2.1
         CHECK(!((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)));
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 8faecae..aa59a0c 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -65,6 +65,7 @@
         "libhidlbase",
         "libhidlmemory",
         "libjsoncpp",
+        "libmedia_helper",
         "libprocessgroup",
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
diff --git a/media/janitors/media_leads_OWNERS b/media/janitors/media_leads_OWNERS
new file mode 100644
index 0000000..b7dbdee
--- /dev/null
+++ b/media/janitors/media_leads_OWNERS
@@ -0,0 +1,9 @@
+# Gerrit owners/approvers corresponding to the TLs within the media team:
+# loosely (as of 2022/3), fgoldfain@ and direct reports
+arifdikici@google.com
+elaurent@google.com
+fgoldfain@google.com    #{LAST_RESORT_SUGGESTION}
+lajos@google.com
+nchalko@google.com
+olly@google.com
+robertshih@google.com
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaaudio/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 26a7dc5..e3771f3 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -778,8 +778,16 @@
         __INTRODUCED_IN(26);
 
 /**
- * Request an audio device identified device using an ID.
- * On Android, for example, the ID could be obtained from the Java AudioManager.
+ * Request an audio device identified by an ID.
+ *
+ * The ID could be obtained from the Java AudioManager.
+ * AudioManager.getDevices() returns an array of {@link AudioDeviceInfo},
+ * which contains a getId() method. That ID can be passed to this function.
+ *
+ * It is possible that you may not get the device that you requested.
+ * So if it is important to you, you should call
+ * AAudioStream_getDeviceId() after the stream is opened to
+ * verify the actual ID.
  *
  * The default, if you do not call this function, is {@link #AAUDIO_UNSPECIFIED},
  * in which case the primary device will be used.
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 0f2d7a2..edda6d0 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -91,6 +91,13 @@
  */
 AAUDIO_API bool AAudioStream_isMMapUsed(AAudioStream* stream);
 
+/**
+ * Returns the number of times the audio server has died.
+ *
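+ * This is intended for testing, for example to detect that the audio server
+ * restarted while a stream was in use.
+ *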
+ * @return audio server death count.
+ */
+AAUDIO_API int AAudio_getAudioServerDeathCount();
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 9f0564f..34ecd25 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -605,13 +605,6 @@
     return AAUDIO_ERROR_INVALID_STATE;
 }
 
-aaudio_result_t AudioStreamInternal::updateStateMachine() {
-    if (isDataCallbackActive()) {
-        return AAUDIO_OK; // state is getting updated by the callback thread read/write call
-    }
-    return processCommands();
-}
-
 void AudioStreamInternal::logTimestamp(AAudioServiceMessage &command) {
     static int64_t oldPosition = 0;
     static int64_t oldTime = 0;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 2367572..4ea61d2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -48,7 +48,7 @@
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
 
-    virtual aaudio_result_t updateStateMachine() override;
+    virtual aaudio_result_t processCommands() override;
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
@@ -110,8 +110,6 @@
 
     aaudio_result_t drainTimestampsFromService();
 
-    aaudio_result_t processCommands();
-
     aaudio_result_t stopCallback_l();
 
     virtual void prepareBuffersForStart() {}
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 90ff4a5..b0504c9 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,6 +25,7 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <media/AudioSystem.h>
 #include "AudioClock.h"
 #include "AudioGlobal.h"
 #include "AudioStreamBuilder.h"
@@ -604,3 +605,7 @@
     // Do not return channel index masks as they are not public.
     return AAudio_isChannelIndexMask(channelMask) ? AAUDIO_UNSPECIFIED : channelMask;
 }
+
+AAUDIO_API int AAudio_getAudioServerDeathCount() {
+    return android::AudioSystem::getAudioFlingerDeathCount();
+}
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 06f05b0..8a5186a 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -21,7 +21,9 @@
 #include <atomic>
 #include <stdint.h>
 
+#include <linux/futex.h>
 #include <media/MediaMetricsItem.h>
+#include <sys/syscall.h>
 
 #include <aaudio/AAudio.h>
 
@@ -362,34 +364,37 @@
 }
 
 void AudioStream::setState(aaudio_stream_state_t state) {
-    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
-    if (state == mState) {
+    aaudio_stream_state_t oldState = mState.load();
+    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), oldState, state);
+    if (state == oldState) {
         return; // no change
     }
     // Track transition to DISCONNECTED state.
     if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
-                .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
+                .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(oldState))
                 .record();
     }
     // CLOSED is a final state
-    if (mState == AAUDIO_STREAM_STATE_CLOSED) {
+    if (oldState == AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_CLOSING
+    } else if (oldState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
     // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
+    } else if (oldState == AAUDIO_STREAM_STATE_DISCONNECTED
                && !(state == AAUDIO_STREAM_STATE_CLOSING
                    || state == AAUDIO_STREAM_STATE_CLOSED)) {
         ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
-        mState = state;
+        mState.store(state);
+        // Wake up any thread waiting in waitForStateChange().
+        syscall(SYS_futex, &mState, FUTEX_WAKE_PRIVATE, INT_MAX, NULL, NULL, 0);
     }
 }
 
@@ -408,9 +413,15 @@
         if (durationNanos > timeoutNanoseconds) {
             durationNanos = timeoutNanoseconds;
         }
-        AudioClock::sleepForNanos(durationNanos);
-        timeoutNanoseconds -= durationNanos;
+        struct timespec time;
+        time.tv_sec = durationNanos / AAUDIO_NANOS_PER_SECOND;
+        // Add the fractional nanoseconds.
+        time.tv_nsec = durationNanos - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
 
+        // Sleep for durationNanos. If mState changes from the callback
+        // thread, this thread will wake up earlier.
+        syscall(SYS_futex, &mState, FUTEX_WAIT_PRIVATE, currentState, &time, NULL, 0);
+        timeoutNanoseconds -= durationNanos;
         aaudio_result_t result = updateStateMachine();
         if (result != AAUDIO_OK) {
             return result;
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 5fb4528..8dd5538 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -100,10 +100,17 @@
                                        int64_t *timeNanoseconds) = 0;
 
     /**
-     * Update state machine.()
-     * @return
+     * Update state machine.
+     * @return result of the operation.
      */
-    virtual aaudio_result_t updateStateMachine() = 0;
+    aaudio_result_t updateStateMachine() {
+        if (isDataCallbackActive()) {
+            return AAUDIO_OK; // state is getting updated by the callback thread read/write call
+        }
+        return processCommands();
+    }
+
+    virtual aaudio_result_t processCommands() = 0;
 
     // =========== End ABSTRACT methods ===========================
 
@@ -184,7 +191,7 @@
     // ============== Queries ===========================
 
     aaudio_stream_state_t getState() const {
-        return mState;
+        return mState.load();
     }
 
     virtual int32_t getBufferSize() const {
@@ -674,6 +681,8 @@
 
     const android::sp<MyPlayerBase>   mPlayerBase;
 
+    std::atomic<aaudio_stream_state_t>          mState{AAUDIO_STREAM_STATE_UNINITIALIZED};
+
     // These do not change after open().
     int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
     aaudio_channel_mask_t       mChannelMask = AAUDIO_UNSPECIFIED;
@@ -682,7 +691,6 @@
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
     bool                        mSharingModeMatchRequired = false; // must match sharing mode requested
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
-    aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
     int32_t                     mFramesPerBurst = 0;
     int32_t                     mBufferCapacity = 0;
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd11169..f32ef65 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -127,7 +127,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
@@ -192,7 +192,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index ed31ec9..9a136a7 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -96,29 +96,8 @@
         setFormat(AUDIO_FORMAT_PCM_FLOAT);
     }
 
-    // Maybe change device format to get a FAST path.
-    // AudioRecord does not support FAST mode for FLOAT data.
-    // TODO AudioRecord should allow FLOAT data paths for FAST tracks.
-    // So IF the user asks for low latency FLOAT
-    // AND the sampleRate is likely to be compatible with FAST
-    // THEN request I16 and convert to FLOAT when passing to user.
-    // Note that hard coding 48000 Hz is not ideal because the sampleRate
-    // for a FAST path might not be 48000 Hz.
-    // It normally is but there is a chance that it is not.
-    // And there is no reliable way to know that in advance.
-    // Luckily the consequences of a wrong guess are minor.
-    // We just may not get a FAST track.
-    // But we wouldn't have anyway without this hack.
-    constexpr int32_t kMostLikelySampleRateForFast = 48000;
-    if (getFormat() == AUDIO_FORMAT_PCM_FLOAT
-            && perfMode == AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
-            && (audio_channel_count_from_in_mask(channelMask) <= 2) // FAST only for mono and stereo
-            && (getSampleRate() == kMostLikelySampleRateForFast
-                || getSampleRate() == AAUDIO_UNSPECIFIED)) {
-        setDeviceFormat(AUDIO_FORMAT_PCM_16_BIT);
-    } else {
-        setDeviceFormat(getFormat());
-    }
+
+    setDeviceFormat(getFormat());
 
     // To avoid glitching, let AudioFlinger pick the optimal burst size.
     uint32_t notificationFrames = 0;
@@ -385,8 +364,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamRecord::updateStateMachine()
-{
+aaudio_result_t AudioStreamRecord::processCommands() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_wrapping_frames_t position;
     status_t err;
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 5ce73f9..252ff3c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -58,7 +58,7 @@
 
     int64_t getFramesWritten() override;
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     aaudio_direction_t getDirection() const override {
         return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 6f1dc92..09caa5c 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -378,8 +378,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamTrack::updateStateMachine()
-{
+aaudio_result_t AudioStreamTrack::processCommands() {
     status_t err;
     aaudio_wrapping_frames_t position;
     switch (getState()) {
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 0f4d72b..1f877b5 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -79,7 +79,7 @@
         return AAUDIO_DIRECTION_OUTPUT;
     }
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     int64_t incrementClientFrameCounter(int32_t frames) override {
         return incrementFramesWritten(frames);
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index f45b816..f0cb595 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -3,6 +3,7 @@
     AAudio_convertResultToText;
     AAudio_convertStreamStateToText;
     AAudio_createStreamBuilder;
+    AAudio_getAudioServerDeathCount;    # introduced=33
     AAudio_getMMapPolicy;
     AAudio_setMMapPolicy;
     AAudioStreamBuilder_setPerformanceMode;
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 0792fc5..e322791 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -37,22 +37,69 @@
 
 constexpr int kBytesPerI24Packed = 3;
 
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+void local_convert_float_to_int16(const float *input,
+                                  int16_t *output,
+                                  int count) {
+    for (int i = 0; i < count; i++) {
+        int32_t n = (int32_t) (*input++ * 32768.0f);
+        *output++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+    }
+}
+
+TEST(test_flowgraph, local_convert_float_int16) {
+    static constexpr int kNumSamples = 8;
+    static constexpr std::array<float, kNumSamples> input = {
+        1.0f, 0.5f, -0.25f, -1.0f,
+        0.0f, 53.9f, -87.2f, -1.02f};
+    static constexpr std::array<int16_t, kNumSamples>  expected = {
+        32767, 16384, -8192, -32768,
+        0, 32767, -32768, -32768};
+    std::array<int16_t, kNumSamples> output;
+
+    // Do it inline, which will probably work even with the buggy compiler.
+    // This validates the expected data.
+    const float *in = input.data();
+    int16_t *out = output.data();
+    output.fill(777);
+    for (int i = 0; i < kNumSamples; i++) {
+        int32_t n = (int32_t) (*in++ * 32768.0f);
+        *out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+    }
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+    }
+
+    // Convert audio signal using the function.
+    output.fill(777);
+    local_convert_float_to_int16(input.data(), output.data(), kNumSamples);
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
 TEST(test_flowgraph, module_sinki16) {
-    static const float input[] = {1.0f, 0.5f, -0.25f, -1.0f, 0.0f, 53.9f, -87.2f};
-    static const int16_t expected[] = {32767, 16384, -8192, -32768, 0, 32767, -32768};
-    int16_t output[20];
+    static constexpr int kNumSamples = 8;
+    static constexpr std::array<float, kNumSamples> input = {
+        1.0f, 0.5f, -0.25f, -1.0f,
+        0.0f, 53.9f, -87.2f, -1.02f};
+    static constexpr std::array<int16_t, kNumSamples>  expected = {
+        32767, 16384, -8192, -32768,
+        0, 32767, -32768, -32768};
+    std::array<int16_t, kNumSamples + 10> output; // larger than input
+
     SourceFloat sourceFloat{1};
     SinkI16 sinkI16{1};
 
-    int numInputFrames = sizeof(input) / sizeof(input[0]);
-    sourceFloat.setData(input, numInputFrames);
+    sourceFloat.setData(input.data(), kNumSamples);
     sourceFloat.output.connect(&sinkI16.input);
 
-    int numOutputFrames = sizeof(output) / sizeof(int16_t);
-    int32_t numRead = sinkI16.read(output, numOutputFrames);
-    ASSERT_EQ(numInputFrames, numRead);
+    output.fill(777);
+    int32_t numRead = sinkI16.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples, numRead);
     for (int i = 0; i < numRead; i++) {
-        EXPECT_EQ(expected[i], output[i]);
+        EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 11724e0..b0c9a0c 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -1932,7 +1932,7 @@
         case media::AudioPortType::SESSION:
             legacy.session = VALUE_OR_RETURN(
                     aidl2legacy_int32_t_audio_port_config_session_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, session))));
             return legacy;
 
     }
@@ -1966,9 +1966,9 @@
             return OK;
         }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, session, VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_port_config_session_ext_int32_t(legacy.session)));
-            UNION_SET(*aidlSys, unspecified, false);
             return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
@@ -2816,7 +2816,7 @@
         case media::AudioPortType::SESSION:
             legacy.session = VALUE_OR_RETURN(
                     aidl2legacy_int32_t_audio_port_session_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, session))));
             return legacy;
 
     }
@@ -2852,9 +2852,9 @@
             return OK;
         }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, session, VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_port_session_ext_int32_t(legacy.session)));
-            UNION_SET(*aidlSys, unspecified, false);
             return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index a5fb394..0871365 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -307,6 +307,8 @@
         int32_t maxSharedAudioHistoryMs)
 {
     status_t status = NO_ERROR;
+    LOG_ALWAYS_FATAL_IF(mInitialized, "%s: should not be called twice", __func__);
+    mInitialized = true;
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "notificationFrames %u, sessionId %d, transferType %d, flags %#x, attributionSource %s"
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index a7b10b2..fe6a24a 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "AudioSystem"
 //#define LOG_NDEBUG 0
 
+#include <atomic>
+
 #include <utils/Log.h>
 
 #include <android/media/IAudioPolicyService.h>
@@ -82,6 +84,8 @@
 // HotwordDetectionService.
 sp<IBinder> gAudioFlingerBinder = nullptr;
 
+std::atomic<int> gAudioFlingerDeathCount{0};
+
 void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
     if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
         ALOGE("setAudioFlingerBinder: received a binder of type %s",
@@ -535,6 +539,8 @@
 
     reportError(DEAD_OBJECT);
 
+    gAudioFlingerDeathCount += 1;
+
     ALOGW("AudioFlinger server died!");
 }
 
@@ -2410,6 +2416,10 @@
     return af->getAAudioHardwareBurstMinUsec();
 }
 
+int32_t AudioSystem::getAudioFlingerDeathCount() {
+    return gAudioFlingerDeathCount.load();
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index bceca2d..bec6b10 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -277,10 +277,12 @@
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
-    (void)set(streamType, sampleRate, format, channelMask,
-            frameCount, flags, callback, notificationFrames,
-            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+    // make_unique does not aggregate init until c++20
+    mSetParams = std::unique_ptr<SetParams>{
+            new SetParams{streamType, sampleRate, format, channelMask, frameCount, flags, callback,
+                          notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/,
+                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+                          doNotReconnect, maxRequiredSpeed, selectedDeviceId}};
 }
 
 namespace {
@@ -355,10 +357,11 @@
     } else if (user) {
         LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
     }
-    (void)set(streamType, sampleRate, format, channelMask,
-            frameCount, flags, mLegacyCallbackWrapper, notificationFrames,
-            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+    mSetParams = std::unique_ptr<SetParams>{new SetParams{
+            streamType, sampleRate, format, channelMask, frameCount, flags, mLegacyCallbackWrapper,
+            notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId,
+            transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
+            maxRequiredSpeed, selectedDeviceId}};
 }
 
 AudioTrack::AudioTrack(
@@ -387,10 +390,11 @@
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
-    (void)set(streamType, sampleRate, format, channelMask,
-            0 /*frameCount*/, flags, callback, notificationFrames,
-            sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed);
+    mSetParams = std::unique_ptr<SetParams>{
+            new SetParams{streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
+                          callback, notificationFrames, sharedBuffer, false /*threadCanCallJava*/,
+                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+                          doNotReconnect, maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
 }
 
 AudioTrack::AudioTrack(
@@ -424,11 +428,18 @@
     } else if (user) {
         LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
     }
+    mSetParams = std::unique_ptr<SetParams>{new SetParams{
+            streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
+            mLegacyCallbackWrapper, notificationFrames, sharedBuffer, false /*threadCanCallJava*/,
+            sessionId, transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
+            maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
+}
 
-    (void)set(streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
-              mLegacyCallbackWrapper, notificationFrames, sharedBuffer,
-              false /*threadCanCallJava*/, sessionId, transferType, offloadInfo, attributionSource,
-              pAttributes, doNotReconnect, maxRequiredSpeed);
+void AudioTrack::onFirstRef() {
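+    // set() is deferred from the constructors to onFirstRef() so that it runs only once the
+    // object holds its first strong reference (see mSetParams in AudioTrack.h).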
+    if (mSetParams) {
+        set(*mSetParams);
+        mSetParams.reset();
+    }
 }
 
 AudioTrack::~AudioTrack()
@@ -545,7 +556,6 @@
     pid_t myPid;
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    sp<IAudioTrackCallback> _callback = callback.promote();
     std::string errorMessage;
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
@@ -608,7 +618,7 @@
     case TRANSFER_DEFAULT:
         if (sharedBuffer != 0) {
             transferType = TRANSFER_SHARED;
-        } else if (_callback == nullptr|| threadCanCallJava) {
+        } else if (callback == nullptr || threadCanCallJava) {
             transferType = TRANSFER_SYNC;
         } else {
             transferType = TRANSFER_CALLBACK;
@@ -616,7 +626,7 @@
         break;
     case TRANSFER_CALLBACK:
     case TRANSFER_SYNC_NOTIF_CALLBACK:
-        if (_callback == nullptr || sharedBuffer != 0) {
+        if (callback == nullptr || sharedBuffer != 0) {
             errorMessage = StringPrintf(
                     "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
                     __func__, convertTransferToText(transferType));
@@ -771,7 +781,7 @@
     mAuxEffectId = 0;
     mCallback = callback;
 
-    if (_callback != nullptr) {
+    if (callback != nullptr) {
         mAudioTrackThread = sp<AudioTrackThread>::make(*this);
         mAudioTrackThread->run("AudioTrack", ANDROID_PRIORITY_AUDIO, 0 /*stack*/);
         // thread begins in paused state, and will not reference us until start()
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index d8c18c0..3751f80 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -1,7 +1,15 @@
 {
   "presubmit": [
     {
-       "name": "audio_aidl_conversion_tests"
+      "name": "audio_aidl_conversion_tests"
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
index 2cdf4f6..d9c6df4 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
@@ -31,4 +31,6 @@
     AudioPortDeviceExtSys device;
     /** System-only parameters when the port is an audio mix. */
     AudioPortMixExtSys mix;
+    /** Framework audio session identifier. */
+    int session;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 6afe023..10da028 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -227,4 +227,9 @@
     int getAAudioHardwareBurstMinUsec();
 
     void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+
+    // When adding a new method, please review and update
+    // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
+    // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
+    // AudioFlinger.cpp IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index e2ef772..8ac89a8 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -390,4 +390,8 @@
      * for the specified audio attributes.
      */
     AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
+
+    // When adding a new method, please review and update
+    // AudioPolicyService.cpp AudioPolicyService::onTransact()
+    // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
 }
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
index 813cd5c..6ec0405 100644
--- a/media/libaudioclient/aidl/android/media/IEffect.aidl
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -62,4 +62,8 @@
      * TODO(ytai): Explain how this should be used exactly.
      */
     SharedFileRegion getCblk();
+
+    // When adding a new method, please review and update
+    // Effects.cpp AudioFlinger::EffectHandle::onTransact()
+    // Effects.cpp IEFFECT_BINDER_METHOD_MACRO_LIST
 }
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 036e72e..5536bcb 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -84,13 +84,15 @@
 };
 
 template <typename T, typename X, typename FUNC>
-std::vector<T> getFlags(const xsdc_enum_range<X> &range, const FUNC &func,
-                        const std::string &findString = {}) {
+std::vector<T> getFlags(const xsdc_enum_range<X>& range, const FUNC& func,
+                        const std::string& findString = {},
+                        const std::set<X>& excludedValues = {}) {
     std::vector<T> vec;
     for (const auto &xsdEnumVal : range) {
         T enumVal;
         std::string enumString = toString(xsdEnumVal);
         if (enumString.find(findString) != std::string::npos &&
+            (excludedValues.find(xsdEnumVal) == excludedValues.end()) &&
             func(enumString.c_str(), &enumVal)) {
             vec.push_back(enumVal);
         }
@@ -102,13 +104,29 @@
     getFlags<audio_stream_type_t, xsd::AudioStreamType, decltype(audio_stream_type_from_string)>(
         xsdc_enum_range<xsd::AudioStreamType>{}, audio_stream_type_from_string);
 
+/**
+ * AudioFormat - AUDIO_FORMAT_HE_AAC_V1 and AUDIO_FORMAT_HE_AAC_V2
+ * are excluded from kFormats[] in order to avoid the abort triggered
+ * for these two types of AudioFormat in
+ * AidlConversion::legacy2aidl_audio_format_t_AudioFormatDescription()
+ */
 static const std::vector<audio_format_t> kFormats =
-    getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
-        xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string);
+        getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
+                xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string, {},
+                {xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V1,
+                 xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V2});
 
+/**
+ * AudioChannelMask - AUDIO_CHANNEL_IN_6
+ * is excluded from kChannelMasks[] in order to avoid the abort triggered
+ * for this type of AudioChannelMask in
+ * AidlConversion::legacy2aidl_audio_channel_mask_t_AudioChannelLayout()
+ */
 static const std::vector<audio_channel_mask_t> kChannelMasks =
-    getFlags<audio_channel_mask_t, xsd::AudioChannelMask, decltype(audio_channel_mask_from_string)>(
-        xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string);
+        getFlags<audio_channel_mask_t, xsd::AudioChannelMask,
+                 decltype(audio_channel_mask_from_string)>(
+                xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string, {},
+                {xsd::AudioChannelMask::AUDIO_CHANNEL_IN_6});
 
 static const std::vector<audio_usage_t> kUsages =
     getFlags<audio_usage_t, xsd::AudioUsage, decltype(audio_usage_from_string)>(
@@ -126,9 +144,17 @@
     getFlags<audio_gain_mode_t, xsd::AudioGainMode, decltype(audio_gain_mode_from_string)>(
         xsdc_enum_range<xsd::AudioGainMode>{}, audio_gain_mode_from_string);
 
+/**
+ * AudioDevice - AUDIO_DEVICE_IN_AMBIENT and AUDIO_DEVICE_IN_COMMUNICATION
+ * are excluded from kDevices[] in order to avoid the abort triggered
+ * for these two types of AudioDevice in
+ * AidlConversion::aidl2legacy_AudioDeviceDescription_audio_devices_t()
+ */
 static const std::vector<audio_devices_t> kDevices =
-    getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
-        xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string);
+        getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
+                xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string, {},
+                {xsd::AudioDevice::AUDIO_DEVICE_IN_AMBIENT,
+                 xsd::AudioDevice::AUDIO_DEVICE_IN_COMMUNICATION});
 
 static const std::vector<audio_input_flags_t> kInputFlags =
     getFlags<audio_input_flags_t, xsd::AudioInOutFlag, decltype(audio_input_flag_from_string)>(
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index faea716..cb05dd9 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -740,6 +740,7 @@
     wp<IAudioRecordCallback> mCallback;
     sp<IAudioRecordCallback> mLegacyCallbackWrapper;
 
+    bool                    mInitialized = false;   // Protect against double set
     // for notification APIs
     uint32_t                mNotificationFramesReq; // requested number of frames between each
                                                     // notification callback
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index e89ce15..01f22a8 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -565,6 +565,11 @@
     static status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                             std::vector<audio_profile>* audioProfiles);
 
+    /**
+     * @return the number of times AudioFlinger has died.
+     */
+    static int32_t getAudioFlingerDeathCount();
+
     // A listener for capture state changes.
     class CaptureStateListener : public virtual RefBase {
     public:
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 1708cc7..1cf6ef9 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -458,6 +458,38 @@
                             float maxRequiredSpeed = 1.0f,
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
+            struct SetParams {
+                audio_stream_type_t streamType;
+                uint32_t sampleRate;
+                audio_format_t format;
+                audio_channel_mask_t channelMask;
+                size_t frameCount;
+                audio_output_flags_t flags;
+                wp<IAudioTrackCallback> callback;
+                int32_t notificationFrames;
+                sp<IMemory> sharedBuffer;
+                bool threadCanCallJava;
+                audio_session_t sessionId;
+                transfer_type transferType;
+                // TODO don't take pointers here
+                const audio_offload_info_t *offloadInfo;
+                AttributionSourceState attributionSource;
+                const audio_attributes_t* pAttributes;
+                bool doNotReconnect;
+                float maxRequiredSpeed;
+                audio_port_handle_t selectedDeviceId;
+            };
+        private:
+            // Note: Consumes parameters
+            void        set(SetParams& s) {
+                (void)set(s.streamType, s.sampleRate, s.format, s.channelMask, s.frameCount,
+                          s.flags, std::move(s.callback), s.notificationFrames,
+                          std::move(s.sharedBuffer), s.threadCanCallJava, s.sessionId,
+                          s.transferType, s.offloadInfo, std::move(s.attributionSource),
+                          s.pAttributes, s.doNotReconnect, s.maxRequiredSpeed, s.selectedDeviceId);
+            }
+            void       onFirstRef() override;
+        public:
             status_t    set(audio_stream_type_t streamType,
                             uint32_t sampleRate,
                             audio_format_t format,
@@ -1349,6 +1381,8 @@
     wp<IAudioTrackCallback> mCallback;                   // callback handler for events, or NULL
     sp<IAudioTrackCallback> mLegacyCallbackWrapper;      // wrapper for legacy callback interface
     // for notification APIs
+    std::unique_ptr<SetParams> mSetParams;          // Temporary copy of ctor params to allow for
+                                                    // deferred set after first reference.
 
     bool                    mInitialized = false;   // Set after track is initialized
     // next 2 fields are const after constructor or set()
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index e047378..3c3715d 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -482,9 +482,9 @@
      * Legacy server should implement this interface in order to be wrapped.
      */
     class Delegate : public IAudioFlinger {
-    protected:
         friend class AudioFlingerServerAdapter;
-
+    public:
+        // expose the TransactionCode enum for TimeCheck purposes.
         enum class TransactionCode {
             CREATE_TRACK = media::BnAudioFlingerService::TRANSACTION_createTrack,
             CREATE_RECORD = media::BnAudioFlingerService::TRANSACTION_createRecord,
@@ -553,6 +553,7 @@
             SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
         };
 
+    protected:
         /**
          * An optional hook, called on every transaction, allowing additional operations to be
          * performed before/after the unparceling of the data and dispatching to the respective
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index f6d249a..efe8437 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -1,7 +1,15 @@
 {
   "presubmit": [
     {
-       "name": "audiofoundation_parcelable_test"
+      "name": "audiofoundation_parcelable_test"
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libaudiohal/DevicesFactoryHalInterface.cpp b/media/libaudiohal/DevicesFactoryHalInterface.cpp
index 325a547..5ad26fc 100644
--- a/media/libaudiohal/DevicesFactoryHalInterface.cpp
+++ b/media/libaudiohal/DevicesFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <string>
+
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <media/audiohal/FactoryHalHidl.h>
 
@@ -21,8 +23,10 @@
 
 // static
 sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+    using namespace std::string_literals;
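+    // Pass the effects factory as the sibling interface so that the core and effect HALs
+    // resolve to the same HAL package major version.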
     return createPreferredImpl<DevicesFactoryHalInterface>(
-            "android.hardware.audio", "IDevicesFactory");
+            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s),
+            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s));
 }
 
 } // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalInterface.cpp b/media/libaudiohal/EffectsFactoryHalInterface.cpp
index bc3b4c1..8a28f64 100644
--- a/media/libaudiohal/EffectsFactoryHalInterface.cpp
+++ b/media/libaudiohal/EffectsFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <string>
+
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/audiohal/FactoryHalHidl.h>
 
@@ -21,8 +23,10 @@
 
 // static
 sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
+    using namespace std::string_literals;
     return createPreferredImpl<EffectsFactoryHalInterface>(
-            "android.hardware.audio.effect", "IEffectsFactory");
+            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s),
+            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s));
 }
 
 // static
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 804edcc..590fec5 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -16,6 +16,10 @@
 
 #define LOG_TAG "FactoryHalHidl"
 
+#include <algorithm>
+#include <array>
+#include <utility>
+
 #include <media/audiohal/FactoryHalHidl.h>
 
 #include <dlfcn.h>
@@ -28,15 +32,16 @@
 namespace android::detail {
 
 namespace {
-/** Supported HAL versions, in order of preference.
+/** Supported HAL versions, from most recent to least recent.
  */
-const char* sAudioHALVersions[] = {
-    "7.1",
-    "7.0",
-    "6.0",
-    "5.0",
-    "4.0",
-    nullptr
+#define CONC_VERSION(maj, min) #maj "." #min
+#define DECLARE_VERSION(maj, min) std::make_pair(std::make_pair(maj, min), CONC_VERSION(maj, min))
+static constexpr std::array<std::pair<std::pair<int, int>, const char*>, 5> sAudioHALVersions = {
+    DECLARE_VERSION(7, 1),
+    DECLARE_VERSION(7, 0),
+    DECLARE_VERSION(6, 0),
+    DECLARE_VERSION(5, 0),
+    DECLARE_VERSION(4, 0)
 };
 
 bool createHalService(const std::string& version, const std::string& interface,
@@ -94,11 +99,22 @@
 
 }  // namespace
 
-void* createPreferredImpl(const std::string& package, const std::string& interface) {
-    for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
-        void* rawInterface = nullptr;
-        if (hasHalService(package, *version, interface)
-                && createHalService(*version, interface, &rawInterface)) {
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface) {
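+    // Find the most recent supported version of each interface; proceed only when both share
+    // the same major version, and load the library for the higher of the two minor versions.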
+    auto findMostRecentVersion = [](const InterfaceName& iface) {
+        return std::find_if(detail::sAudioHALVersions.begin(), detail::sAudioHALVersions.end(),
+                [&](const auto& v) { return hasHalService(iface.first, v.second, iface.second); });
+    };
+    auto ifaceVersionIt = findMostRecentVersion(iface);
+    auto siblingVersionIt = findMostRecentVersion(siblingIface);
+    if (ifaceVersionIt != detail::sAudioHALVersions.end() &&
+            siblingVersionIt != detail::sAudioHALVersions.end() &&
+            // same major version
+            ifaceVersionIt->first.first == siblingVersionIt->first.first) {
+        std::string libraryVersion =
+                ifaceVersionIt->first >= siblingVersionIt->first ?
+                ifaceVersionIt->second : siblingVersionIt->second;
+        void* rawInterface;
+        if (createHalService(libraryVersion, iface.second, &rawInterface)) {
             return rawInterface;
         }
     }
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaudiohal/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd435fe..d30883a 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -10,6 +10,7 @@
 filegroup {
     name: "audio_core_hal_client_sources",
     srcs: [
+        "CoreConversionHelperHidl.cpp",
         "DeviceHalHidl.cpp",
         "DevicesFactoryHalHidl.cpp",
         "StreamHalHidl.cpp",
@@ -20,6 +21,7 @@
     name: "audio_effect_hal_client_sources",
     srcs: [
         "EffectBufferHalHidl.cpp",
+        "EffectConversionHelperHidl.cpp",
         "EffectHalHidl.cpp",
         "EffectsFactoryHalHidl.cpp",
     ],
@@ -28,10 +30,6 @@
 cc_defaults {
     name: "libaudiohal_default",
 
-    srcs: [
-        "ConversionHelperHidl.cpp",
-    ],
-
     cflags: [
         "-Wall",
         "-Wextra",
@@ -76,6 +74,7 @@
     srcs: [
         ":audio_core_hal_client_sources",
         ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@4.0",
@@ -98,6 +97,7 @@
     srcs: [
         ":audio_core_hal_client_sources",
         ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@5.0",
@@ -120,6 +120,7 @@
     srcs: [
         ":audio_core_hal_client_sources",
         ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@6.0",
@@ -136,20 +137,41 @@
     ]
 }
 
+cc_library_static {
+    name: "libaudiohal.effect@7.0",
+    defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_effect_hal_client_sources",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
+
 cc_library_shared {
     name: "libaudiohal@7.0",
     defaults: ["libaudiohal_default"],
     srcs: [
         ":audio_core_hal_client_sources",
-        ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
     ],
-    shared_libs: [
+    static_libs: [
         "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-enums",
         "android.hardware.audio.common@7.0-util",
         "android.hardware.audio.effect@7.0",
         "android.hardware.audio.effect@7.0-util",
         "android.hardware.audio@7.0",
         "android.hardware.audio@7.0-util",
+        "libaudiohal.effect@7.0",
     ],
     cflags: [
         "-DMAJOR_VERSION=7",
@@ -163,13 +185,19 @@
     defaults: ["libaudiohal_default"],
     srcs: [
         ":audio_core_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
     ],
-    shared_libs: [
+    static_libs: [
         "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.common@7.1-enums",
         "android.hardware.audio.common@7.1-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
         "android.hardware.audio@7.0",
         "android.hardware.audio@7.1",
         "android.hardware.audio@7.1-util",
+        "libaudiohal.effect@7.0",
     ],
     cflags: [
         "-DMAJOR_VERSION=7",
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index 9368551..6e2c831 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -17,33 +17,25 @@
 #ifndef ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 #define ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 
-#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
+#include <functional>
+
 #include <hidl/HidlSupport.h>
 #include <system/audio.h>
 #include <system/audio.h>
+#include <utils/Log.h>
-#include <utils/String8.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
-
-using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
-using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
-
-using ::android::hardware::Return;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
 
 namespace android {
 
+template<typename HalResult>
 class ConversionHelperHidl {
   protected:
-    static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
-    static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
-    static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
-    static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+    using HalResultConverter = std::function<status_t(const HalResult&)>;
+    const std::string mClassName;
 
-    ConversionHelperHidl(const char* className);
+    ConversionHelperHidl(std::string_view className, HalResultConverter resultConv)
+            : mClassName(className), mResultConverter(resultConv) {}
 
     template<typename R, typename T>
-    status_t processReturn(const char* funcName, const Return<R>& ret, T *retval) {
+    status_t processReturn(const char* funcName,
+            const ::android::hardware::Return<R>& ret, T *retval) {
         if (ret.isOk()) {
             // This way it also works for enum class to unscoped enum conversion.
             *retval = static_cast<T>(static_cast<R>(ret));
@@ -53,35 +45,40 @@
     }
 
     template<typename T>
-    status_t processReturn(const char* funcName, const Return<T>& ret) {
+    status_t processReturn(const char* funcName, const ::android::hardware::Return<T>& ret) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
         return ret.isOk() ? OK : FAILED_TRANSACTION;
     }
 
-    status_t processReturn(const char* funcName, const Return<CoreResult>& ret) {
+    status_t processReturn(const char* funcName,
+            const ::android::hardware::Return<HalResult>& ret) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
-        return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
+        return ret.isOk() ? mResultConverter(ret) : FAILED_TRANSACTION;
     }
 
     template<typename T>
     status_t processReturn(
-            const char* funcName, const Return<T>& ret, CoreResult retval) {
+            const char* funcName, const ::android::hardware::Return<T>& ret, HalResult retval) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
-        return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+        return ret.isOk() ? mResultConverter(retval) : FAILED_TRANSACTION;
+    }
+
+    const std::string& getClassName() const {
+        return mClassName;
     }
 
   private:
-    const char* mClassName;
+    HalResultConverter mResultConverter;
 
-    static status_t analyzeResult(const CoreResult& result);
-
-    void emitError(const char* funcName, const char* description);
+    void emitError(const char* funcName, const char* description) {
+        ALOGE("%s %p %s: %s (from rpc)", mClassName.c_str(), this, funcName, description);
+    }
 };
 
 }  // namespace android
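
The header above turns the former concrete helper into a class template parameterized on the HAL result enum, with the enum-to-status_t conversion injected by the subclass. Below is a self-contained sketch of that pattern, using a made-up FooResult enum and plain status codes instead of the Android HIDL Return<> wrapper:

#include <functional>
#include <iostream>
#include <string>
#include <string_view>
#include <utility>

using status_t = int;
constexpr status_t OK = 0, BAD_VALUE = -22, NO_INIT = -19;

// Base class shared by all helpers: stores the class name for logging and a
// converter from the HAL result enum to status_t, supplied by the subclass.
template <typename HalResult>
class ConversionHelper {
  protected:
    using ResultConverter = std::function<status_t(const HalResult&)>;
    ConversionHelper(std::string_view className, ResultConverter conv)
            : mClassName(className), mConvert(std::move(conv)) {}
    status_t process(const char* func, const HalResult& r) const {
        status_t st = mConvert(r);
        if (st != OK) std::cerr << mClassName << "::" << func << " failed\n";
        return st;
    }
  private:
    std::string mClassName;
    ResultConverter mConvert;
};

enum class FooResult { OK, INVALID_ARGUMENTS, NOT_INITIALIZED };  // hypothetical

class FooHelper : public ConversionHelper<FooResult> {
  public:
    FooHelper() : ConversionHelper("FooHelper", analyze) {}
    status_t call(FooResult r) { return process("call", r); }
  private:
    static status_t analyze(const FooResult& r) {
        switch (r) {
            case FooResult::OK: return OK;
            case FooResult::INVALID_ARGUMENTS: return BAD_VALUE;
            case FooResult::NOT_INITIALIZED: return NO_INIT;
        }
        return NO_INIT;
    }
};

int main() { return FooHelper().call(FooResult::OK); }
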
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
similarity index 87%
rename from media/libaudiohal/impl/ConversionHelperHidl.cpp
rename to media/libaudiohal/impl/CoreConversionHelperHidl.cpp
index 1d34814..2ac8a42 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
@@ -21,7 +21,7 @@
 #include <media/AudioParameter.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
 
 namespace android {
 
@@ -29,7 +29,8 @@
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
 // static
-status_t ConversionHelperHidl::keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
+status_t CoreConversionHelperHidl::keysFromHal(
+        const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
     AudioParameter halKeys(keys);
     if (halKeys.size() == 0) return BAD_VALUE;
     hidlKeys->resize(halKeys.size());
@@ -74,7 +75,7 @@
 }
 
 // static
-status_t ConversionHelperHidl::parametersFromHal(
+status_t CoreConversionHelperHidl::parametersFromHal(
         const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams) {
     AudioParameter params(kvPairs);
     if (params.size() == 0) return BAD_VALUE;
@@ -90,7 +91,7 @@
 }
 
 // static
-void ConversionHelperHidl::parametersToHal(
+void CoreConversionHelperHidl::parametersToHal(
         const hidl_vec<ParameterValue>& parameters, String8 *values) {
     AudioParameter params;
     for (size_t i = 0; i < parameters.size(); ++i) {
@@ -99,12 +100,11 @@
     values->setTo(params.toString());
 }
 
-ConversionHelperHidl::ConversionHelperHidl(const char* className)
-        : mClassName(className) {
-}
+CoreConversionHelperHidl::CoreConversionHelperHidl(std::string_view className)
+        : ConversionHelperHidl<CoreResult>(className, analyzeResult) {}
 
 // static
-void ConversionHelperHidl::argsFromHal(
+void CoreConversionHelperHidl::argsFromHal(
         const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs) {
     hidlArgs->resize(args.size());
     for (size_t i = 0; i < args.size(); ++i) {
@@ -113,19 +113,15 @@
 }
 
 // static
-status_t ConversionHelperHidl::analyzeResult(const Result& result) {
+status_t CoreConversionHelperHidl::analyzeResult(const CoreResult& result) {
     switch (result) {
         case Result::OK: return OK;
         case Result::INVALID_ARGUMENTS: return BAD_VALUE;
         case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
         case Result::NOT_INITIALIZED: return NO_INIT;
         case Result::NOT_SUPPORTED: return INVALID_OPERATION;
-        default: return NO_INIT;
     }
-}
-
-void ConversionHelperHidl::emitError(const char* funcName, const char* description) {
-    ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
+    return NO_INIT;
 }
 
 }  // namespace android
diff --git a/media/libaudiohal/impl/CoreConversionHelperHidl.h b/media/libaudiohal/impl/CoreConversionHelperHidl.h
new file mode 100644
index 0000000..a4d76f3
--- /dev/null
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
+
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+
+namespace android {
+
+class CoreConversionHelperHidl : public ConversionHelperHidl<CoreResult> {
+  protected:
+    static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
+    static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
+    static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
+    static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+
+    CoreConversionHelperHidl(std::string_view className);
+
+  private:
+    static status_t analyzeResult(const CoreResult& result);
+};
+
+}  // namespace android
+
+#endif // ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 16863e4..0cdf621 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -23,6 +23,7 @@
 #include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioContainers.h>
+#include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
@@ -45,13 +46,16 @@
 using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
+#define TIME_CHECK() auto timeCheck = \
+        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
 DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
-        : ConversionHelperHidl("Device"), mDevice(device) {
+        : CoreConversionHelperHidl("DeviceHalHidl"), mDevice(device) {
 }
 
 DeviceHalHidl::DeviceHalHidl(
         const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device)
-        : ConversionHelperHidl("Device"),
+        : CoreConversionHelperHidl("DeviceHalHidl"),
 #if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
           mDevice(device),
 #endif
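
TIME_CHECK() expands to a local object returned by mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__), so each guarded HAL call is measured for the lifetime of the enclosing scope. The following is a generic stand-in showing only the RAII idea; it is not the mediautils::TimeCheck implementation:

#include <chrono>
#include <cstdio>
#include <string>
#include <utility>

// Hypothetical scope timer: records how long the enclosing method ran and
// reports it when the scope ends.
class ScopedMethodTimer {
  public:
    ScopedMethodTimer(std::string cls, std::string method)
            : mTag(std::move(cls) + "::" + std::move(method)),
              mStart(std::chrono::steady_clock::now()) {}
    ~ScopedMethodTimer() {
        using namespace std::chrono;
        auto ms = duration_cast<milliseconds>(steady_clock::now() - mStart).count();
        std::printf("%s took %lld ms\n", mTag.c_str(), static_cast<long long>(ms));
    }
  private:
    std::string mTag;
    std::chrono::steady_clock::time_point mStart;
};

// Illustrative macro; the class name is hard-coded here only for the sketch.
#define TIME_CHECK() ScopedMethodTimer timeCheck_("DeviceHalHidl", __func__)

int setMasterVolume(float /*volume*/) {
    TIME_CHECK();  // reports elapsed time when this function returns
    return 0;
}

int main() { return setMasterVolume(1.0f); }
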
@@ -84,22 +88,26 @@
 }
 
 status_t DeviceHalHidl::initCheck() {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("initCheck", mDevice->initCheck());
 }
 
 status_t DeviceHalHidl::setVoiceVolume(float volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (mPrimaryDevice == 0) return INVALID_OPERATION;
     return processReturn("setVoiceVolume", mPrimaryDevice->setVoiceVolume(volume));
 }
 
 status_t DeviceHalHidl::setMasterVolume(float volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMasterVolume", mDevice->setMasterVolume(volume));
 }
 
 status_t DeviceHalHidl::getMasterVolume(float *volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMasterVolume(
@@ -113,17 +121,20 @@
 }
 
 status_t DeviceHalHidl::setMode(audio_mode_t mode) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (mPrimaryDevice == 0) return INVALID_OPERATION;
     return processReturn("setMode", mPrimaryDevice->setMode(AudioMode(mode)));
 }
 
 status_t DeviceHalHidl::setMicMute(bool state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMicMute", mDevice->setMicMute(state));
 }
 
 status_t DeviceHalHidl::getMicMute(bool *state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMicMute(
@@ -137,11 +148,13 @@
 }
 
 status_t DeviceHalHidl::setMasterMute(bool state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMasterMute", mDevice->setMasterMute(state));
 }
 
 status_t DeviceHalHidl::getMasterMute(bool *state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMasterMute(
@@ -155,6 +168,7 @@
 }
 
 status_t DeviceHalHidl::setParameters(const String8& kvPairs) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     hidl_vec<ParameterValue> hidlParams;
     status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -165,6 +179,7 @@
 }
 
 status_t DeviceHalHidl::getParameters(const String8& keys, String8 *values) {
+    TIME_CHECK();
     values->clear();
     if (mDevice == 0) return NO_INIT;
     hidl_vec<hidl_string> hidlKeys;
@@ -185,6 +200,7 @@
 
 status_t DeviceHalHidl::getInputBufferSize(
         const struct audio_config *config, size_t *size) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
     HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
@@ -207,6 +223,7 @@
         struct audio_config *config,
         const char *address,
         sp<StreamOutHalInterface> *outStream) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
     if (status_t status = CoreUtils::deviceAddressFromHal(deviceType, address, &hidlDevice);
@@ -263,6 +280,7 @@
         audio_devices_t outputDevice,
         const char *outputDeviceAddress,
         sp<StreamInHalInterface> *inStream) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
     if (status_t status = CoreUtils::deviceAddressFromHal(devices, address, &hidlDevice);
@@ -326,6 +344,7 @@
 }
 
 status_t DeviceHalHidl::supportsAudioPatches(bool *supportsPatches) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("supportsAudioPatches", mDevice->supportsAudioPatches(), supportsPatches);
 }
@@ -336,6 +355,7 @@
         unsigned int num_sinks,
         const struct audio_port_config *sinks,
         audio_patch_handle_t *patch) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (patch == nullptr) return BAD_VALUE;
 
@@ -381,6 +401,7 @@
 }
 
 status_t DeviceHalHidl::releaseAudioPatch(audio_patch_handle_t patch) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
 }
@@ -403,10 +424,12 @@
 }
 
 status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+    TIME_CHECK();
     return getAudioPortImpl(port);
 }
 
 status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+    TIME_CHECK();
 #if MAJOR_VERSION >= 7
     return getAudioPortImpl(port);
 #else
@@ -427,6 +450,7 @@
 }
 
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
     HidlUtils::audioPortConfigFromHal(*config, &hidlConfig);
@@ -441,6 +465,7 @@
 }
 #elif MAJOR_VERSION >= 4
 status_t DeviceHalHidl::getMicrophones(std::vector<media::MicrophoneInfo> *microphonesInfo) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMicrophones(
@@ -461,6 +486,7 @@
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::addDeviceEffect(
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
             static_cast<AudioPortHandle>(device), effect->effectId()));
@@ -475,6 +501,7 @@
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::removeDeviceEffect(
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
             static_cast<AudioPortHandle>(device), effect->effectId()));
@@ -487,6 +514,7 @@
 #endif
 
 status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
 #if MAJOR_VERSION == 7 && MINOR_VERSION == 1
     if (supportsSetConnectedState7_1) {
@@ -513,6 +541,7 @@
 }
 
 error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     audio_hw_sync_t value;
     Result result;
@@ -525,6 +554,7 @@
 }
 
 status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
     hidlHandle->data[0] = fd;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 8a97a55..f6519b6 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -22,11 +22,11 @@
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
 
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
 
 namespace android {
 
-class DeviceHalHidl : public DeviceHalInterface, public ConversionHelperHidl
+class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
 {
   public:
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 8f3c907..4069a6b 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -26,7 +26,6 @@
 #include <media/audiohal/hidl/HalDeathHandler.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
 #include "DeviceHalHidl.h"
 #include "DevicesFactoryHalHidl.h"
 
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.cpp b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
index 65297af..9d5f72e 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
@@ -23,7 +23,6 @@
 #include <hidlmemory/mapping.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 
 using ::android::hardware::Return;
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.cpp b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
new file mode 100644
index 0000000..9e4f79c
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HalHidl"
+#include <utils/Log.h>
+
+#include "EffectConversionHelperHidl.h"
+
+namespace android {
+
+EffectConversionHelperHidl::EffectConversionHelperHidl(std::string_view className)
+        : ConversionHelperHidl<EffectResult>(className, analyzeResult) {
+}
+
+// static
+status_t EffectConversionHelperHidl::analyzeResult(const EffectResult& result) {
+    switch (result) {
+        case EffectResult::OK: return OK;
+        case EffectResult::INVALID_ARGUMENTS: return BAD_VALUE;
+        case EffectResult::INVALID_STATE: return NOT_ENOUGH_DATA;
+        case EffectResult::NOT_INITIALIZED: return NO_INIT;
+        case EffectResult::NOT_SUPPORTED: return INVALID_OPERATION;
+        case EffectResult::RESULT_TOO_BIG: return NO_MEMORY;
+    }
+    return NO_INIT;
+}
+
+}  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.h b/media/libaudiohal/impl/EffectConversionHelperHidl.h
new file mode 100644
index 0000000..4371d12
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/effect/FILE_VERSION/types.h)
+
+using EffectResult = ::android::hardware::audio::effect::CPP_VERSION::Result;
+
+namespace android {
+
+class EffectConversionHelperHidl : public ConversionHelperHidl<EffectResult> {
+  protected:
+    static status_t analyzeResult(const EffectResult& result);
+
+    EffectConversionHelperHidl(std::string_view className);
+};
+
+}  // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 1bb1e5f..f46a953 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -41,7 +41,8 @@
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
-        : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
+        : EffectConversionHelperHidl("Effect"),
+          mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
     effect_descriptor_t halDescriptor{};
     if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
         mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
@@ -59,19 +60,6 @@
     }
 }
 
-// static
-status_t EffectHalHidl::analyzeResult(const Result& result) {
-    switch (result) {
-        case Result::OK: return OK;
-        case Result::INVALID_ARGUMENTS: return BAD_VALUE;
-        case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
-        case Result::NOT_INITIALIZED: return NO_INIT;
-        case Result::NOT_SUPPORTED: return INVALID_OPERATION;
-        case Result::RESULT_TOO_BIG: return NO_MEMORY;
-        default: return NO_INIT;
-    }
-}
-
 status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
     if (!mBuffersChanged) {
         if (buffer.get() == nullptr || mInBuffer.get() == nullptr) {
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 07745db..e139768 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -23,6 +23,8 @@
 #include <fmq/MessageQueue.h>
 #include <system/audio_effect.h>
 
+#include "EffectConversionHelperHidl.h"
+
 using ::android::hardware::EventFlag;
 using ::android::hardware::MessageQueue;
 
@@ -31,7 +33,7 @@
 
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-class EffectHalHidl : public EffectHalInterface
+class EffectHalHidl : public EffectHalInterface, public EffectConversionHelperHidl
 {
   public:
     // Set the input buffer.
@@ -77,8 +79,6 @@
     EventFlag* mEfGroup;
     bool mIsInput = false;
 
-    static status_t analyzeResult(const Result& result);
-
     // Can not be constructed directly by clients.
     EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 90954b2..d7217fc 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -21,8 +21,9 @@
 
 #include <UuidUtils.h>
 #include <util/EffectUtils.h>
+#include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
 #include "EffectsFactoryHalHidl.h"
@@ -38,7 +39,7 @@
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
 EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
-        : ConversionHelperHidl("EffectsFactory") {
+        : EffectConversionHelperHidl("EffectsFactory") {
     ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
     mEffectsFactory = effectsFactory;
 }
@@ -205,7 +206,10 @@
 
 } // namespace effect
 
-extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+// When a shared library is built from a static library, symbols that the
+// static library exports explicitly are still optimized out unless they are
+// actually referenced by the shared library itself. See EffectsFactoryHalHidlEntry.cpp.
+extern "C" void* createIEffectsFactoryImpl() {
     auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
     return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
 }
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index 7491133..e1882e1 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -20,16 +20,15 @@
 #include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
 
 namespace android {
 namespace effect {
 
 using ::android::hardware::hidl_vec;
-using ::android::ConversionHelperHidl;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
+class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public EffectConversionHelperHidl
 {
   public:
     EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
new file mode 100644
index 0000000..2c6f2c6
--- /dev/null
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+extern "C" void* createIEffectsFactoryImpl();
+
+extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+    return createIEffectsFactoryImpl();
+}
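
The trampoline above keeps createIEffectsFactory visible in the shared library so it can be resolved at runtime. A hedged sketch of such a lookup is shown below; the library name is an assumption, and the real loader (createHalService, referenced earlier) may resolve the symbol differently:

#include <dlfcn.h>
#include <cstdio>

// Hypothetical: resolve the factory entry point from a versioned libaudiohal
// library and call it to obtain the implementation object.
void* loadEffectsFactory(const char* libName) {
    void* handle = dlopen(libName, RTLD_NOW);
    if (handle == nullptr) {
        std::fprintf(stderr, "dlopen(%s) failed: %s\n", libName, dlerror());
        return nullptr;
    }
    using EntryFn = void* (*)();
    auto entry = reinterpret_cast<EntryFn>(dlsym(handle, "createIEffectsFactory"));
    if (entry == nullptr) {
        std::fprintf(stderr, "createIEffectsFactory not exported: %s\n", dlerror());
        dlclose(handle);
        return nullptr;
    }
    return entry();  // nullptr if the HAL service is not registered
}

int main() {
    void* factory = loadEffectsFactory("libaudiohal@7.0.so");  // assumed name
    return factory != nullptr ? 0 : 1;
}
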
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 8ba0f72..021ec51 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -22,6 +22,7 @@
 #include <media/AudioParameter.h>
 #include <mediautils/memory.h>
 #include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
@@ -45,8 +46,11 @@
 using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-StreamHalHidl::StreamHalHidl(IStream *stream)
-        : ConversionHelperHidl("Stream"),
+#define TIME_CHECK() auto timeCheck = \
+        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
+StreamHalHidl::StreamHalHidl(std::string_view className, IStream *stream)
+        : CoreConversionHelperHidl(className),
           mStream(stream),
           mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
           mCachedBufferSize(0){
@@ -67,6 +71,7 @@
 }
 
 status_t StreamHalHidl::getBufferSize(size_t *size) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
     if (status == OK) {
@@ -76,6 +81,7 @@
 }
 
 status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+    TIME_CHECK();
     *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
     if (!mStream) return NO_INIT;
 #if MAJOR_VERSION <= 6
@@ -105,6 +111,7 @@
 }
 
 status_t StreamHalHidl::setParameters(const String8& kvPairs) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     hidl_vec<ParameterValue> hidlParams;
     status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -114,6 +121,7 @@
 }
 
 status_t StreamHalHidl::getParameters(const String8& keys, String8 *values) {
+    TIME_CHECK();
     values->clear();
     if (!mStream) return NO_INIT;
     hidl_vec<hidl_string> hidlKeys;
@@ -134,21 +142,25 @@
 }
 
 status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("addEffect", mStream->addEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::standby() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("standby", mStream->standby());
 }
 
 status_t StreamHalHidl::dump(int fd, const Vector<String16>& args) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
     hidlHandle->data[0] = fd;
@@ -173,17 +185,20 @@
 }
 
 status_t StreamHalHidl::start() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("start", mStream->start());
 }
 
 status_t StreamHalHidl::stop() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("stop", mStream->stop());
 }
 
 status_t StreamHalHidl::createMmapBuffer(int32_t minSizeFrames,
                                   struct audio_mmap_buffer_info *info) {
+    TIME_CHECK();
     Result retval;
     Return<void> ret = mStream->createMmapBuffer(
             minSizeFrames,
@@ -216,6 +231,7 @@
 }
 
 status_t StreamHalHidl::getMmapPosition(struct audio_mmap_position *position) {
+    TIME_CHECK();
     Result retval;
     Return<void> ret = mStream->getMmapPosition(
             [&](Result r, const MmapPosition& hidlPosition) {
@@ -244,7 +260,7 @@
 status_t StreamHalHidl::getHalPid(pid_t *pid) {
     using ::android::hidl::base::V1_0::DebugInfo;
     using ::android::hidl::manager::V1_0::IServiceManager;
-
+    TIME_CHECK();
     DebugInfo debugInfo;
     auto ret = mStream->getDebugInfo([&] (const auto &info) {
         debugInfo = info;
@@ -275,6 +291,7 @@
 status_t StreamHalHidl::legacyCreateAudioPatch(const struct audio_port_config& port,
                                                std::optional<audio_source_t> source,
                                                audio_devices_t type) {
+    TIME_CHECK();
     LOG_ALWAYS_FATAL_IF(port.type != AUDIO_PORT_TYPE_DEVICE, "port type must be device");
     unique_malloced_ptr<char> address;
     if (strcmp(port.ext.device.address, "") != 0) {
@@ -293,6 +310,7 @@
 }
 
 status_t StreamHalHidl::legacyReleaseAudioPatch() {
+    TIME_CHECK();
     AudioParameter param;
     param.addInt(String8(AudioParameter::keyRouting), 0);
     return setParameters(param.toString());
@@ -352,7 +370,8 @@
 
 StreamOutHalHidl::StreamOutHalHidl(
         const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream)
-        : StreamHalHidl(stream.get()), mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
+        : StreamHalHidl("StreamOutHalHidl", stream.get())
+        , mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
 }
 
 StreamOutHalHidl::~StreamOutHalHidl() {
@@ -376,11 +395,13 @@
 }
 
 status_t StreamOutHalHidl::getFrameSize(size_t *size) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getFrameSize", mStream->getFrameSize(), size);
 }
 
 status_t StreamOutHalHidl::getLatency(uint32_t *latency) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -394,12 +415,14 @@
 }
 
 status_t StreamOutHalHidl::setVolume(float left, float right) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("setVolume", mStream->setVolume(left, right));
 }
 
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     std::vector<ParameterValue> parameters;
     String8 halParameters;
@@ -410,6 +433,7 @@
 }
 #elif MAJOR_VERSION >= 4
 status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("selectPresentation",
             mStream->selectPresentation(presentationId, programId));
@@ -417,6 +441,7 @@
 #endif
 
 status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     *written = 0;
 
@@ -562,6 +587,7 @@
 }
 
 status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getRenderPosition(
@@ -575,6 +601,7 @@
 }
 
 status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getNextWriteTimestamp(
@@ -588,6 +615,7 @@
 }
 
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     status_t status = processReturn(
             "setCallback", mStream->setCallback(new StreamOutCallback(this)));
@@ -598,6 +626,7 @@
 }
 
 status_t StreamOutHalHidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Return<void> ret = mStream->supportsPauseAndResume(
             [&](bool p, bool r) {
@@ -608,32 +637,38 @@
 }
 
 status_t StreamOutHalHidl::pause() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->pause());
 }
 
 status_t StreamOutHalHidl::resume() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->resume());
 }
 
 status_t StreamOutHalHidl::supportsDrain(bool *supportsDrain) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("supportsDrain", mStream->supportsDrain(), supportsDrain);
 }
 
 status_t StreamOutHalHidl::drain(bool earlyNotify) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "drain", mStream->drain(earlyNotify ? AudioDrain::EARLY_NOTIFY : AudioDrain::ALL));
 }
 
 status_t StreamOutHalHidl::flush() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->flush());
 }
 
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -667,6 +702,7 @@
 #elif MAJOR_VERSION >= 4
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
+    TIME_CHECK();
 #if MAJOR_VERSION == 4
     ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
 #else
@@ -717,6 +753,7 @@
 #else
 
 status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getDualMonoMode(
@@ -730,12 +767,14 @@
 }
 
 status_t StreamOutHalHidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setDualMonoMode", mStream->setDualMonoMode(static_cast<DualMonoMode>(mode)));
 }
 
 status_t StreamOutHalHidl::getAudioDescriptionMixLevel(float* leveldB) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getAudioDescriptionMixLevel(
@@ -749,12 +788,14 @@
 }
 
 status_t StreamOutHalHidl::setAudioDescriptionMixLevel(float leveldB) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setAudioDescriptionMixLevel", mStream->setAudioDescriptionMixLevel(leveldB));
 }
 
 status_t StreamOutHalHidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getPlaybackRateParameters(
@@ -775,6 +816,7 @@
 }
 
 status_t StreamOutHalHidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setPlaybackRateParameters", mStream->setPlaybackRateParameters(
@@ -809,6 +851,7 @@
 
 status_t StreamOutHalHidl::setEventCallback(
         const sp<StreamOutHalInterfaceEventCallback>& callback) {
+    TIME_CHECK();
     if (mStream == nullptr) return NO_INIT;
     mEventCallback = callback;
     status_t status = processReturn(
@@ -823,12 +866,14 @@
 using hardware::audio::V7_1::LatencyMode;
 
 status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setLatencyMode", mStream->setLatencyMode(static_cast<LatencyMode>(mode)));
 };
 
 status_t StreamOutHalHidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getRecommendedLatencyModes(
@@ -869,6 +914,7 @@
 
 status_t StreamOutHalHidl::setLatencyModeCallback(
         const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+    TIME_CHECK();
 
     if (mStream == nullptr) return NO_INIT;
     mLatencyModeCallback = callback;
@@ -940,7 +986,8 @@
 
 StreamInHalHidl::StreamInHalHidl(
         const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
-        : StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
+        : StreamHalHidl("StreamInHalHidl", stream.get())
+        , mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
 }
 
 StreamInHalHidl::~StreamInHalHidl() {
@@ -953,16 +1000,19 @@
 }
 
 status_t StreamInHalHidl::getFrameSize(size_t *size) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getFrameSize", mStream->getFrameSize(), size);
 }
 
 status_t StreamInHalHidl::setGain(float gain) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("setGain", mStream->setGain(gain));
 }
 
 status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     *read = 0;
 
@@ -1090,11 +1140,13 @@
 }
 
 status_t StreamInHalHidl::getInputFramesLost(uint32_t *framesLost) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getInputFramesLost", mStream->getInputFramesLost(), framesLost);
 }
 
 status_t StreamInHalHidl::getCapturePosition(int64_t *frames, int64_t *time) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mReaderClient == gettid() && mCommandMQ) {
         ReadParameters params;
@@ -1134,6 +1186,7 @@
 #elif MAJOR_VERSION >= 4
 status_t StreamInHalHidl::getActiveMicrophones(
         std::vector<media::MicrophoneInfo> *microphonesInfo) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getActiveMicrophones(
@@ -1152,6 +1205,7 @@
 
 status_t StreamInHalHidl::updateSinkMetadata(const
         StreamInHalInterface::SinkMetadata& sinkMetadata) {
+    TIME_CHECK();
 #if MAJOR_VERSION == 4
     ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
 #else
@@ -1179,12 +1233,14 @@
 }
 #else
 status_t StreamInHalHidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("setPreferredMicrophoneDirection",
         mStream->setMicrophoneDirection(static_cast<MicrophoneDirection>(direction)));
 }
 
 status_t StreamInHalHidl::setPreferredMicrophoneFieldDimension(float zoom) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("setPreferredMicrophoneFieldDimension",
                 mStream->setMicrophoneFieldDimension(zoom));
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 4e80e88..54fbefe 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -28,7 +28,7 @@
 #include <media/audiohal/StreamHalInterface.h>
 #include <mediautils/Synchronization.h>
 
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
 #include "StreamPowerLog.h"
 
 using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStream;
@@ -45,7 +45,7 @@
 
 class DeviceHalHidl;
 
-class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
+class StreamHalHidl : public virtual StreamHalInterface, public CoreConversionHelperHidl
 {
   public:
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
@@ -97,7 +97,7 @@
 
   protected:
     // Subclasses can not be constructed directly by clients.
-    explicit StreamHalHidl(IStream *stream);
+    StreamHalHidl(std::string_view className, IStream *stream);
 
     ~StreamHalHidl() override;
 
diff --git a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
index d353ed0..866dd3e 100644
--- a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
+++ b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
@@ -18,21 +18,42 @@
 #define ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
 
 #include <string>
+#include <utility>
 
 #include <utils/StrongPointer.h>
 
 namespace android {
 
+// The pair of the interface's package name and the interface name,
+// e.g. <"android.hardware.audio", "IDevicesFactory">.
+// Splitting is used for easier construction of versioned names (FQNs).
+using InterfaceName = std::pair<std::string, std::string>;
+
 namespace detail {
 
-void* createPreferredImpl(const std::string& package, const std::string& interface);
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface);
 
 }  // namespace detail
 
-/** @Return the preferred available implementation or nullptr if none are available. */
+/**
+ * Create a client for the "preferred" (most recent) implementation of an interface
+ * by loading the appropriate version of the shared library containing the implementation.
+ *
+ * In the audio HAL, there are two families of interfaces: core and effects. Both are
+ * packed into the same shared library for memory efficiency. Since the core and the effects
+ * interfaces can have different minor versions on the device, the loader considers which of
+ * the two interfaces has the most recent version in order to avoid loading multiple shared
+ * libraries. Thus, a pair of interface names must be passed in.
+ *
+ * @param iface the interface that needs to be created.
+ * @param siblingIface the interface which occupies the same shared library.
+ * @return the preferred available implementation or nullptr if none are available.
+ */
 template <class Interface>
-static sp<Interface> createPreferredImpl(const std::string& package, const std::string& interface) {
-    return sp<Interface>{static_cast<Interface*>(detail::createPreferredImpl(package, interface))};
+static sp<Interface> createPreferredImpl(
+        const InterfaceName& iface, const InterfaceName& siblingIface) {
+    return sp<Interface>{
+        static_cast<Interface*>(detail::createPreferredImpl(iface, siblingIface))};
 }
 
 } // namespace android
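
For illustration, a caller now supplies the interface to create together with the sibling interface that shares its library. The concrete call sites are outside this diff, so the sketch below only constructs the pairs following the example given in the comment; the effect package name and the commented call are assumptions:

#include <iostream>
#include <string>
#include <utility>

using InterfaceName = std::pair<std::string, std::string>;

int main() {
    // Package/interface pairs, per the comment above; the effect pair is assumed.
    const InterfaceName core{"android.hardware.audio", "IDevicesFactory"};
    const InterfaceName effect{"android.hardware.audio.effect", "IEffectsFactory"};
    // In libaudiohal this would look roughly like:
    //   auto factory = createPreferredImpl<DevicesFactoryHalInterface>(core, effect);
    std::cout << core.first << "::" << core.second << " paired with "
              << effect.first << "::" << effect.second << std::endl;
    return 0;
}
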
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 041b427..923f5c1 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -47,6 +47,7 @@
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
+    info->mBitDepth = videoFrame->mBitDepth;
     if (videoFrame->mIccSize > 0) {
         info->mIccData.assign(
                 videoFrame->getFlattenedIccData(),
@@ -377,13 +378,14 @@
         //       issue (e.g. by copying).
         VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->unsecurePointer());
 
-        ALOGV("Image dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+        ALOGV("Image dimension %dx%d, display %dx%d, angle %d, iccSize %d, bitDepth %d",
                 videoFrame->mWidth,
                 videoFrame->mHeight,
                 videoFrame->mDisplayWidth,
                 videoFrame->mDisplayHeight,
                 videoFrame->mRotationAngle,
-                videoFrame->mIccSize);
+                videoFrame->mIccSize,
+                videoFrame->mBitDepth);
 
         initFrameInfo(&mImageInfo, videoFrame);
 
@@ -729,4 +731,13 @@
     return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
 }
 
+uint32_t HeifDecoderImpl::getColorDepth() {
+    // mImageInfo is a member object, so its address can never be null;
+    // report the bit depth recorded when the frame info was initialized.
+    return mImageInfo.mBitDepth;
+}
+
 } // namespace android
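
getColorDepth() exposes the per-channel bit depth recorded from the video frame, so a caller can decide whether an 8-bit or a wider output surface is needed. A hypothetical consumer (not part of this change) might branch on it like this:

#include <cstdint>
#include <cstdio>

// Hypothetical client of HeifDecoder::getColorDepth(): pick a wider output
// format when the source carries 10-bit channels. The format names here are
// plain strings used only for illustration.
const char* chooseOutputFormat(uint32_t bitsPerChannel) {
    return bitsPerChannel >= 10 ? "RGBA_1010102" : "RGBA_8888";
}

int main() {
    uint32_t depth = 10;  // stand-in for decoder->getColorDepth()
    std::printf("decode to %s\n", chooseOutputFormat(depth));
    return 0;
}
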
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 2b9c710..86a8628 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -54,6 +54,8 @@
 
     size_t skipScanlines(size_t count) override;
 
+    uint32_t getColorDepth() override;
+
 private:
     struct DecodeThread;
 
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
index fa51aef..56f4765 100644
--- a/media/libheif/include/HeifDecoderAPI.h
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -46,7 +46,8 @@
     uint32_t mHeight;
     int32_t  mRotationAngle;           // Rotation angle, clockwise, should be multiple of 90
     uint32_t mBytesPerPixel;           // Number of bytes for one pixel
-    int64_t mDurationUs;               // Duration of the frame in us
+    int64_t  mDurationUs;              // Duration of the frame in us
+    uint32_t mBitDepth;                // Number of bits per R/G/B channel
     std::vector<uint8_t> mIccData;     // ICC data array
 };
 
@@ -162,6 +163,11 @@
      */
     virtual size_t skipScanlines(size_t count) = 0;
 
+    /*
+     * Returns the color depth, in bits per R/G/B channel.
+     */
+    virtual uint32_t getColorDepth() = 0;
+
 private:
     HeifDecoder(const HeifFrameInfo&) = delete;
     HeifDecoder& operator=(const HeifFrameInfo&) = delete;
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index ec52a49..a6f0b60 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -43,10 +43,10 @@
 }
 
 //static
-MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
+MediaResource MediaResource::CodecResource(bool secure, SubType subType, int64_t instanceCount) {
     return MediaResource(
             secure ? Type::kSecureCodec : Type::kNonSecureCodec,
-            video ? SubType::kVideoCodec : SubType::kAudioCodec,
+            subType,
             instanceCount);
 }
 
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index 68cc25e..3b69d4f 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -37,7 +37,8 @@
     MediaResource(Type type, SubType subType, int64_t value);
     MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
 
-    static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
+    static MediaResource CodecResource(bool secure, MediaResourceSubType subType,
+            int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
     static MediaResource VideoBatteryResource();
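
MediaResource::CodecResource now takes the codec sub-type enum directly instead of a bool video flag, so call sites state what kind of codec they hold. A toy mock of the new factory shape (not the real MediaResource class) showing how a call site changes:

#include <cstdint>
#include <iostream>

enum class SubType { kAudioCodec, kVideoCodec };  // subset of the real enum

struct FakeMediaResource {
    bool secure;
    SubType subType;
    int64_t instances;
    static FakeMediaResource CodecResource(bool secure, SubType subType,
                                           int64_t instanceCount = 1) {
        return {secure, subType, instanceCount};
    }
};

int main() {
    // Old call: CodecResource(true /*secure*/, true /*video*/);
    // New call spells the sub-type out explicitly:
    auto r = FakeMediaResource::CodecResource(true /*secure*/, SubType::kVideoCodec);
    std::cout << (r.subType == SubType::kVideoCodec ? "video" : "audio") << std::endl;
    return 0;
}
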
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index a433fc6..b9d795d 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -29,6 +29,7 @@
 cc_library {
     name: "libmedia_helper",
     vendor_available: true,
+    min_sdk_version: "29",
     vndk: {
         enabled: true,
     },
@@ -58,4 +59,9 @@
             enabled: false,
         },
     },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "test_com.android.media",
+    ],
 }
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 4247375..90472eb 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -140,6 +140,8 @@
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
 #define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
+#define AMEDIAMETRICS_PROP_METHODCODE     "methodCode"     // int64_t, code identifying the method
+#define AMEDIAMETRICS_PROP_METHODNAME     "methodName"     // string method name
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
@@ -224,6 +226,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_STOP       "stop"   // AudioTrack, AudioRecord
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT    "timeout"  // AudioFlinger, AudioPolicy
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN   "underrun" // from Thread
 
 // Possible values for AMEDIAMETRICS_PROP_CALLERNAME
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 609298f..55b1ed7 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -28,6 +28,7 @@
 #include <datasource/PlayerServiceDataSourceFactory.h>
 #include <datasource/PlayerServiceFileSource.h>
 #include <media/IMediaHTTPService.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodecList.h>
@@ -194,17 +195,6 @@
         return NULL;
     }
 
-    if (metaOnly) {
-        return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail);
-    }
-
-    sp<IMediaSource> source = mExtractor->getTrack(i);
-
-    if (source.get() == NULL) {
-        ALOGE("unable to instantiate image track.");
-        return NULL;
-    }
-
     const char *mime;
     bool isHeif = false;
     if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
@@ -223,16 +213,47 @@
         trackMeta->setCString(kKeyMIMEType, mime);
     }
 
-    bool preferhw = property_get_bool(
-            "media.stagefright.thumbnail.prefer_hw_codecs", false);
-    uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
-    Vector<AString> matchingCodecs;
     sp<AMessage> format = new AMessage;
     status_t err = convertMetaDataToMessage(trackMeta, &format);
     if (err != OK) {
         format = NULL;
     }
 
+    uint32_t bitDepth = 8;
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        int32_t profile;
+        if (format != NULL && format->findInt32("profile", &profile)) {
+            if (HEVCProfileMain10 == profile || HEVCProfileMain10HDR10 == profile ||
+                    HEVCProfileMain10HDR10Plus == profile) {
+                bitDepth = 10;
+            }
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+        int32_t profile;
+        if (format != NULL && format->findInt32("profile", &profile)) {
+            if (AV1ProfileMain10 == profile || AV1ProfileMain10HDR10 == profile ||
+                    AV1ProfileMain10HDR10Plus == profile) {
+                bitDepth = 10;
+            }
+        }
+    }
+
+    if (metaOnly) {
+        return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail, bitDepth);
+    }
+
+    sp<IMediaSource> source = mExtractor->getTrack(i);
+
+    if (source.get() == NULL) {
+        ALOGE("unable to instantiate image track.");
+        return NULL;
+    }
+
+    bool preferhw = property_get_bool(
+            "media.stagefright.thumbnail.prefer_hw_codecs", false);
+    uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
+    Vector<AString> matchingCodecs;
+
     // If decoding thumbnail check decoder supports thumbnail dimensions instead
     int32_t thumbHeight, thumbWidth;
     if (thumbnail && format != NULL
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 5da32c9..10a1ee4 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -50,7 +50,7 @@
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp, bool allocRotated, bool metaOnly) {
+        int32_t dstBpp, uint32_t bitDepth, bool allocRotated, bool metaOnly) {
     int32_t rotationAngle;
     if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
         rotationAngle = 0;  // By default, no rotation
@@ -105,7 +105,7 @@
     }
 
     VideoFrame frame(width, height, displayWidth, displayHeight,
-            tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
+            tileWidth, tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
 
     size_t size = frame.getFlattenedSize();
     sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
@@ -126,15 +126,15 @@
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp, bool allocRotated = false) {
-    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+        int32_t dstBpp, uint32_t bitDepth, bool allocRotated = false) {
+    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth,
             allocRotated, false /*metaOnly*/);
 }
 
 sp<IMemory> allocMetaFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp) {
-    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+        int32_t dstBpp, uint32_t bitDepth) {
+    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth,
             false /*allocRotated*/, true /*metaOnly*/);
 }
 
@@ -211,7 +211,7 @@
 
 //static
 sp<IMemory> FrameDecoder::getMetadataOnly(
-        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
+        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
     OMX_COLOR_FORMATTYPE dstFormat;
     ui::PixelFormat captureFormat;
     int32_t dstBpp;
@@ -235,7 +235,8 @@
         }
     }
 
-    sp<IMemory> metaMem = allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp);
+    sp<IMemory> metaMem =
+            allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth);
 
     // try to fill sequence meta's duration based on average frame rate,
     // default to 33ms if frame rate is unavailable.
@@ -534,7 +535,6 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        // TODO: Use Flexible color instead
         videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
     }
 
@@ -649,6 +649,11 @@
         height = slice_height;
     }
 
+    uint32_t bitDepth = 8;
+    if (COLOR_FormatYUVP010 == srcFormat) {
+        bitDepth = 10;
+    }
+
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
                 trackMeta(),
@@ -657,6 +662,7 @@
                 0,
                 0,
                 dstBpp(),
+                bitDepth,
                 mCaptureLayer != nullptr /*allocRotated*/);
         if (frameMem == nullptr) {
             return NO_MEMORY;
@@ -851,7 +857,6 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        // TODO: Use Flexible color instead
         videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
     }
 
@@ -922,9 +927,17 @@
         return ERROR_MALFORMED;
     }
 
+    int32_t srcFormat;
+    CHECK(outputFormat->findInt32("color-format", &srcFormat));
+
+    uint32_t bitDepth = 8;
+    if (COLOR_FormatYUVP010 == srcFormat) {
+        bitDepth = 10;
+    }
+
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
-                trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
+                trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp(), bitDepth);
 
         if (frameMem == nullptr) {
             return NO_MEMORY;
@@ -935,9 +948,6 @@
         setFrame(frameMem);
     }
 
-    int32_t srcFormat;
-    CHECK(outputFormat->findInt32("color-format", &srcFormat));
-
     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
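
FrameDecoder now sizes the destination VideoFrame from the decoder's reported output color format instead of assuming 8-bit. A hedged sketch of that decision; the constant value for COLOR_FormatYUVP010 is an assumption based on MediaCodecInfo:

    #include <cstdint>

    // Assumed to match MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010.
    constexpr int32_t kColorFormatYUVP010 = 54;

    // P010 output carries 10 significant bits per sample; everything else the
    // thumbnail path handles is treated as 8-bit when allocating the VideoFrame.
    uint32_t bitDepthForOutputFormat(int32_t srcFormat) {
        return (srcFormat == kColorFormatYUVP010) ? 10u : 8u;
    }
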
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index f81a5eb..63d3180 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -36,6 +36,7 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -44,6 +45,7 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
@@ -372,9 +374,7 @@
     uint8_t mProfileCompatible;
     uint8_t mLevelIdc;
 
-    uint8_t mDoviProfile;
-    void *mDoviConfigData;
-    size_t mDoviConfigDataSize;
+    int32_t mDoviProfile;
 
     void *mCodecSpecificData;
     size_t mCodecSpecificDataSize;
@@ -428,7 +428,7 @@
     status_t parseHEVCCodecSpecificData(
             const uint8_t *data, size_t size, HevcParameterSets &paramSets);
 
-    status_t makeDoviCodecSpecificData();
+    status_t getDolbyVisionProfile();
 
     // Track authoring progress status
     void trackProgressStatus(int64_t timeUs, status_t err = OK);
@@ -628,14 +628,14 @@
 }
 
 const char *MPEG4Writer::Track::getDoviFourCC() const {
-    if (mDoviProfile == 5) {
+    if (mDoviProfile == DolbyVisionProfileDvheStn) {
         return "dvh1";
-    } else if (mDoviProfile == 8) {
+    } else if (mDoviProfile == DolbyVisionProfileDvheSt) {
         return "hvc1";
-    } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+    } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
         return "avc1";
     }
-    return (const char*)NULL;
+    return nullptr;
 }
 
 // static
@@ -693,6 +693,11 @@
     }
 
     if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+        // For MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        // getFourCCForMime() requires profile information
+        // to decide the final FourCC code.
+        // So we allow creation of the new track now and
+        // assign the FourCC code later using getDoviFourCC().
         ALOGV("Add source mime '%s'", mime);
     } else if (Track::getFourCCForMime(mime) == NULL) {
         ALOGE("Unsupported mime '%s'", mime);
@@ -2173,8 +2178,7 @@
       mMinCttsOffsetTimeUs(0),
       mMinCttsOffsetTicks(0),
       mMaxCttsOffsetTicks(0),
-      mDoviConfigData(NULL),
-      mDoviConfigDataSize(0),
+      mDoviProfile(0),
       mCodecSpecificData(NULL),
       mCodecSpecificDataSize(0),
       mGotAllCodecSpecificData(false),
@@ -2636,7 +2640,7 @@
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
-        makeDoviCodecSpecificData();
+        getDolbyVisionProfile();
         if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
                 !mMeta->findData(kKeyHVCC, &type, &data, &size)) {
             ALOGE("Failed: No HVCC/AVCC for Dolby Vision ..\n");
@@ -2683,10 +2687,6 @@
         mCodecSpecificData = NULL;
     }
 
-    if (mDoviConfigData != NULL) {
-        free(mDoviConfigData);
-        mDoviConfigData = NULL;
-    }
 }
 
 void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
@@ -3365,34 +3365,37 @@
     return OK;
 }
 
-status_t MPEG4Writer::Track::makeDoviCodecSpecificData() {
+status_t MPEG4Writer::Track::getDolbyVisionProfile() {
     uint32_t type;
     const void *data = NULL;
     size_t size = 0;
 
-    if (mDoviConfigData != NULL) {
-        ALOGE("Already have Dolby Vision codec specific data");
-        return OK;
+    if (!mMeta->findData(kKeyDVCC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVVC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+        ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
+        return ERROR_MALFORMED;
     }
+    static const ALookup<uint8_t, int32_t> dolbyVisionProfileMap = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110}
+    };
 
-    if (!mMeta->findData(kKeyDVCC, &type, &data, &size)
-             && !mMeta->findData(kKeyDVVC, &type, &data, &size)
-             && !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
-        ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
-        return ERROR_MALFORMED;
+    // Dolby Vision profile information is extracted as per
+    // https://dolby.my.salesforce.com/sfc/p/#700000009YuG/a/4u000000l6FB/076wHYEmyEfz09m0V1bo85_25hlUJjaiWTbzorNmYY4
+    uint8_t dv_profile = ((((uint8_t *)data)[2] >> 1) & 0x7f);
+
+    if (!dolbyVisionProfileMap.map(dv_profile, &mDoviProfile)) {
+        ALOGE("Failed to get Dolby Vision profile from DV config data");
+        return ERROR_MALFORMED;
     }
-
-    mDoviConfigData = malloc(size);
-    if (mDoviConfigData == NULL) {
-        ALOGE("Failed allocating Dolby Vision config data");
-        return ERROR_MALFORMED;
-    }
-
-    mDoviConfigDataSize = size;
-    memcpy(mDoviConfigData, data, size);
-
-    mDoviProfile = (((char *)data)[2] >> 1) & 0x7f; //getting profile info
-
     return OK;
 }
 
@@ -3542,24 +3545,26 @@
                             buffer->range_length());
                 }
                 if (mIsDovi) {
-                    err = makeDoviCodecSpecificData();
-
-                    const void *data = NULL;
-                    size_t size = 0;
-
-                    uint32_t type = 0;
-                    if (mDoviProfile == 9){
-                        mMeta->findData(kKeyAVCC, &type, &data, &size);
-                    } else if (mDoviProfile < 9)  {
-                        mMeta->findData(kKeyHVCC, &type, &data, &size);
-                    }
-
-                    if (data != NULL && copyCodecSpecificData((uint8_t *)data, size) == OK) {
-                        mGotAllCodecSpecificData = true;
+                    err = getDolbyVisionProfile();
+                    if (err == OK) {
+                        const void *data = NULL;
+                        size_t size = 0;
+                        uint32_t type = 0;
+                        if (mDoviProfile == DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyAVCC, &type, &data, &size);
+                        } else if (mDoviProfile < DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyHVCC, &type, &data, &size);
+                        } else {
+                            ALOGW("DV Profiles > DolbyVisionProfileDvavSe are not supported");
+                            err = ERROR_MALFORMED;
+                        }
+                        if (err == OK && data != NULL &&
+                            copyCodecSpecificData((uint8_t *)data, size) == OK) {
+                            mGotAllCodecSpecificData = true;
+                        }
                     }
                 }
             }
-
             buffer->release();
             buffer = NULL;
             if (OK != err) {
@@ -4429,10 +4434,12 @@
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
         writeHvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
-        if (mDoviProfile <= 8) {
+        if (mDoviProfile <= DolbyVisionProfileDvheSt) {
             writeHvccBox();
-        } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+        } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
             writeAvccBox();
+        } else {
+            TRESPASS("Unsupported Dolby Vision profile");
         }
         writeDoviConfigBox();
     }
@@ -4482,45 +4489,48 @@
     size_t size;
     bool found =
             meta->findData(kKeyHdrStaticInfo, &type, reinterpret_cast<const void**>(&data), &size);
-    if (found && size == 25) {
-        uint16_t displayPrimariesRX = U16LE_AT(&data[1]);
-        uint16_t displayPrimariesRY = U16LE_AT(&data[3]);
-
-        uint16_t displayPrimariesGX = U16LE_AT(&data[5]);
-        uint16_t displayPrimariesGY = U16LE_AT(&data[7]);
-
-        uint16_t displayPrimariesBX = U16LE_AT(&data[9]);
-        uint16_t displayPrimariesBY = U16LE_AT(&data[11]);
-
-        uint16_t whitePointX = U16LE_AT(&data[13]);
-        uint16_t whitePointY = U16LE_AT(&data[15]);
-
-        uint16_t maxDisplayMasteringLuminance = U16LE_AT(&data[17]);
-        uint16_t minDisplayMasteringLuminance = U16LE_AT(&data[19]);
-
-        uint16_t maxContentLightLevel = U16LE_AT(&data[21]);
-        uint16_t maxPicAverageLightLevel = U16LE_AT(&data[23]);
-
-        mOwner->beginBox("mdcv");
-        mOwner->writeInt16(displayPrimariesGX);
-        mOwner->writeInt16(displayPrimariesGY);
-        mOwner->writeInt16(displayPrimariesBX);
-        mOwner->writeInt16(displayPrimariesBY);
-        mOwner->writeInt16(displayPrimariesRX);
-        mOwner->writeInt16(displayPrimariesRY);
-        mOwner->writeInt16(whitePointX);
-        mOwner->writeInt16(whitePointY);
-        mOwner->writeInt32(maxDisplayMasteringLuminance * 10000);
-        mOwner->writeInt32(minDisplayMasteringLuminance * 10000);
-        mOwner->endBox();  // mdcv.
-
-        mOwner->beginBox("clli");
-        mOwner->writeInt16(maxContentLightLevel);
-        mOwner->writeInt16(maxPicAverageLightLevel);
-        mOwner->endBox();  // clli.
-    } else {
-        ALOGW("Ignoring HDR static info with unexpected size %d", (int)size);
+    if (!found) {
+        return; // Nothing to encode.
     }
+    if (size != 25) {
+        ALOGW("Ignoring HDR static info with unexpected size %d", (int)size);
+        return;
+    }
+    uint16_t displayPrimariesRX = U16LE_AT(&data[1]);
+    uint16_t displayPrimariesRY = U16LE_AT(&data[3]);
+
+    uint16_t displayPrimariesGX = U16LE_AT(&data[5]);
+    uint16_t displayPrimariesGY = U16LE_AT(&data[7]);
+
+    uint16_t displayPrimariesBX = U16LE_AT(&data[9]);
+    uint16_t displayPrimariesBY = U16LE_AT(&data[11]);
+
+    uint16_t whitePointX = U16LE_AT(&data[13]);
+    uint16_t whitePointY = U16LE_AT(&data[15]);
+
+    uint16_t maxDisplayMasteringLuminance = U16LE_AT(&data[17]);
+    uint16_t minDisplayMasteringLuminance = U16LE_AT(&data[19]);
+
+    uint16_t maxContentLightLevel = U16LE_AT(&data[21]);
+    uint16_t maxPicAverageLightLevel = U16LE_AT(&data[23]);
+
+    mOwner->beginBox("mdcv");
+    mOwner->writeInt16(displayPrimariesGX);
+    mOwner->writeInt16(displayPrimariesGY);
+    mOwner->writeInt16(displayPrimariesBX);
+    mOwner->writeInt16(displayPrimariesBY);
+    mOwner->writeInt16(displayPrimariesRX);
+    mOwner->writeInt16(displayPrimariesRY);
+    mOwner->writeInt16(whitePointX);
+    mOwner->writeInt16(whitePointY);
+    mOwner->writeInt32(maxDisplayMasteringLuminance * 10000);
+    mOwner->writeInt32(minDisplayMasteringLuminance * 10000);
+    mOwner->endBox();  // mdcv.
+
+    mOwner->beginBox("clli");
+    mOwner->writeInt16(maxContentLightLevel);
+    mOwner->writeInt16(maxPicAverageLightLevel);
+    mOwner->endBox();  // clli.
 }
 
 void MPEG4Writer::Track::writeAudioFourCCBox() {
@@ -4991,21 +5001,29 @@
 }
 
 void MPEG4Writer::Track::writeDoviConfigBox() {
-    CHECK(mDoviConfigData);
-    CHECK_EQ(mDoviConfigDataSize, 24u);
+    CHECK_NE(mDoviProfile, 0u);
 
-    uint8_t *ptr = (uint8_t *)mDoviConfigData;
-    uint8_t profile = (ptr[2] >> 1) & 0x7f;
+    uint32_t type = 0;
+    const void *data = nullptr;
+    size_t size = 0;
+    // check to see which key has the configuration box.
+    if (mMeta->findData(kKeyDVCC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVVC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVWC, &type, &data, &size)) {
 
-    if (profile > 10) {
-        mOwner->beginBox("dvwC");
-    } else if (profile > 7) {
-        mOwner->beginBox("dvvC");
-    } else {
-        mOwner->beginBox("dvcC");
+        // If this box is present we write it; otherwise the
+        // mp4 will be interpreted as a backward-compatible
+        // stream.
+        if (mDoviProfile > DolbyVisionProfileDvav110) {
+            mOwner->beginBox("dvwC");
+        } else if (mDoviProfile > DolbyVisionProfileDvheDtb) {
+            mOwner->beginBox("dvvC");
+        } else {
+            mOwner->beginBox("dvcC");
+        }
+        mOwner->write(data, size);
+        mOwner->endBox();  // dvwC/dvvC/dvcC
     }
-    mOwner->write(mDoviConfigData, mDoviConfigDataSize);
-    mOwner->endBox();  // dvwC/dvvC/dvcC
 }
 
 void MPEG4Writer::Track::writeD263Box() {
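
getDolbyVisionProfile() now keeps only the profile parsed out of the DV configuration record rather than a private copy of the whole blob. A hedged sketch of the record layout it relies on (the first bytes of the 24-byte dv(cc|vc|wc) box; the struct and function names are illustrative):

    #include <cstddef>
    #include <cstdint>

    struct DoviConfig {
        uint8_t profile;   // dv_profile: 7 bits
        uint8_t level;     // dv_level: 6 bits
        bool rpuPresent;
        bool elPresent;
        bool blPresent;
    };

    // Parse the fields read above and by parseDolbyVisionProfileLevelFromDvcc().
    bool parseDoviConfig(const uint8_t *data, size_t size, DoviConfig *out) {
        if (data == nullptr || out == nullptr || size < 24) {
            return false;
        }
        out->profile    = (data[2] >> 1) & 0x7f;
        out->level      = ((data[2] & 0x1) << 5) | ((data[3] >> 3) & 0x1f);
        out->rpuPresent = (data[3] >> 2) & 0x1;
        out->elPresent  = (data[3] >> 1) & 0x1;
        out->blPresent  = (data[3] >> 0) & 0x1;
        return true;
    }
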
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index d026287..50a3f0d 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -102,6 +102,8 @@
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
 static const char *kCodecModeAudio = "audio";
+static const char *kCodecModeImage = "image";
+static const char *kCodecModeUnknown = "unknown";
 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
@@ -674,6 +676,24 @@
     notify->post();
 }
 
+static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
+        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
+        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
+        default:                       return MediaResourceSubType::kUnspecifiedSubType;
+    }
+}
+
+static const char *toCodecMode(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
+        case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
+        case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
+        default:                       return kCodecModeUnknown;
+    }
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -769,9 +789,9 @@
       mFlags(0),
       mStickyError(OK),
       mSoftRenderer(NULL),
-      mIsVideo(false),
-      mVideoWidth(0),
-      mVideoHeight(0),
+      mDomain(DOMAIN_UNKNOWN),
+      mWidth(0),
+      mHeight(0),
       mRotationDegrees(0),
       mHDRMetadataFlags(0),
       mDequeueInputTimeoutGeneration(0),
@@ -1177,7 +1197,7 @@
         });
     }
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1206,7 +1226,7 @@
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -1418,7 +1438,13 @@
         mCodecInfo->getSupportedMediaTypes(&mediaTypes);
         for (size_t i = 0; i < mediaTypes.size(); ++i) {
             if (mediaTypes[i].startsWith("video/")) {
-                mIsVideo = true;
+                mDomain = DOMAIN_VIDEO;
+                break;
+            } else if (mediaTypes[i].startsWith("audio/")) {
+                mDomain = DOMAIN_AUDIO;
+                break;
+            } else if (mediaTypes[i].startsWith("image/")) {
+                mDomain = DOMAIN_IMAGE;
                 break;
             }
         }
@@ -1431,7 +1457,7 @@
         return NAME_NOT_FOUND;
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         // video codec needs dedicated looper
         if (mCodecLooper == NULL) {
             mCodecLooper = new ALooper;
@@ -1464,16 +1490,15 @@
 
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
-        mediametrics_setCString(mMetricsHandle, kCodecMode,
-                                mIsVideo ? kCodecModeVideo : kCodecModeAudio);
+        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
     }
 
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(secureCodec, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1554,16 +1579,16 @@
         mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
     }
 
-    if (mIsVideo) {
-        format->findInt32("width", &mVideoWidth);
-        format->findInt32("height", &mVideoHeight);
+    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
+        format->findInt32("width", &mWidth);
+        format->findInt32("height", &mHeight);
         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
             mRotationDegrees = 0;
         }
 
         if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
+            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
             mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
@@ -1577,21 +1602,23 @@
             if (format->findInt32("color-format", &colorFormat)) {
                 mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
             }
-            float frameRate = -1.0;
-            if (format->findFloat("frame-rate", &frameRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
-            }
-            float captureRate = -1.0;
-            if (format->findFloat("capture-rate", &captureRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
-            }
-            float operatingRate = -1.0;
-            if (format->findFloat("operating-rate", &operatingRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
-            }
-            int32_t priority = -1;
-            if (format->findInt32("priority", &priority)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+            if (mDomain == DOMAIN_VIDEO) {
+                float frameRate = -1.0;
+                if (format->findFloat("frame-rate", &frameRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                }
+                float captureRate = -1.0;
+                if (format->findFloat("capture-rate", &captureRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                }
+                float operatingRate = -1.0;
+                if (format->findFloat("operating-rate", &operatingRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                }
+                int32_t priority = -1;
+                if (format->findInt32("priority", &priority)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                }
             }
             int32_t colorStandard = -1;
             if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
@@ -1613,9 +1640,9 @@
         }
 
         // Prevent possible integer overflow in downstream code.
-        if (mVideoWidth < 0 || mVideoHeight < 0 ||
-               (uint64_t)mVideoWidth * mVideoHeight > (uint64_t)INT32_MAX / 4) {
-            ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
+        if (mWidth < 0 || mHeight < 0 ||
+               (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
+            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
             return BAD_VALUE;
         }
 
@@ -1648,7 +1675,7 @@
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mIsVideo && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
             mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
@@ -1701,7 +1728,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -2123,7 +2151,7 @@
                       bool reverse) {
     AString mediaType;
     if (!format->findString("mime", &mediaType)) {
-        ALOGW("mapFormat: no mediaType information");
+        ALOGV("mapFormat: no mediaType information");
         return;
     }
     ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
@@ -2282,7 +2310,7 @@
 }
 
 uint64_t MediaCodec::getGraphicBufferSize() {
-    if (!mIsVideo) {
+    if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
         return 0;
     }
 
@@ -2290,7 +2318,7 @@
     size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
     for (size_t i = 0; i < portNum; ++i) {
         // TODO: this is just an estimation, we should get the real buffer size from ACodec.
-        size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
+        size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
     }
     return size;
 }
@@ -2302,7 +2330,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -3076,10 +3105,8 @@
                         case STOPPING:
                         {
                             if (mFlags & kFlagSawMediaServerDie) {
-                                bool postPendingReplies = true;
                                 if (mState == RELEASING && !mReplyID) {
                                     ALOGD("Releasing asynchronously, so nothing to reply here.");
-                                    postPendingReplies = false;
                                 }
                                 // MediaServer died, there definitely won't
                                 // be a shutdown complete notification after
@@ -3092,8 +3119,11 @@
                                 if (mState == RELEASING) {
                                     mComponentName.clear();
                                 }
-                                if (postPendingReplies) {
+                                if (mReplyID) {
                                     postPendingRepliesAndDeferredMessages(origin + ":dead");
+                                } else {
+                                    ALOGD("no pending replies: %s:dead following %s",
+                                          origin.c_str(), mLastReplyOrigin.c_str());
                                 }
                                 sendErrorResponse = false;
                             } else if (!mReplyID) {
@@ -3234,8 +3264,8 @@
                             : MediaCodecInfo::Attributes(0);
                     if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
                         // software codec is currently ignored.
-                        mResourceManagerProxy->addResource(
-                                MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+                        mResourceManagerProxy->addResource(MediaResource::CodecResource(
+                            mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
                     }
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
@@ -3401,7 +3431,7 @@
                     }
 
                     CHECK_EQ(mState, STARTING);
-                    if (mIsVideo) {
+                    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
                         mResourceManagerProxy->addResource(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
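
MediaCodec now records a codec domain instead of a single mIsVideo flag, classified from the component's supported media types. A small sketch of that prefix-based classification, independent of the framework types:

    #include <string>
    #include <vector>

    enum class Domain { Unknown, Video, Audio, Image };

    // First matching prefix wins, mirroring the checks in the hunk above.
    Domain classifyDomain(const std::vector<std::string> &mediaTypes) {
        for (const std::string &type : mediaTypes) {
            if (type.rfind("video/", 0) == 0) return Domain::Video;
            if (type.rfind("audio/", 0) == 0) return Domain::Audio;
            if (type.rfind("image/", 0) == 0) return Domain::Image;
        }
        return Domain::Unknown;
    }
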
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 1854588..4b6470a 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -243,6 +243,39 @@
     }
 }
 
+static const ALookup<uint8_t, int32_t>&  getDolbyVisionProfileTable() {
+    static const ALookup<uint8_t, int32_t> profileTable = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110},
+    };
+    return profileTable;
+}
+
+static const ALookup<uint8_t, int32_t>&  getDolbyVisionLevelsTable() {
+    static const ALookup<uint8_t, int32_t> levelsTable = {
+        {0, DolbyVisionLevelUnknown},
+        {1, DolbyVisionLevelHd24},
+        {2, DolbyVisionLevelHd30},
+        {3, DolbyVisionLevelFhd24},
+        {4, DolbyVisionLevelFhd30},
+        {5, DolbyVisionLevelFhd60},
+        {6, DolbyVisionLevelUhd24},
+        {7, DolbyVisionLevelUhd30},
+        {8, DolbyVisionLevelUhd48},
+        {9, DolbyVisionLevelUhd60},
+        {10, DolbyVisionLevelUhd120},
+        {11, DolbyVisionLevel8k30},
+        {12, DolbyVisionLevel8k60},
+    };
+    return levelsTable;
+}
 static void parseDolbyVisionProfileLevelFromDvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
     // dv_major.dv_minor Should be 1.0 or 2.1
     if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
@@ -262,33 +295,9 @@
 
     // All Dolby Profiles will have profile and level info in MediaFormat
     // Profile 8 and 9 will have bl_compatibility_id too.
-    const static ALookup<uint8_t, int32_t> profiles{
-        {1, DolbyVisionProfileDvavPen},
-        {3, DolbyVisionProfileDvheDen},
-        {4, DolbyVisionProfileDvheDtr},
-        {5, DolbyVisionProfileDvheStn},
-        {6, DolbyVisionProfileDvheDth},
-        {7, DolbyVisionProfileDvheDtb},
-        {8, DolbyVisionProfileDvheSt},
-        {9, DolbyVisionProfileDvavSe},
-        {10, DolbyVisionProfileDvav110},
-    };
+    const ALookup<uint8_t, int32_t> &profiles = getDolbyVisionProfileTable();
+    const ALookup<uint8_t, int32_t> &levels = getDolbyVisionLevelsTable();
 
-    const static ALookup<uint8_t, int32_t> levels{
-        {0, DolbyVisionLevelUnknown},
-        {1, DolbyVisionLevelHd24},
-        {2, DolbyVisionLevelHd30},
-        {3, DolbyVisionLevelFhd24},
-        {4, DolbyVisionLevelFhd30},
-        {5, DolbyVisionLevelFhd60},
-        {6, DolbyVisionLevelUhd24},
-        {7, DolbyVisionLevelUhd30},
-        {8, DolbyVisionLevelUhd48},
-        {9, DolbyVisionLevelUhd60},
-        {10, DolbyVisionLevelUhd120},
-        {11, DolbyVisionLevel8k30},
-        {12, DolbyVisionLevel8k60},
-    };
     // set rpuAssoc
     if (rpu_present_flag && el_present_flag && !bl_present_flag) {
         format->setInt32("rpuAssoc", 1);
@@ -1516,30 +1525,18 @@
     if (meta->findData(kKeyDVCC, &type, &data, &size)
             || meta->findData(kKeyDVVC, &type, &data, &size)
             || meta->findData(kKeyDVWC, &type, &data, &size)) {
-        sp<ABuffer> buffer, csdOrg;
-        if (msg->findBuffer("csd-0", &csdOrg)) {
-            buffer = new (std::nothrow) ABuffer(size + csdOrg->size());
-            if (buffer.get() == NULL || buffer->base() == NULL) {
-                return NO_MEMORY;
-            }
-
-            memcpy(buffer->data(), csdOrg->data(), csdOrg->size());
-            memcpy(buffer->data() + csdOrg->size(), data, size);
-        } else {
-            buffer = new (std::nothrow) ABuffer(size);
-            if (buffer.get() == NULL || buffer->base() == NULL) {
-                return NO_MEMORY;
-            }
-            memcpy(buffer->data(), data, size);
-        }
-
-        buffer->meta()->setInt32("csd", true);
-        buffer->meta()->setInt64("timeUs", 0);
-        msg->setBuffer("csd-0", buffer);
-
         const uint8_t *ptr = (const uint8_t *)data;
         ALOGV("DV: calling parseDolbyVisionProfileLevelFromDvcc with data size %zu", size);
         parseDolbyVisionProfileLevelFromDvcc(ptr, size, msg);
+        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+        if (buffer.get() == nullptr || buffer->base() == nullptr) {
+            return NO_MEMORY;
+        }
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setBuffer("csd-2", buffer);
     }
 
     *format = msg;
@@ -2041,133 +2038,146 @@
                    mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
-            int32_t needCreateDoviCSD = 0;
-            int32_t profile = 0;
-            uint8_t bl_compatibility = 0;
-            if (msg->findInt32("profile", &profile)) {
-                if (profile == DolbyVisionProfileDvheSt) {
-                    profile = 8;
-                    bl_compatibility = 4;
-                } else if (profile == DolbyVisionProfileDvavSe) {
-                    profile = 9;
-                    bl_compatibility = 2;
-                }
-                if (profile == 8 || profile == 9) {
-                    needCreateDoviCSD = 1;
-                }
-            } else {
-                ALOGW("did not find dolby vision profile");
-            }
-            // No dovi csd data, need to create it
-            if (needCreateDoviCSD) {
-                uint8_t dvcc[24];
-                int32_t level = 0;
-                uint8_t level_val = 0;
+            int32_t profile = -1;
+            uint8_t blCompatibilityId = -1;
+            int32_t level = 0;
+            uint8_t profileVal = -1;
+            uint8_t profileVal1 = -1;
+            uint8_t profileVal2 = -1;
+            constexpr size_t dvccSize = 24;
 
-                if (msg->findInt32("level", &level)) {
-                    const static ALookup<int32_t, uint8_t> levels {
-                        {DolbyVisionLevelUnknown, 0},
-                        {DolbyVisionLevelHd24, 1},
-                        {DolbyVisionLevelHd30, 2},
-                        {DolbyVisionLevelFhd24, 3},
-                        {DolbyVisionLevelFhd30, 4},
-                        {DolbyVisionLevelFhd60, 5},
-                        {DolbyVisionLevelUhd24, 6},
-                        {DolbyVisionLevelUhd30, 7},
-                        {DolbyVisionLevelUhd48, 8},
-                        {DolbyVisionLevelUhd60, 9},
-                        {DolbyVisionLevelUhd120, 10},
-                        {DolbyVisionLevel8k30, 11},
-                        {DolbyVisionLevel8k60, 12},
-                    };
-                    levels.map(level, &level_val);
-                    ALOGV("found dolby vision level: %d, value: %d", level, level_val);
+            const ALookup<uint8_t, int32_t> &profiles =
+                getDolbyVisionProfileTable();
+            const ALookup<uint8_t, int32_t> &levels =
+                getDolbyVisionLevelsTable();
+
+            if (!msg->findBuffer("csd-2", &csd2)) {
+                // MP4 extractors are expected to generate the csd-2
+                // buffer, but some encoders might not generate it. In
+                // that case we populate the track metadata dv(cc|vc|wc)
+                // from the 'profile' and 'level' info, as specified by
+                // the Dolby Vision ISOBMFF spec.
+
+                if (!msg->findInt32("profile", &profile)) {
+                    ALOGE("Dolby Vision profile not found");
+                    return BAD_VALUE;
                 }
+                msg->findInt32("level", &level);
+
+                if (profile == DolbyVisionProfileDvheSt) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvheSt, &profileVal)) { // dvhe.08
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 4;
+                } else if (profile == DolbyVisionProfileDvavSe) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal)) { // dvav.09
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 2;
+                } else {
+                    ALOGE("Dolby Vision profile look up error");
+                    return BAD_VALUE;
+                }
+
+                profile = (int32_t) profileVal;
+
+                uint8_t level_val = 0;
+                if (!levels.map(level, &level_val)) {
+                    ALOGE("Dolby Vision level lookup error");
+                    return BAD_VALUE;
+                }
+
+                std::vector<uint8_t> dvcc(dvccSize);
 
                 dvcc[0] = 1; // major version
                 dvcc[1] = 0; // minor version
-                dvcc[2] = (uint8_t)((profile & 0x7f) << 1);// dolby vision profile
+                dvcc[2] = (uint8_t)((profile & 0x7f) << 1); // dolby vision profile
                 dvcc[2] = (uint8_t)((dvcc[2] | (uint8_t)((level_val >> 5) & 0x1)) & 0xff);
                 dvcc[3] = (uint8_t)((level_val & 0x1f) << 3); // dolby vision level
                 dvcc[3] = (uint8_t)(dvcc[3] | (1 << 2)); // rpu_present_flag
                 dvcc[3] = (uint8_t)(dvcc[3] | (1)); // bl_present_flag
-                dvcc[4] = (uint8_t)(bl_compatibility << 4);// bl_compatibility id
+                dvcc[4] = (uint8_t)(blCompatibilityId << 4); // bl_compatibility id
 
-                std::vector<uint8_t> dvcc_data(24);
-                memcpy(dvcc_data.data(), dvcc, 24);
-                if (profile > 10) {
-                    meta->setData(kKeyDVWC, kTypeDVWC, dvcc_data.data(), 24);
-                } else if (profile > 7) {
-                    meta->setData(kKeyDVVC, kTypeDVVC, dvcc_data.data(), 24);
+                profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                if (profile > (int32_t) profileVal) {
+                    meta->setData(kKeyDVWC, kTypeDVWC, dvcc.data(), dvccSize);
+                } else if (profile > (int32_t) profileVal1) {
+                    meta->setData(kKeyDVVC, kTypeDVVC, dvcc.data(), dvccSize);
                 } else {
-                    meta->setData(kKeyDVCC, kTypeDVCC, dvcc_data.data(), 24);
+                    meta->setData(kKeyDVCC, kTypeDVCC, dvcc.data(), dvccSize);
                 }
-            } else if (csd0size >= 24) { // have dovi csd, just send it out...
-                uint8_t *dvconfig = csd0->data() + (csd0size -24);
-                profile = dvconfig[2] >> 1;
-                if (profile > 10) {
-                    meta->setData(kKeyDVWC, kTypeDVWC, dvconfig, 24);
-                } else if (profile > 7) {
-                    meta->setData(kKeyDVVC, kTypeDVVC, dvconfig, 24);
-                } else {
-                    meta->setData(kKeyDVCC, kTypeDVCC, dvconfig, 24);
-                }
+
             } else {
-                return BAD_VALUE;
+                // we have csd-2, just use that to populate dvcc
+                if (csd2->size() == dvccSize) {
+                    uint8_t *dvcc = csd2->data();
+                    profile = dvcc[2] >> 1;
+
+                    profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                    profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                    if (profile > (int32_t) profileVal) {
+                        meta->setData(kKeyDVWC, kTypeDVWC, csd2->data(), csd2->size());
+                    } else if (profile > (int32_t) profileVal1) {
+                        meta->setData(kKeyDVVC, kTypeDVVC, csd2->data(), csd2->size());
+                    } else {
+                        meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+                    }
+
+                } else {
+                    ALOGE("convertMessageToMetaData: csd-2 is present but not valid");
+                    return BAD_VALUE;
+                }
             }
-
-            // Send the avc/hevc/av1 csd data...
-            if (csd0size >= 24) {
-                sp<ABuffer> csd;
-                if ( profile > 1 && profile < 9) {
-                    if (msg->findBuffer("csd-hevc", &csd)) {
-                        meta->setData(kKeyHVCC, kTypeHVCC, csd->data(), csd->size());
-                    } else if (csd0size > 24) {
-                        std::vector<uint8_t> hvcc(csd0size + 1024);
-                        size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
-                        meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
-                    }
-                } else if (profile == 9) {
-                    sp<ABuffer> csd1;
-                    if (msg->findBuffer("csd-avc", &csd)) {
-                        meta->setData(kKeyAVCC, kTypeAVCC, csd->data(), csd->size());
-                    } else if (msg->findBuffer("csd-1", &csd1)) {
-                        std::vector<char> avcc(csd0size + csd1->size() + 1024);
-                        size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
-                        meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
-                    } else { // for dolby vision avc, csd0 also holds csd1
-                        size_t i = 0;
-                        int csd0realsize = 0;
-                        do {
-                            i = findNextNalStartCode(csd0->data() + i,
-                                            csd0->size() - i) - csd0->data();
-                            if (i > 0) {
-                                csd0realsize = i;
-                                break;
-                            }
-                            i += 4;
-                        } while(i < csd0->size());
-                        // buffer0 -> csd0
-                        sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
-                        if (buffer0.get() == NULL || buffer0->base() == NULL) {
-                            return NO_MEMORY;
+            profiles.rlookup(DolbyVisionProfileDvavPen, &profileVal);
+            profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal1);
+            profiles.rlookup(DolbyVisionProfileDvav110, &profileVal2);
+            if ((profile > (int32_t) profileVal) && (profile < (int32_t) profileVal1)) {
+                std::vector<uint8_t> hvcc(csd0size + 1024);
+                size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
+                meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
+            } else if (profile == (int32_t) profileVal2) {
+                meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+            } else {
+                sp<ABuffer> csd1;
+                if (msg->findBuffer("csd-1", &csd1)) {
+                    std::vector<char> avcc(csd0size + csd1->size() + 1024);
+                    size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                }
+                else {
+                    // for dolby vision avc, csd0 also holds csd1
+                    size_t i = 0;
+                    int csd0realsize = 0;
+                    do {
+                        i = findNextNalStartCode(csd0->data() + i,
+                                        csd0->size() - i) - csd0->data();
+                        if (i > 0) {
+                            csd0realsize = i;
+                            break;
                         }
-                        memcpy(buffer0->data(), csd0->data(), csd0realsize);
-                        // buffer1 -> csd1
-                        sp<ABuffer> buffer1 = new (std::nothrow)
-                                ABuffer(csd0->size() - csd0realsize);
-                        if (buffer1.get() == NULL || buffer1->base() == NULL) {
-                            return NO_MEMORY;
-                        }
-                        memcpy(buffer1->data(), csd0->data()+csd0realsize,
-                                    csd0->size() - csd0realsize);
-
-                        std::vector<char> avcc(csd0->size() + 1024);
-                        size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
-                        meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                        i += 4;
+                    } while(i < csd0->size());
+                    // buffer0 -> csd0
+                    sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
+                    if (buffer0.get() == NULL || buffer0->base() == NULL) {
+                        return NO_MEMORY;
                     }
-                } else if (profile == 10) {
-                    meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size() - 24);
+                    memcpy(buffer0->data(), csd0->data(), csd0realsize);
+                    // buffer1 -> csd1
+                    sp<ABuffer> buffer1 = new (std::nothrow)
+                            ABuffer(csd0->size() - csd0realsize);
+                    if (buffer1.get() == NULL || buffer1->base() == NULL) {
+                        return NO_MEMORY;
+                    }
+                    memcpy(buffer1->data(), csd0->data()+csd0realsize,
+                                csd0->size() - csd0realsize);
+
+                    std::vector<char> avcc(csd0->size() + 1024);
+                    size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
                 }
             }
         } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
@@ -2216,6 +2226,17 @@
         meta->setData(kKeyStreamHeader, 'mdat', csd0->data(), csd0->size());
     } else if (msg->findBuffer("d263", &csd0)) {
         meta->setData(kKeyD263, kTypeD263, csd0->data(), csd0->size());
+    } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION && msg->findBuffer("csd-2", &csd2)) {
+        meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+
+        // Remove CSD-2 from the data here to avoid duplicate data in meta
+        meta->remove(kKeyOpaqueCSD2);
+
+        if (msg->findBuffer("csd-avc", &csd0)) {
+            meta->setData(kKeyAVCC, kTypeAVCC, csd0->data(), csd0->size());
+        } else if (msg->findBuffer("csd-hevc", &csd0)) {
+            meta->setData(kKeyHVCC, kTypeHVCC, csd0->data(), csd0->size());
+        }
     }
     // XXX TODO add whatever other keys there are
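
When no csd-2 buffer is present, the converter above synthesizes the 24-byte dv(cc|vc|wc) record from the 'profile' and 'level' keys. A hedged sketch of that byte packing (the remaining bytes stay zero; the helper name is illustrative):

    #include <array>
    #include <cstdint>

    std::array<uint8_t, 24> makeDoviConfigRecord(
            uint8_t profile, uint8_t level, uint8_t blCompatibilityId) {
        std::array<uint8_t, 24> dvcc{};            // trailing reserved bytes stay 0
        dvcc[0] = 1;                               // dv_version_major
        dvcc[1] = 0;                               // dv_version_minor
        dvcc[2] = (uint8_t)((profile & 0x7f) << 1);
        dvcc[2] |= (uint8_t)((level >> 5) & 0x1);  // high bit of dv_level
        dvcc[3] = (uint8_t)((level & 0x1f) << 3);  // low five bits of dv_level
        dvcc[3] |= (uint8_t)(1 << 2);              // rpu_present_flag
        dvcc[3] |= 1;                              // bl_present_flag
        dvcc[4] = (uint8_t)(blCompatibilityId << 4);
        return dvcc;
    }
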
 
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 0a4e598..4334f1e 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -19,6 +19,8 @@
 #include <utils/Log.h>
 #include <utils/String8.h>
 
+#include <inttypes.h>
+
 #include "ALooperRoster.h"
 
 #include "ADebug.h"
@@ -142,7 +144,7 @@
             sp<AHandler> handler = info.mHandler.promote();
             if (handler != NULL) {
                 handler->mVerboseStats = verboseStats;
-                s.appendFormat(": %u messages processed", handler->mMessageCounter);
+                s.appendFormat(": %" PRIu64 " messages processed", handler->mMessageCounter);
                 if (verboseStats) {
                     for (size_t j = 0; j < handler->mMessages.size(); j++) {
                         char fourcc[15];
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
index 53d8a9b..337460a 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
@@ -66,7 +66,7 @@
     }
 
     bool mVerboseStats;
-    uint32_t mMessageCounter;
+    uint64_t mMessageCounter;
     KeyedVector<uint32_t, uint32_t> mMessages;
 
     void deliverMessage(const sp<AMessage> &msg);
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index d59e4f5..e417324 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -50,7 +50,8 @@
     sp<IMemory> extractFrame(FrameRect *rect = NULL);
 
     static sp<IMemory> getMetadataOnly(
-            const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail = false);
+            const sp<MetaData> &trackMeta, int colorFormat,
+            bool thumbnail = false, uint32_t bitDepth = 0);
 
 protected:
     virtual ~FrameDecoder();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index ea7429c..f5af50d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -70,6 +70,13 @@
 using aidl::android::media::MediaResourceParcel;
 
 struct MediaCodec : public AHandler {
+    enum Domain {
+        DOMAIN_UNKNOWN = 0,
+        DOMAIN_VIDEO = 1,
+        DOMAIN_AUDIO = 2,
+        DOMAIN_IMAGE = 3
+    };
+
     enum ConfigureFlags {
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
@@ -437,10 +444,10 @@
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
-    bool mIsVideo;
+    Domain mDomain;
     AString mLogSessionId;
-    int32_t mVideoWidth;
-    int32_t mVideoHeight;
+    int32_t mWidth;
+    int32_t mHeight;
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 84653eb..78792c5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -816,7 +816,7 @@
 constexpr char KEY_OPERATING_RATE[] = "operating-rate";
 constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
-constexpr char KEY_PICTURE_TYPE[] = "picture_type";
+constexpr char KEY_PICTURE_TYPE[] = "picture-type";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
 constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 6b1d2a1..1c8eef5 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -204,7 +204,7 @@
 };
 
 bool AMPEG4ElementaryAssembler::initCheck() {
-    if(mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0) {
+    if(mIsGeneric && (mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0)) {
         android_errorWriteLog(0x534e4554, "124777537");
         return false;
     }
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index ac1e9b1..a8e64b6 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -393,3 +393,51 @@
     std::this_thread::sleep_for(std::chrono::milliseconds(100));
     looper->stop();
 }
+
+TEST(MediaCodecTest, DeadWhileStoppingError) {
+    // Test scenario:
+    //
+    // 1) Client thread calls stop(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) An error occurs while handling initiateShutdown().
+    // 3) MediaCodec looper thread handles the error.
+    // 4) Codec service dies after the error is handled
+    // 5) MediaCodec looper thread handles the death.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // 4)
+                    mockCodec->callback()->onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    // Codec service has died, no callback.
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    codec->stop();
+    // sleep here so that the looper thread can handle the error
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    looper->stop();
+}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 58c68ef..ae97c50 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -36,7 +36,6 @@
 
     static_libs: [
         "libstagefright_timedtext",
-        "libstagefright_foundation",
     ],
 
     header_libs: [
@@ -47,6 +46,7 @@
         "liblog",
         "libmedia",
         "libbinder",
+        "libstagefright_foundation",
     ],
 
     cflags: [
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index ee7af70..b97f347 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -174,10 +174,13 @@
             params.sampleRate = 16000;
         } else {
             params.sampleRate = max(1, params.sampleRate);
+            params.channelCount = max(0, params.channelCount);
         }
         format->setInt32("channel-count", params.channelCount);
         format->setInt32("sample-rate", params.sampleRate);
     } else if (!strncmp(params.mime, "video/", 6)) {
+        params.width = max(1, params.width);
+        params.height = max(1, params.height);
         format->setInt32("width", params.width);
         format->setInt32("height", params.height);
     }
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 537df76..edddaa4 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -33,7 +33,7 @@
 
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "libandroidicu",
+        "libicu",
         "libfmq",
         "libbinder",
         "libhidlbase",
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 354971a..6f25cec 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -15,6 +15,8 @@
  */
 
 #include <inttypes.h>
+#include <mutex>
+#include <set>
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NdkMediaCodec"
@@ -42,6 +44,7 @@
 
 
 static media_status_t translate_error(status_t err) {
+
     if (err == OK) {
         return AMEDIA_OK;
     } else if (err == -EAGAIN) {
@@ -51,7 +54,18 @@
     } else if (err == DEAD_OBJECT) {
         return AMEDIACODEC_ERROR_RECLAIMED;
     }
-    ALOGE("sf error code: %d", err);
+
+    {
+        // Minimize log flooding. Some CTS behavior made this noisy, and apps could do the same.
+        static std::set<status_t> untranslated;
+        static std::mutex mutex;
+        std::lock_guard lg(mutex);
+
+        if (untranslated.find(err) == untranslated.end()) {
+            ALOGE("untranslated sf error code: %d", err);
+            untranslated.insert(err);
+        }
+    }
     return AMEDIA_ERROR_UNKNOWN;
 }
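// A minimal standalone sketch (not part of the patch) of the log-once pattern that the
// translate_error() change above adopts: a static set remembers error codes already
// reported so repeated failures do not flood the log. The function and message here
// are hypothetical.
#include <cstdio>
#include <mutex>
#include <set>

static void logErrorOnce(int err) {
    static std::set<int> seen;
    static std::mutex mutex;
    std::lock_guard lg(mutex);
    // insert() reports whether the value was newly added; log only the first occurrence.
    if (seen.insert(err).second) {
        std::fprintf(stderr, "untranslated error code: %d\n", err);
    }
}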
 
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index ba8f199..1756e47 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -30,6 +30,7 @@
         "ISchedulingPolicyService.cpp",
         "LimitProcessMemory.cpp",
         "MemoryLeakTrackUtil.cpp",
+        "MethodStatistics.cpp",
         "ProcessInfo.cpp",
         "SchedulingPolicyService.cpp",
         "ServiceUtilities.cpp",
@@ -43,11 +44,12 @@
     ],
     shared_libs: [
         "libaudioclient_aidl_conversion",
-        "libaudioutils", // for clock.h
+        "libaudioutils", // for clock.h, Statistics.h
         "libbinder",
         "libcutils",
         "liblog",
         "libutils",
+        "libutilscallstack",
         "libhidlbase",
         "libpermission",
         "android.hardware.graphics.bufferqueue@1.0",
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
new file mode 100644
index 0000000..875c43d
--- /dev/null
+++ b/media/utils/MethodStatistics.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/MethodStatistics.h>
+
+namespace android::mediautils {
+
+// Repository for MethodStatistics Objects
+
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName) {
+    static const std::map<std::string, std::shared_ptr<std::vector<std::string>>> m {
+        {
+            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL,
+            std::shared_ptr<std::vector<std::string>>(
+                new std::vector<std::string>{
+                "DeviceHalHidl",
+                "StreamInHalHidl",
+                "StreamOutHalHidl",
+              })
+        },
+    };
+    auto it = m.find({moduleName.begin(), moduleName.end()});
+    if (it == m.end()) return {};
+    return it->second;
+}
+
+static void addClassesToMap(const std::shared_ptr<std::vector<std::string>> &classNames,
+        std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> &map) {
+    if (classNames) {
+        for (const auto& className : *classNames) {
+            map.emplace(className, std::make_shared<MethodStatistics<std::string>>());
+        }
+    }
+}
+
+// singleton statistics for DeviceHalHidl StreamOutHalHidl StreamInHalHidl
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className) {
+    static const std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m =
+        // copy elided initialization of map m.
+        [](){
+            std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m;
+            addClassesToMap(
+                    getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
+                    m);
+            return m;
+        }();
+
+    auto it = m.find({className.begin(), className.end()});
+    if (it == m.end()) return {};
+    return it->second;
+}
+
+} // android::mediautils
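// Illustrative sketch (not part of the patch): a caller looks up the per-class
// statistics singleton registered above and records one timed method call.
// The method name and elapsed time are made up for illustration.
#include <mediautils/MethodStatistics.h>

void recordHalMethodTiming() {
    using namespace android::mediautils;
    std::shared_ptr<MethodStatistics<std::string>> stats =
            getStatisticsForClass("StreamOutHalHidl");
    if (stats) {
        stats->event("write", 2.25f /* executeMs */);
    }
}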
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 2b765cc..75a1b22 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -17,23 +17,79 @@
 #define LOG_TAG "TimeCheck"
 
 #include <optional>
-#include <sstream>
 
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
 #include <mediautils/EventLog.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 #include "debuggerd/handler.h"
 
-namespace android {
+namespace android::mediautils {
 
-namespace {
-
+/**
+ * Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
+ */
 std::string formatTime(std::chrono::system_clock::time_point t) {
-    auto msSinceEpoch = std::chrono::round<std::chrono::milliseconds>(t.time_since_epoch());
-    return (std::ostringstream() << msSinceEpoch.count()).str();
+    auto time_string = audio_utils_time_string_from_ns(
+            std::chrono::nanoseconds(t.time_since_epoch()).count());
+
+    // The time string is 19 characters (including null termination).
+    // Example: "03-27 16:47:06.187"
+    //           MM DD HH MM SS MS
+    // We offset by 6 to get HH:MM:SS.MSc
+    //
+    return time_string.time + 6; // offset to remove month/day.
 }
 
-}  // namespace
+/**
+ * Finds the end of the common time prefix.
+ *
+ * This is used as an option to remove the common time prefix to avoid
+ * unnecessarily duplicated strings.
+ *
+ * \param time1 a time string
+ * \param time2 a time string
+ * \return      the position where the common time prefix ends. For abbreviated
+ *              printing of time2, offset the character pointer by this position.
+ */
+static size_t commonTimePrefixPosition(std::string_view time1, std::string_view time2) {
+    const size_t endPos = std::min(time1.size(), time2.size());
+    size_t i;
+
+    // Find location of the first mismatch between strings
+    for (i = 0; ; ++i) {
+        if (i == endPos) {
+            return i; // strings match completely to the length of one of the strings.
+        }
+        if (time1[i] != time2[i]) {
+            break;
+        }
+        if (time1[i] == '\0') {
+            return i; // "printed" strings match completely.  No need to check further.
+        }
+    }
+
+    // Go backwards until we find a delimiter or space.
+    for (; i > 0
+           && isdigit(time1[i]) // still a number
+           && time1[i - 1] != ' '
+         ; --i) {
+    }
+    return i;
+}
+
+/**
+ * Returns the unique suffix of time2 that isn't present in time1.
+ *
+ * If time2 is identical to time1, then an empty string_view is returned.
+ * This method is used to elide the common prefix when printing times.
+ */
+std::string_view timeSuffix(std::string_view time1, std::string_view time2) {
+    const size_t pos = commonTimePrefixPosition(time1, time2);
+    return time2.substr(pos);
+}
 
 // Audio HAL server pids vector used to generate audio HAL processes tombstone
 // when audioserver watchdog triggers.
@@ -48,7 +104,7 @@
 void TimeCheck::accessAudioHalPids(std::vector<pid_t>* pids, bool update) {
     static constexpr int kNumAudioHalPidsVectors = 3;
     static std::vector<pid_t> audioHalPids[kNumAudioHalPidsVectors];
-    static std::atomic<int> curAudioHalPids = 0;
+    static std::atomic<unsigned> curAudioHalPids = 0;
 
     if (update) {
         audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
@@ -70,27 +126,69 @@
 }
 
 /* static */
-TimerThread* TimeCheck::getTimeCheckThread() {
-    static TimerThread* sTimeCheckThread = new TimerThread();
+TimerThread& TimeCheck::getTimeCheckThread() {
+    static TimerThread sTimeCheckThread{};
     return sTimeCheckThread;
 }
 
-TimeCheck::TimeCheck(const char* tag, uint32_t timeoutMs)
-    : mTimerHandle(getTimeCheckThread()->scheduleTask(
-              [tag, startTime = std::chrono::system_clock::now()] { crash(tag, startTime); },
-              std::chrono::milliseconds(timeoutMs))) {}
-
-TimeCheck::~TimeCheck() {
-    getTimeCheckThread()->cancelTask(mTimerHandle);
+/* static */
+std::string TimeCheck::toString() {
+    // Note: pending and retired are individually locked for maximum concurrency,
+    // so the snapshot is not instantaneous at a single time.
+    return getTimeCheckThread().toString();
 }
 
-/* static */
-void TimeCheck::crash(const char* tag, std::chrono::system_clock::time_point startTime) {
-    std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+TimeCheck::TimeCheck(std::string tag, OnTimerFunc&& onTimer, uint32_t timeoutMs,
+        bool crashOnTimeout)
+    : mTimeCheckHandler(new TimeCheckHandler{
+            std::move(tag), std::move(onTimer), crashOnTimeout,
+            std::chrono::system_clock::now(), gettid()})
+    , mTimerHandle(timeoutMs == 0
+              ? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
+              : getTimeCheckThread().scheduleTask(
+                      mTimeCheckHandler->tag,
+                      // Pass in all the arguments by value to this task for safety.
+                      // The thread could call the callback before the constructor is finished.
+                      // The destructor does not block on the callback.
+                      [ timeCheckHandler = mTimeCheckHandler ] {
+                          timeCheckHandler->onTimeout();
+                      },
+                      std::chrono::milliseconds(timeoutMs))) {}
+
+TimeCheck::~TimeCheck() {
+    if (mTimeCheckHandler) {
+        mTimeCheckHandler->onCancel(mTimerHandle);
+    }
+}
+
+void TimeCheck::TimeCheckHandler::onCancel(TimerThread::Handle timerHandle) const
+{
+    if (TimeCheck::getTimeCheckThread().cancelTask(timerHandle) && onTimer) {
+        const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+        onTimer(false /* timeout */,
+                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                        endTime - startTime).count());
+    }
+}
+
+void TimeCheck::TimeCheckHandler::onTimeout() const
+{
+    const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+    if (onTimer) {
+        onTimer(true /* timeout */,
+                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                        endTime - startTime).count());
+    }
+
+    if (!crashOnTimeout) return;
+
+    // Generate the TimerThread summary string early before sending signals to the
+    // HAL processes which can affect thread behavior.
+    const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
 
     // Generate audio HAL processes tombstones and allow time to complete
     // before forcing restart
-    std::vector<pid_t> pids = getAudioHalPids();
+    std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
     if (pids.size() != 0) {
         for (const auto& pid : pids) {
             ALOGI("requesting tombstone for pid: %d", pid);
@@ -100,9 +198,45 @@
     } else {
         ALOGI("No HAL process pid available, skipping tombstones");
     }
-    LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag);
-    LOG_ALWAYS_FATAL("TimeCheck timeout for %s (start=%s, end=%s)", tag,
-                     formatTime(startTime).c_str(), formatTime(endTime).c_str());
+
+    LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
+
+    // Create abort message string - caution: this can be very large.
+    const std::string abortMessage = std::string("TimeCheck timeout for ")
+            .append(tag)
+            .append(" scheduled ").append(formatTime(startTime))
+            .append(" on thread ").append(std::to_string(tid)).append("\n")
+            .append(summary);
+
+    // Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
+    // Log message text may be truncated to less than an
+    // implementation-specific limit (1023 bytes).
+    //
+    // Here, we send the string through android-base/logging.h LOG()
+    // to avoid the size limitation. LOG(FATAL) does an abort whereas
+    // LOG(FATAL_WITHOUT_ABORT) does not abort.
+
+    LOG(FATAL) << abortMessage;
 }
 
-};  // namespace android
+// Automatically create a TimeCheck class for a class and method.
+// This is used for Audio HIDL support.
+mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
+        std::string_view className, std::string_view methodName) {
+    std::shared_ptr<MethodStatistics<std::string>> statistics =
+            mediautils::getStatisticsForClass(className);
+    if (!statistics) return {}; // empty TimeCheck.
+    return mediautils::TimeCheck(
+            std::string(className).append("::").append(methodName),
+            [ clazz = std::string(className), method = std::string(methodName),
+              stats = std::move(statistics) ]
+            (bool timeout, float elapsedMs) {
+                    if (timeout) {
+                        ; // ignored, there is no timeout value.
+                    } else {
+                        stats->event(method, elapsedMs);
+                    }
+            }, 0 /* timeoutMs */);
+}
+
+}  // namespace android::mediautils
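// Illustrative sketch (not part of the patch): how makeTimeCheckStatsForClassMethod()
// above is meant to be used. Wrapping a HAL call in the returned TimeCheck records the
// elapsed time into the MethodStatistics singleton for that class when the object
// leaves scope. The surrounding function is hypothetical.
#include <mediautils/TimeCheck.h>

void forwardWriteToHal() {
    auto timeCheck = android::mediautils::makeTimeCheckStatsForClassMethod(
            "StreamOutHalHidl", "write");
    // ... forward the call to the HAL here; on destruction the elapsed time is
    // reported to the "StreamOutHalHidl" statistics under the "write" method.
}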
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/TimerThread-test.cpp
index ee8a811..93cd64c 100644
--- a/media/utils/TimerThread-test.cpp
+++ b/media/utils/TimerThread-test.cpp
@@ -20,54 +20,71 @@
 #include <mediautils/TimerThread.h>
 
 using namespace std::chrono_literals;
+using namespace android::mediautils;
 
-namespace android {
 namespace {
 
 constexpr auto kJitter = 10ms;
 
+// Each task written by *ToString() will start with a left brace.
+constexpr char REQUEST_START = '{';
+
+inline size_t countChars(std::string_view s, char c) {
+    return std::count(s.begin(), s.end(), c);
+}
+
 TEST(TimerThread, Basic) {
     std::atomic<bool> taskRan = false;
     TimerThread thread;
-    thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+    thread.scheduleTask("Basic", [&taskRan] { taskRan = true; }, 100ms);
     std::this_thread::sleep_for(100ms - kJitter);
     ASSERT_FALSE(taskRan);
     std::this_thread::sleep_for(2 * kJitter);
     ASSERT_TRUE(taskRan);
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, Cancel) {
     std::atomic<bool> taskRan = false;
     TimerThread thread;
-    TimerThread::Handle handle = thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+    TimerThread::Handle handle =
+            thread.scheduleTask("Cancel", [&taskRan] { taskRan = true; }, 100ms);
     std::this_thread::sleep_for(100ms - kJitter);
     ASSERT_FALSE(taskRan);
-    thread.cancelTask(handle);
+    ASSERT_TRUE(thread.cancelTask(handle));
     std::this_thread::sleep_for(2 * kJitter);
     ASSERT_FALSE(taskRan);
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, CancelAfterRun) {
     std::atomic<bool> taskRan = false;
     TimerThread thread;
-    TimerThread::Handle handle = thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+    TimerThread::Handle handle =
+            thread.scheduleTask("CancelAfterRun", [&taskRan] { taskRan = true; }, 100ms);
     std::this_thread::sleep_for(100ms + kJitter);
     ASSERT_TRUE(taskRan);
-    thread.cancelTask(handle);
+    ASSERT_FALSE(thread.cancelTask(handle));
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
 }
 
 TEST(TimerThread, MultipleTasks) {
-    std::array<std::atomic<bool>, 6> taskRan;
+    std::array<std::atomic<bool>, 6> taskRan{};
     TimerThread thread;
 
     auto startTime = std::chrono::steady_clock::now();
 
-    thread.scheduleTask([&taskRan] { taskRan[0] = true; }, 300ms);
-    thread.scheduleTask([&taskRan] { taskRan[1] = true; }, 100ms);
-    thread.scheduleTask([&taskRan] { taskRan[2] = true; }, 200ms);
-    thread.scheduleTask([&taskRan] { taskRan[3] = true; }, 400ms);
-    auto handle4 = thread.scheduleTask([&taskRan] { taskRan[4] = true; }, 200ms);
-    thread.scheduleTask([&taskRan] { taskRan[5] = true; }, 200ms);
+    thread.scheduleTask("0", [&taskRan] { taskRan[0] = true; }, 300ms);
+    thread.scheduleTask("1", [&taskRan] { taskRan[1] = true; }, 100ms);
+    thread.scheduleTask("2", [&taskRan] { taskRan[2] = true; }, 200ms);
+    thread.scheduleTask("3", [&taskRan] { taskRan[3] = true; }, 400ms);
+    auto handle4 = thread.scheduleTask("4", [&taskRan] { taskRan[4] = true; }, 200ms);
+    thread.scheduleTask("5", [&taskRan] { taskRan[5] = true; }, 200ms);
+
+    // 6 tasks pending
+    ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks completed
+    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
 
     // Task 1 should trigger around 100ms.
     std::this_thread::sleep_until(startTime + 100ms - kJitter);
@@ -123,6 +140,11 @@
     ASSERT_FALSE(taskRan[4]);
     ASSERT_TRUE(taskRan[5]);
 
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks ran and 1 cancelled
+    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
     // Task 3 should trigger around 400ms.
     std::this_thread::sleep_until(startTime + 400ms - kJitter);
     ASSERT_TRUE(taskRan[0]);
@@ -132,6 +154,9 @@
     ASSERT_FALSE(taskRan[4]);
     ASSERT_TRUE(taskRan[5]);
 
+    // 4 tasks ran and 1 cancelled
+    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
     std::this_thread::sleep_until(startTime + 400ms + kJitter);
     ASSERT_TRUE(taskRan[0]);
     ASSERT_TRUE(taskRan[1]);
@@ -139,8 +164,62 @@
     ASSERT_TRUE(taskRan[3]);
     ASSERT_FALSE(taskRan[4]);
     ASSERT_TRUE(taskRan[5]);
+
+    // 0 tasks pending
+    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    // 5 tasks ran and 1 cancelled
+    ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
 }
 
+TEST(TimerThread, TrackedTasks) {
+    TimerThread thread;
+
+    auto handle0 = thread.trackTask("0");
+    auto handle1 = thread.trackTask("1");
+    auto handle2 = thread.trackTask("2");
+
+    // 3 tasks pending
+    ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks retired
+    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle0));
+    ASSERT_TRUE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    // handle1 is stale; cancelTask() returns false.
+    ASSERT_FALSE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Add another tracked task.
+    auto handle3 = thread.trackTask("3");
+
+    // 2 tasks pending
+    ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle2));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 3 tasks retired
+    ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle3));
+
+    // 0 tasks pending
+    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks retired
+    ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
+}
 
 }  // namespace
-}  // namespace android
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 3c95798..3556d7d 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -17,18 +17,285 @@
 #define LOG_TAG "TimerThread"
 
 #include <optional>
+#include <sstream>
+#include <unistd.h>
+#include <vector>
 
 #include <mediautils/TimerThread.h>
+#include <utils/CallStack.h>
 #include <utils/ThreadDefs.h>
 
-namespace android {
+namespace android::mediautils {
 
-TimerThread::TimerThread() : mThread([this] { threadFunc(); }) {
-    pthread_setname_np(mThread.native_handle(), "TimeCheckThread");
-    pthread_setschedprio(mThread.native_handle(), PRIORITY_URGENT_AUDIO);
+extern std::string formatTime(std::chrono::system_clock::time_point t);
+extern std::string_view timeSuffix(std::string_view time1, std::string_view time2);
+
+TimerThread::Handle TimerThread::scheduleTask(
+        std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout) {
+    const auto now = std::chrono::system_clock::now();
+    std::shared_ptr<const Request> request{
+            new Request{ now, now + timeout, gettid(), std::move(tag) }};
+    return mMonitorThread.add(std::move(request), std::move(func), timeout);
 }
 
-TimerThread::~TimerThread() {
+TimerThread::Handle TimerThread::trackTask(std::string tag) {
+    const auto now = std::chrono::system_clock::now();
+    std::shared_ptr<const Request> request{
+            new Request{ now, now, gettid(), std::move(tag) }};
+    return mNoTimeoutMap.add(std::move(request));
+}
+
+bool TimerThread::cancelTask(Handle handle) {
+    std::shared_ptr<const Request> request = mNoTimeoutMap.isValidHandle(handle) ?
+             mNoTimeoutMap.remove(handle) : mMonitorThread.remove(handle);
+    if (!request) return false;
+    mRetiredQueue.add(std::move(request));
+    return true;
+}
+
+std::string TimerThread::toString(size_t retiredCount) const {
+    // Note: These request queues are snapshotted very close together, but
+    // not at "identical" times, as we don't use a class-wide lock.
+
+    std::vector<std::shared_ptr<const Request>> timeoutRequests;
+    std::vector<std::shared_ptr<const Request>> retiredRequests;
+    mTimeoutQueue.copyRequests(timeoutRequests);
+    mRetiredQueue.copyRequests(retiredRequests, retiredCount);
+    std::vector<std::shared_ptr<const Request>> pendingRequests =
+        getPendingRequests();
+
+    struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
+    std::string analysisSummary;
+    if (!analysis.summary.empty()) {
+        analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
+    }
+    std::string timeoutStack;
+    if (analysis.timeoutTid != -1) {
+        timeoutStack = std::string("\ntimeout(")
+                .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
+                .append(tidCallStackString(analysis.timeoutTid)).append("]");
+    }
+    std::string blockedStack;
+    if (analysis.HALBlockedTid != -1) {
+        blockedStack = std::string("\nblocked(")
+                .append(std::to_string(analysis.HALBlockedTid)).append(")  callstack [\n")
+                .append(tidCallStackString(analysis.HALBlockedTid)).append("]");
+    }
+
+    return std::string("now ")
+            .append(formatTime(std::chrono::system_clock::now()))
+            .append(analysisSummary)
+            .append("\ntimeout [ ")
+            .append(requestsToString(timeoutRequests))
+            .append(" ]\npending [ ")
+            .append(requestsToString(pendingRequests))
+            .append(" ]\nretired [ ")
+            .append(requestsToString(retiredRequests))
+            .append(" ]")
+            .append(timeoutStack)
+            .append(blockedStack);
+}
+
+// A HAL method is where the substring "Hidl" is in the class name.
+// The tag should look like: ... Hidl ... :: ...
+// When the audio HAL is updated to AIDL, perhaps we will instead use
+// a global directory of HAL classes.
+//
+// See MethodStatistics.cpp:
+// mediautils::getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL)
+//
+/* static */
+bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
+    const size_t hidlPos = request->tag.find("Hidl");
+    if (hidlPos == std::string::npos) return false;
+    // There should be a separator after "Hidl", indicating it was part of the class name.
+    const size_t separatorPos = request->tag.find("::", hidlPos);
+    return separatorPos != std::string::npos;
+}
+
+/* static */
+struct TimerThread::Analysis TimerThread::analyzeTimeout(
+    const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+    const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
+
+    if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
+
+    // for now look at last timeout (in our case, the only timeout)
+    const std::shared_ptr<const Request> timeout = timeoutRequests.back();
+
+    // pending Requests that are problematic.
+    std::vector<std::shared_ptr<const Request>> pendingExact;
+    std::vector<std::shared_ptr<const Request>> pendingPossible;
+
+    // We look at pending requests that were scheduled no later than kDuration
+    // after the timeout request. This prevents false matches with calls
+    // that naturally block for a short period of time
+    // such as HAL write() and read().
+    //
+    auto constexpr kDuration = std::chrono::milliseconds(1000);
+    for (const auto& pending : pendingRequests) {
+        // If the pending tid is the same as timeout tid, problem identified.
+        if (pending->tid == timeout->tid) {
+            pendingExact.emplace_back(pending);
+            continue;
+        }
+
+        // If the pending request was scheduled within the time limit.
+        if (pending->scheduled - timeout->scheduled < kDuration) {
+            pendingPossible.emplace_back(pending);
+        }
+    }
+
+    struct Analysis analysis{};
+
+    analysis.timeoutTid = timeout->tid;
+    std::string& summary = analysis.summary;
+    if (!pendingExact.empty()) {
+        const auto& request = pendingExact.front();
+        const bool hal = isRequestFromHal(request);
+
+        if (hal) {
+            summary = std::string("Blocked directly due to HAL call: ")
+                .append(request->toString());
+        }
+    }
+    if (summary.empty() && !pendingPossible.empty()) {
+        for (const auto& request : pendingPossible) {
+            const bool hal = isRequestFromHal(request);
+            if (hal) {
+                // The first blocked call is the most likely one.
+                // More recent calls might just be temporarily blocked
+                // calls, such as write() or read(), depending on kDuration.
+                summary = std::string("Blocked possibly due to HAL call: ")
+                    .append(request->toString());
+                analysis.HALBlockedTid = request->tid;
+            }
+        }
+    }
+    return analysis;
+}
+
+std::vector<std::shared_ptr<const TimerThread::Request>> TimerThread::getPendingRequests() const {
+    constexpr size_t kEstimatedPendingRequests = 8;  // approx 128 byte alloc.
+    std::vector<std::shared_ptr<const Request>> pendingRequests;
+    pendingRequests.reserve(kEstimatedPendingRequests); // preallocate vector out of lock.
+
+    // The following are internally locked calls, which add to our local pendingRequests.
+    mMonitorThread.copyRequests(pendingRequests);
+    mNoTimeoutMap.copyRequests(pendingRequests);
+
+    // Sort in order of scheduled time.
+    std::sort(pendingRequests.begin(), pendingRequests.end(),
+        [](const std::shared_ptr<const Request>& r1,
+           const std::shared_ptr<const Request>& r2) {
+               return r1->scheduled < r2->scheduled;
+           });
+    return pendingRequests;
+}
+
+std::string TimerThread::pendingToString() const {
+    return requestsToString(getPendingRequests());
+}
+
+std::string TimerThread::retiredToString(size_t n) const {
+    std::vector<std::shared_ptr<const Request>> retiredRequests;
+    mRetiredQueue.copyRequests(retiredRequests, n);
+
+    // Dump to string
+    return requestsToString(retiredRequests);
+}
+
+std::string TimerThread::timeoutToString(size_t n) const {
+    std::vector<std::shared_ptr<const Request>> timeoutRequests;
+    mTimeoutQueue.copyRequests(timeoutRequests, n);
+
+    // Dump to string
+    return requestsToString(timeoutRequests);
+}
+
+/* static */
+std::string TimerThread::tidCallStackString(pid_t tid) {
+    CallStack cs{};
+    cs.update(0 /* ignoreDepth */, tid);
+    return cs.toString().c_str();
+}
+
+std::string TimerThread::Request::toString() const {
+    const auto scheduledString = formatTime(scheduled);
+    const auto deadlineString = formatTime(deadline);
+    return std::string(tag)
+        .append(" scheduled ").append(scheduledString)
+        .append(" deadline ").append(timeSuffix(scheduledString, deadlineString))
+        .append(" tid ").append(std::to_string(tid));
+}
+
+void TimerThread::RequestQueue::add(std::shared_ptr<const Request> request) {
+    std::lock_guard lg(mRQMutex);
+    mRequestQueue.emplace_back(std::chrono::system_clock::now(), std::move(request));
+    if (mRequestQueue.size() > mRequestQueueMax) {
+        mRequestQueue.pop_front();
+    }
+}
+
+void TimerThread::RequestQueue::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests, size_t n) const {
+    std::lock_guard lg(mRQMutex);
+    const size_t size = mRequestQueue.size();
+    size_t i = n >=  size ? 0 : size - n;
+    for (; i < size; ++i) {
+        const auto &[time, request] = mRequestQueue[i];
+        requests.emplace_back(request);
+    }
+}
+
+bool TimerThread::NoTimeoutMap::isValidHandle(Handle handle) const {
+    return handle > getIndexedHandle(mNoTimeoutRequests);
+}
+
+TimerThread::Handle TimerThread::NoTimeoutMap::add(std::shared_ptr<const Request> request) {
+    std::lock_guard lg(mNTMutex);
+    // A unique handle is obtained by incrementing mNoTimeoutRequests.
+    // This need not be done under the lock, but we do so anyhow.
+    const Handle handle = getIndexedHandle(mNoTimeoutRequests++);
+    mMap[handle] = request;
+    return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::NoTimeoutMap::remove(Handle handle) {
+    std::lock_guard lg(mNTMutex);
+    auto it = mMap.find(handle);
+    if (it == mMap.end()) return {};
+    auto request = it->second;
+    mMap.erase(it);
+    return request;
+}
+
+void TimerThread::NoTimeoutMap::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests) const {
+    std::lock_guard lg(mNTMutex);
+    for (const auto &[handle, request] : mMap) {
+        requests.emplace_back(request);
+    }
+}
+
+TimerThread::Handle TimerThread::MonitorThread::getUniqueHandle_l(
+        std::chrono::milliseconds timeout) {
+    // To avoid key collisions, advance by 1 tick until the key is unique.
+    auto deadline = std::chrono::steady_clock::now() + timeout;
+    for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
+         deadline += std::chrono::steady_clock::duration(1))
+        ;
+    return deadline;
+}
+
+TimerThread::MonitorThread::MonitorThread(RequestQueue& timeoutQueue)
+        : mTimeoutQueue(timeoutQueue)
+        , mThread([this] { threadFunc(); }) {
+     pthread_setname_np(mThread.native_handle(), "TimerThread");
+     pthread_setschedprio(mThread.native_handle(), PRIORITY_URGENT_AUDIO);
+}
+
+TimerThread::MonitorThread::~MonitorThread() {
     {
         std::lock_guard _l(mMutex);
         mShouldExit = true;
@@ -37,34 +304,26 @@
     mThread.join();
 }
 
-TimerThread::Handle TimerThread::scheduleTaskAtDeadline(std::function<void()>&& func,
-                                                        TimePoint deadline) {
-    std::lock_guard _l(mMutex);
-
-    // To avoid key collisions, advance by 1 tick until the key is unique.
-    for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
-         deadline += TimePoint::duration(1))
-        ;
-    mMonitorRequests.emplace(deadline, std::move(func));
-    mCond.notify_all();
-    return deadline;
-}
-
-void TimerThread::cancelTask(Handle handle) {
-    std::lock_guard _l(mMutex);
-    mMonitorRequests.erase(handle);
-}
-
-void TimerThread::threadFunc() {
+void TimerThread::MonitorThread::threadFunc() {
     std::unique_lock _l(mMutex);
-
     while (!mShouldExit) {
         if (!mMonitorRequests.empty()) {
-            TimePoint nextDeadline = mMonitorRequests.begin()->first;
+            Handle nextDeadline = mMonitorRequests.begin()->first;
             if (nextDeadline < std::chrono::steady_clock::now()) {
-                // Deadline expired.
-                mMonitorRequests.begin()->second();
-                mMonitorRequests.erase(mMonitorRequests.begin());
+                // Deadline has expired, handle the request.
+                {
+                    auto node = mMonitorRequests.extract(mMonitorRequests.begin());
+                    _l.unlock();
+                    // We add Request to retired queue early so that it can be dumped out.
+                    mTimeoutQueue.add(std::move(node.mapped().first));
+                    node.mapped().second(); // Caution: we don't hold the lock here - but do we care?
+                                            // This is the timeout case!  We will crash soon,
+                                            // maybe before returning.
+                    // Anything left over is released here, outside the lock.
+                }
+                // reacquire the lock - if something was added, we loop immediately to check.
+                _l.lock();
+                continue;
             }
             mCond.wait_until(_l, nextDeadline);
         } else {
@@ -73,4 +332,35 @@
     }
 }
 
-}  // namespace android
+TimerThread::Handle TimerThread::MonitorThread::add(
+        std::shared_ptr<const Request> request, std::function<void()>&& func,
+        std::chrono::milliseconds timeout) {
+    std::lock_guard _l(mMutex);
+    const Handle handle = getUniqueHandle_l(timeout);
+    mMonitorRequests.emplace(handle, std::make_pair(std::move(request), std::move(func)));
+    mCond.notify_all();
+    return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::MonitorThread::remove(Handle handle) {
+    std::unique_lock ul(mMutex);
+    const auto it = mMonitorRequests.find(handle);
+    if (it == mMonitorRequests.end()) {
+        return {};
+    }
+    std::shared_ptr<const TimerThread::Request> request = std::move(it->second.first);
+    std::function<void()> func = std::move(it->second.second);
+    mMonitorRequests.erase(it);
+    ul.unlock();  // manually release lock here so func is released outside of lock.
+    return request;
+}
+
+void TimerThread::MonitorThread::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests) const {
+    std::lock_guard lg(mMutex);
+    for (const auto &[deadline, monitorpair] : mMonitorRequests) {
+        requests.emplace_back(monitorpair.first);
+    }
+}
+
+}  // namespace android::mediautils
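// Illustrative sketch (not part of the patch): basic use of the tagged TimerThread
// API implemented above. The tag, callback, and timeout are arbitrary.
#include <chrono>
#include <mediautils/TimerThread.h>

void timerThreadExample() {
    using namespace std::chrono_literals;
    android::mediautils::TimerThread thread;
    auto handle = thread.scheduleTask("example", [] { /* timeout work */ }, 100ms);
    // Cancelling before the deadline prevents the callback from running and moves
    // the entry to the retired list (visible via thread.retiredToString()).
    const bool cancelled = thread.cancelTask(handle);
    (void)cancelled;
}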
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
index eeb6ba6..7966469 100644
--- a/media/utils/fuzzers/TimeCheckFuzz.cpp
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -44,11 +44,11 @@
 
     // 2. We also have setAudioHalPids, which is populated with the pids set
     // above.
-    android::TimeCheck::setAudioHalPids(pids);
+    android::mediautils::TimeCheck::setAudioHalPids(pids);
     std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
 
     // 3. The constructor, which is fuzzed here:
-    android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+    android::mediautils::TimeCheck timeCheck(name.c_str(), {} /* onTimer */, timeoutMs);
     // We will leave some buffer to avoid sleeping too long
     uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
 
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
new file mode 100644
index 0000000..700fbaa
--- /dev/null
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <mutex>
+#include <string>
+#include <vector>
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/Statistics.h>
+
+namespace android::mediautils {
+
+/**
+ * MethodStatistics is used to associate Binder codes
+ * with a method name and execution time statistics.
+ *
+ * This is used to track binder transaction times for
+ * AudioFlinger and AudioPolicy services.
+ *
+ * Here, Code is the enumeration type for the method
+ * lookup.
+ */
+template <typename Code>
+class MethodStatistics {
+public:
+    using FloatType = float;
+    using StatsType = audio_utils::Statistics<FloatType>;
+
+    /**
+     * Method statistics.
+     *
+     * Initialized with the Binder transaction list for tracking AudioFlinger
+     * and AudioPolicyManager execution statistics.
+     */
+    explicit MethodStatistics(
+            const std::initializer_list<std::pair<const Code, std::string>>& methodMap = {})
+        : mMethodMap{methodMap} {}
+
+    /**
+     * Adds a method event, typically execution time in ms.
+     */
+    void event(Code code, FloatType executeMs) {
+        std::lock_guard lg(mLock);
+        mStatisticsMap[code].add(executeMs);
+    }
+
+    /**
+     * Returns the name for the method code.
+     */
+    std::string getMethodForCode(Code code) const {
+        auto it = mMethodMap.find(code);
+        return it == mMethodMap.end() ? std::to_string((int)code) : it->second;
+    }
+
+    /**
+     * Returns the number of times the method was invoked by event().
+     */
+    size_t getMethodCount(Code code) const {
+        std::lock_guard lg(mLock);
+        auto it = mStatisticsMap.find(code);
+        return it == mStatisticsMap.end() ? 0 : it->second.getN();
+    }
+
+    /**
+     * Returns the statistics object for the method.
+     */
+    StatsType getStatistics(Code code) const {
+        std::lock_guard lg(mLock);
+        auto it = mStatisticsMap.find(code);
+        return it == mStatisticsMap.end() ? StatsType{} : it->second;
+    }
+
+    /**
+     * Dumps the current method statistics.
+     */
+    std::string dump() const {
+        std::stringstream ss;
+        std::lock_guard lg(mLock);
+        if constexpr (std::is_same_v<Code, std::string>) {
+            for (const auto &[code, stats] : mStatisticsMap) {
+                ss << code <<
+                        " n=" << stats.getN() << " " << stats.toString() << "\n";
+            }
+        } else /* constexpr */ {
+            for (const auto &[code, stats] : mStatisticsMap) {
+                ss << int(code) << " " << getMethodForCode(code) <<
+                        " n=" << stats.getN() << " " << stats.toString() << "\n";
+            }
+        }
+        return ss.str();
+    }
+
+private:
+    const std::map<Code, std::string> mMethodMap;
+    mutable std::mutex mLock;
+    std::map<Code, StatsType> mStatisticsMap GUARDED_BY(mLock);
+};
+
+// Managed Statistics support.
+// Supported Modules
+#define METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL "AudioHidl"
+
+// Returns a vector of class names for the module, or a nullptr if module not found.
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName);
+
+// Returns a statistics object for that class, or a nullptr if class not found.
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className);
+
+// If used, this requires IBinder.h to be included at the location of invocation.
+#define METHOD_STATISTICS_BINDER_CODE_NAMES(CODE_TYPE) \
+    {(CODE_TYPE)IBinder::PING_TRANSACTION , "ping"}, \
+    {(CODE_TYPE)IBinder::DUMP_TRANSACTION , "dump"}, \
+    {(CODE_TYPE)IBinder::SHELL_COMMAND_TRANSACTION , "shellCommand"}, \
+    {(CODE_TYPE)IBinder::INTERFACE_TRANSACTION , "getInterfaceDescriptor"}, \
+    {(CODE_TYPE)IBinder::SYSPROPS_TRANSACTION , "SYSPROPS_TRANSACTION"}, \
+    {(CODE_TYPE)IBinder::EXTENSION_TRANSACTION , "EXTENSION_TRANSACTION"}, \
+    {(CODE_TYPE)IBinder::DEBUG_PID_TRANSACTION , "DEBUG_PID_TRANSACTION"}, \
+
+} // android::mediautils
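// Illustrative sketch (not part of the patch): MethodStatistics can also be keyed by
// an integral transaction code with a name table, as the template above allows.
// The codes, names, and timings below are made up for illustration.
#define LOG_TAG "MethodStatisticsExample"
#include <mediautils/MethodStatistics.h>
#include <utils/Log.h>

void binderCodeStatsExample() {
    android::mediautils::MethodStatistics<size_t> stats{
        {1, "createTrack"},
        {2, "openOutput"},
    };
    stats.event(1, 2.5f);  // 2.5 ms spent in "createTrack"
    stats.event(1, 3.5f);
    ALOGD("%s", stats.dump().c_str());  // e.g. "1 createTrack n=2 ..."
}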
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index 0d6e80d..ef03aef 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -20,27 +20,88 @@
 
 #include <mediautils/TimerThread.h>
 
-namespace android {
+namespace android::mediautils {
 
 // A class monitoring execution time for a code block (scoped variable) and causing an assert
 // if it exceeds a certain time
 
 class TimeCheck {
   public:
+    using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
+
     // The default timeout is chosen to be less than system server watchdog timeout
     static constexpr uint32_t kDefaultTimeOutMs = 5000;
 
-    TimeCheck(const char* tag, uint32_t timeoutMs = kDefaultTimeOutMs);
+    /**
+     * TimeCheck is a RAII object which will notify a callback
+     * on timer expiration or when the object is deallocated.
+     *
+     * TimeCheck is used as a watchdog and aborts by default on timer expiration.
+     * When it aborts, it will also send a debugger signal to pids passed in through
+     * setAudioHalPids().
+     *
+     * If the callback function is called for the timeout, it will not be called again
+     * on deallocation.
+     *
+     * \param tag       string associated with the TimeCheck object.
+     * \param onTimer   callback function with 2 parameters
+     *                      bool timeout  (which is true when the TimeCheck object
+     *                                    times out, false when the TimeCheck object is
+     *                                    destroyed or leaves scope before the timer expires.)
+     *                      float elapsedMs (the elapsed time to this event).
+     *                  When timeout is true, the callback will be called on a different thread.
+     *                  The destructor will cancel the callback but is not guaranteed
+     *                  to block for callback completion if it is already in progress
+     *                  (for maximum concurrency and reduced deadlock potential), so use proper
+     *                  lifetime analysis (e.g. shared or weak pointers).
+     * \param timeoutMs timeout in milliseconds.
+     *                  A zero timeout means no timeout is set -
+     *                  the callback is called only when
+     *                  the TimeCheck object is destroyed or leaves scope.
+     * \param crashOnTimeout true if the object issues an abort on timeout.
+     */
+    explicit TimeCheck(std::string tag, OnTimerFunc&& onTimer = {},
+            uint32_t timeoutMs = kDefaultTimeOutMs, bool crashOnTimeout = true);
+
+    TimeCheck() = default;
+    // Remove copy constructors as there should only be one call to the destructor.
+    // Move is kept implicitly disabled, but would be logically consistent if enabled.
+    TimeCheck(const TimeCheck& other) = delete;
+    TimeCheck& operator=(const TimeCheck&) = delete;
+
     ~TimeCheck();
+    static std::string toString();
     static void setAudioHalPids(const std::vector<pid_t>& pids);
     static std::vector<pid_t> getAudioHalPids();
 
   private:
-    static TimerThread* getTimeCheckThread();
-    static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
-    static void crash(const char* tag, std::chrono::system_clock::time_point startTime);
+    // Helper class for handling events.
+    // The usage here is const safe.
+    class TimeCheckHandler {
+    public:
+        const std::string tag;
+        const OnTimerFunc onTimer;
+        const bool crashOnTimeout;
+        const std::chrono::system_clock::time_point startTime;
+        const pid_t tid;
 
-    const TimerThread::Handle mTimerHandle;
+        void onCancel(TimerThread::Handle handle) const;
+        void onTimeout() const;
+    };
+
+    static TimerThread& getTimeCheckThread();
+    static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
+
+    // mTimeCheckHandler is immutable; it should be initialized first and destroyed last.
+    // Technically speaking, we do not need a shared_ptr here because TimerThread::cancelTask()
+    // is mutually exclusive of the callback, but the price paid for lifetime safety is minimal.
+    const std::shared_ptr<const TimeCheckHandler> mTimeCheckHandler;
+    const TimerThread::Handle mTimerHandle = TimerThread::INVALID_HANDLE;
 };
 
-};  // namespace android
+// Returns a TimeCheck object that sends info to MethodStatistics
+// obtained from getStatisticsForClass(className).
+TimeCheck makeTimeCheckStatsForClassMethod(
+        std::string_view className, std::string_view methodName);
+
+}  // namespace android::mediautils
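// Illustrative sketch (not part of the patch): using the new TimeCheck constructor as
// a scoped watchdog around a potentially blocking call. The tag, callback, and timeout
// are arbitrary; crashOnTimeout keeps its default of true, so the process aborts if
// the scope is not exited within the timeout.
#define LOG_TAG "TimeCheckExample"
#include <mediautils/TimeCheck.h>
#include <utils/Log.h>

void guardedBlockingCall() {
    android::mediautils::TimeCheck timeCheck(
            "guardedBlockingCall",
            [](bool timeout, float elapsedMs) {
                ALOGI("guardedBlockingCall %s after %.3f ms",
                        timeout ? "timed out" : "completed", elapsedMs);
            },
            3000 /* timeoutMs */);
    // ... the watched work goes here; the callback fires with timeout == false
    // when timeCheck is destroyed before the deadline.
}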
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
index cf457b8..b69e02c 100644
--- a/media/utils/include/mediautils/TimerThread.h
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -16,53 +16,260 @@
 
 #pragma once
 
+#include <atomic>
 #include <condition_variable>
+#include <deque>
 #include <functional>
 #include <map>
 #include <mutex>
+#include <string>
 #include <thread>
 
 #include <android-base/thread_annotations.h>
 
-namespace android {
+namespace android::mediautils {
 
 /**
  * A thread for deferred execution of tasks, with cancellation.
  */
 class TimerThread {
   public:
+    // A Handle is a time_point that serves as a unique key.  It is ordered.
     using Handle = std::chrono::steady_clock::time_point;
 
-    TimerThread();
-    ~TimerThread();
+    static inline constexpr Handle INVALID_HANDLE =
+            std::chrono::steady_clock::time_point::min();
 
     /**
-     * Schedule a task to be executed in the future (`timeout` duration from now).
-     * Returns a handle that can be used for cancellation.
+     * Schedules a task to be executed in the future (`timeout` duration from now).
+     *
+     * \param tag     string associated with the task.  This need not be unique,
+     *                as the Handle returned is used for cancelling.
+     * \param func    callback function that is invoked at the timeout.
+     * \param timeout timeout duration which is converted to milliseconds with at
+     *                least 45 integer bits.
+     *                A timeout of 0 (or negative) means the timer never expires
+     *                so func() is never called. These tasks are stored internally
+     *                and reported in the toString() until manually cancelled.
+     * \returns       a handle that can be used for cancellation.
      */
-    template <typename R, typename P>
-    Handle scheduleTask(std::function<void()>&& func, std::chrono::duration<R, P> timeout) {
-        auto deadline = std::chrono::steady_clock::now() + std::chrono::milliseconds(timeout);
-        return scheduleTaskAtDeadline(std::move(func), deadline);
+    Handle scheduleTask(
+            std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout);
+
+    /**
+     * Tracks a task that shows up on toString() until cancelled.
+     *
+     * \param tag     string associated with the task.
+     * \returns       a handle that can be used for cancellation.
+     */
+    Handle trackTask(std::string tag);
+
+    /**
+     * Cancels a task previously scheduled with scheduleTask()
+     * or trackTask().
+     *
+     * \returns true if cancelled. If the task has already executed
+     *          or if the handle doesn't exist, this is a no-op
+     *          and returns false.
+     */
+    bool cancelTask(Handle handle);
+
+    std::string toString(size_t retiredCount = SIZE_MAX) const;
+
+    /**
+     * Returns a string representation of the TimerThread queue.
+     *
+     * The queue is dumped in order of scheduling (not deadline).
+     */
+    std::string pendingToString() const;
+
+    /**
+     * Returns a string representation of the last retired tasks.
+     *
+     * These are tasks from trackTask() or scheduleTask() that have been
+     * cancelled.
+     *
+     * These are ordered by the time the task was retired.
+     *
+     * \param n is maximum number of tasks to dump.
+     */
+    std::string retiredToString(size_t n = SIZE_MAX) const;
+
+
+    /**
+     * Returns a string representation of the last timeout tasks.
+     *
+     * These are tasks from scheduleTask() which have timed out.
+     *
+     * These are ordered by the time the task timed out.
+     *
+     * \param n is maximum number of tasks to dump.
+     */
+    std::string timeoutToString(size_t n = SIZE_MAX) const;
+
+    /**
+     * Dumps a container of smart pointers to Requests into a string:
+     *
+     * "{ Request1 } { Request2 } ... { RequestN }"
+     */
+    template <typename T>
+    static std::string requestsToString(const T& containerRequests) {
+        std::string s;
+        // append seems to be faster than stringstream.
+        // https://stackoverflow.com/questions/18892281/most-optimized-way-of-concatenation-in-strings
+        for (const auto& request : containerRequests) {
+            s.append("{ ").append(request->toString()).append(" } ");
+        }
+        // If not empty, there's an extra space at the end, so we trim it off.
+        if (!s.empty()) s.pop_back();
+        return s;
     }
 
     /**
-     * Cancel a task, previously scheduled with scheduleTask().
-     * If the task has already executed, this is a no-op.
+     * Returns callstack of tid as a string.
      */
-    void cancelTask(Handle handle);
+    static std::string tidCallStackString(pid_t tid);
 
   private:
-    using TimePoint = std::chrono::steady_clock::time_point;
+    // To minimize movement of data, we pass around shared_ptrs to Requests.
+    // These are allocated and deallocated outside of the lock.
+    struct Request {
+        const std::chrono::system_clock::time_point scheduled;
+        const std::chrono::system_clock::time_point deadline; // deadline := scheduled + timeout
+                                                              // if deadline == scheduled, no
+                                                              // timeout, task not executed.
+        const pid_t tid;
+        const std::string tag;
 
-    std::condition_variable mCond;
-    std::mutex mMutex;
-    std::thread mThread;
-    std::map<TimePoint, std::function<void()>> mMonitorRequests GUARDED_BY(mMutex);
-    bool mShouldExit GUARDED_BY(mMutex) = false;
+        std::string toString() const;
+    };
 
-    void threadFunc();
-    Handle scheduleTaskAtDeadline(std::function<void()>&& func, TimePoint deadline);
+    // Deque of requests, in order of add().
+    // This class is thread-safe.
+    class RequestQueue {
+      public:
+        explicit RequestQueue(size_t maxSize)
+            : mRequestQueueMax(maxSize) {}
+
+        void add(std::shared_ptr<const Request>);
+
+        // return up to the last "n" requests retired.
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests,
+            size_t n = SIZE_MAX) const;
+
+      private:
+        const size_t mRequestQueueMax;
+        mutable std::mutex mRQMutex;
+        std::deque<std::pair<std::chrono::system_clock::time_point,
+                             std::shared_ptr<const Request>>>
+                mRequestQueue GUARDED_BY(mRQMutex);
+    };
+
+    // A storage map of tasks without timeouts.  There is no std::function<void()>
+    // required; it just tracks the tasks with the tag, scheduled time and tid.
+    // These tasks show up in pendingToString() until manually cancelled.
+    class NoTimeoutMap {
+        // This is a counter of the requests that have no timeout (timeout == 0).
+        std::atomic<size_t> mNoTimeoutRequests{};
+
+        mutable std::mutex mNTMutex;
+        std::map<Handle, std::shared_ptr<const Request>> mMap GUARDED_BY(mNTMutex);
+
+      public:
+        bool isValidHandle(Handle handle) const; // lock free
+        Handle add(std::shared_ptr<const Request> request);
+        std::shared_ptr<const Request> remove(Handle handle);
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+    };
+
+    // Monitor thread.
+    // This thread manages shared pointers to Requests and a function to
+    // call on timeout.
+    // This class is thread-safe.
+    class MonitorThread {
+        mutable std::mutex mMutex;
+        mutable std::condition_variable mCond;
+
+        // Ordered map of requests based on time of deadline.
+        //
+        std::map<Handle, std::pair<std::shared_ptr<const Request>, std::function<void()>>>
+                mMonitorRequests GUARDED_BY(mMutex);
+
+        RequestQueue& mTimeoutQueue; // locked internally, added to when request times out.
+
+        // Worker thread variables
+        bool mShouldExit GUARDED_BY(mMutex) = false;
+
+        // To avoid a race with initialization,
+        // mThread should be initialized last as the thread is launched immediately.
+        std::thread mThread;
+
+        void threadFunc();
+        Handle getUniqueHandle_l(std::chrono::milliseconds timeout) REQUIRES(mMutex);
+
+      public:
+        MonitorThread(RequestQueue &timeoutQueue);
+        ~MonitorThread();
+
+        Handle add(std::shared_ptr<const Request> request, std::function<void()>&& func,
+                std::chrono::milliseconds timeout);
+        std::shared_ptr<const Request> remove(Handle handle);
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+    };
+
+    // Analysis contains info deduced by analyzeTimeout().
+    //
+    // Summary is the result string from checking timeoutRequests to see if
+    // any might be caused by blocked calls in pendingRequests.
+    //
+    // Summary string is empty if there is no automatic actionable info.
+    //
+    // timeoutTid is the tid selected from timeoutRequests (if any).
+    //
+    // HALBlockedTid is the tid from pendingRequests that is believed to be
+    // blocked and causing the timeout.
+    // HALBlockedTid may be INVALID_PID if no suspected tid is found,
+    // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
+    //
+    static constexpr pid_t INVALID_PID = -1;
+    struct Analysis {
+        std::string summary;
+        pid_t timeoutTid = INVALID_PID;
+        pid_t HALBlockedTid = INVALID_PID;
+    };
+
+    // A HAL method is where the substring "Hidl" is in the class name.
+    // The tag should look like: ... Hidl ... :: ...
+    static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
+
+    // Returns analysis from the requests.
+    static Analysis analyzeTimeout(
+        const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+        const std::vector<std::shared_ptr<const Request>>& pendingRequests);
+
+    std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
+
+    // A no-timeout request is represented by a handle at the end of steady_clock time,
+    // counting down by the number of no-timeout requests previously issued.
+    // We manage them in the NoTimeoutMap, but conceptually they could be scheduled
+    // on the MonitorThread because those time handles won't expire within
+    // the lifetime of the device.
+    static inline Handle getIndexedHandle(size_t index) {
+        return std::chrono::time_point<std::chrono::steady_clock>::max() -
+                    std::chrono::time_point<std::chrono::steady_clock>::duration(index);
+    }
+
+    static constexpr size_t kRetiredQueueMax = 16;
+    RequestQueue mRetiredQueue{kRetiredQueueMax};  // locked internally
+
+    static constexpr size_t kTimeoutQueueMax = 16;
+    RequestQueue mTimeoutQueue{kTimeoutQueueMax};  // locked internally
+
+    NoTimeoutMap mNoTimeoutMap;  // locked internally
+
+    MonitorThread mMonitorThread{mTimeoutQueue};  // This should be initialized last because
+                                                  // the thread is launched immediately.
+                                                  // Locked internally.
 };
 
-}  // namespace android
+}  // namespace android::mediautils
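
The MonitorThread declared above keeps requests ordered by deadline and wakes a single worker thread when the earliest deadline passes. Below is a minimal, self-contained sketch of that kind of deadline loop using only the standard library; DeadlineMonitor and its members are illustrative names under assumed semantics, not the mediautils implementation.

#include <chrono>
#include <condition_variable>
#include <functional>
#include <map>
#include <mutex>
#include <thread>

// Simplified deadline monitor: entries are keyed by expiration time point and
// carry the callback to run on timeout.
class DeadlineMonitor {
    using TimePoint = std::chrono::steady_clock::time_point;
    std::mutex mMutex;
    std::condition_variable mCond;
    std::multimap<TimePoint, std::function<void()>> mPending;  // ordered by deadline
    bool mExit = false;
    std::thread mThread{[this] { loop(); }};  // declared last: the thread starts immediately

    void loop() {
        std::unique_lock<std::mutex> lock(mMutex);
        while (!mExit) {
            if (mPending.empty()) {
                mCond.wait(lock);  // nothing scheduled, sleep until add() or exit
                continue;
            }
            const TimePoint deadline = mPending.begin()->first;
            if (mCond.wait_until(lock, deadline) == std::cv_status::timeout) {
                auto node = mPending.extract(mPending.begin());
                lock.unlock();
                node.mapped()();   // run the timeout handler outside the lock
                lock.lock();
            }
            // otherwise a new request or exit changed the state; re-evaluate.
        }
    }

public:
    void add(std::chrono::milliseconds timeout, std::function<void()> onTimeout) {
        {
            std::lock_guard<std::mutex> guard(mMutex);
            mPending.emplace(std::chrono::steady_clock::now() + timeout, std::move(onTimeout));
        }
        mCond.notify_all();  // the earliest deadline may have changed
    }

    ~DeadlineMonitor() {
        {
            std::lock_guard<std::mutex> guard(mMutex);
            mExit = true;
        }
        mCond.notify_all();
        mThread.join();
    }
};

The no-timeout handles described above sidestep such a loop entirely: they are time points counted down from steady_clock max, so they never reach the front of a deadline wait.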
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 6593d56..5498ac5 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -26,3 +26,51 @@
         "media_synchronization_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "methodstatistics_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "methodstatistics_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "timecheck_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    sanitize: {
+        address: true,
+        cfi: true,
+        integer_overflow: true,
+        memtag_heap: true,
+    },
+
+    shared_libs: [
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "timecheck_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/methodstatistics_tests.cpp b/media/utils/tests/methodstatistics_tests.cpp
new file mode 100644
index 0000000..85c4ad5
--- /dev/null
+++ b/media/utils/tests/methodstatistics_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "methodstatistics_tests"
+
+#include <mediautils/MethodStatistics.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using CodeType = size_t;
+
+constexpr CodeType HELLO_CODE = 10;
+constexpr const char * HELLO_NAME = "hello";
+constexpr float HELLO_EVENTS[] = { 1.f, 3.f }; // needs lossless average
+
+constexpr CodeType WORLD_CODE = 21;
+constexpr const char * WORLD_NAME = "world";
+
+constexpr CodeType UNKNOWN_CODE = 12345;
+
+TEST(methodstatistics_tests, method_names) {
+    const MethodStatistics<CodeType> methodStatistics{
+            {HELLO_CODE, HELLO_NAME},
+            {WORLD_CODE, WORLD_NAME},
+    };
+
+    ASSERT_EQ(std::string(HELLO_NAME), methodStatistics.getMethodForCode(HELLO_CODE));
+    ASSERT_EQ(std::string(WORLD_NAME), methodStatistics.getMethodForCode(WORLD_CODE));
+    // an unknown code returns itself as a number.
+    ASSERT_EQ(std::to_string(UNKNOWN_CODE), methodStatistics.getMethodForCode(UNKNOWN_CODE));
+}
+
+TEST(methodstatistics_tests, events) {
+    MethodStatistics<CodeType> methodStatistics{
+            {HELLO_CODE, HELLO_NAME},
+            {WORLD_CODE, WORLD_NAME},
+    };
+
+    size_t n = 0;
+    float sum = 0.f;
+    for (const auto event : HELLO_EVENTS) {
+        methodStatistics.event(HELLO_CODE, event);
+        sum += event;
+        ++n;
+    }
+
+    const auto helloStats = methodStatistics.getStatistics(HELLO_CODE);
+    ASSERT_EQ((signed)n, helloStats.getN());
+    ASSERT_EQ(sum / n, helloStats.getMean());
+    ASSERT_EQ(n, methodStatistics.getMethodCount(HELLO_CODE));
+
+    const auto unsetStats = methodStatistics.getStatistics(UNKNOWN_CODE);
+    ASSERT_EQ(0, unsetStats.getN());
+    ASSERT_EQ(0.f, unsetStats.getMean());
+    ASSERT_EQ(0U, methodStatistics.getMethodCount(UNKNOWN_CODE));
+}
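
A hedged sketch of how the API exercised by these tests could look at a call site: the statistics object maps transaction codes to names, accumulates per-method timings via event(), and reports the aggregate through dump() (as the AudioFlinger dump code later in this change does). The codes, names, and exampleCallSite() wrapper are illustrative.

#include <mediautils/MethodStatistics.h>

#include <cstdio>

using android::mediautils::MethodStatistics;

void exampleCallSite() {
    static MethodStatistics<int> stats{
            {1, "createTrack"},
            {2, "createRecord"},
    };

    stats.event(1 /* code */, 2.5f /* elapsedMs */);  // a createTrack call took 2.5 ms

    // Known codes report their name; unknown codes are reported by number.
    printf("%s count: %zu\n",
            stats.getMethodForCode(1).c_str(), stats.getMethodCount(1));
    printf("%s\n", stats.dump().c_str());             // aggregate report
}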
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
new file mode 100644
index 0000000..6ebf44d
--- /dev/null
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "timecheck_tests"
+
+#include <mediautils/TimeCheck.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using namespace std::chrono_literals;
+
+namespace {
+
+TEST(timecheck_tests, success) {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    bool event = false;
+
+    {
+        TimeCheck timeCheck("success",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true;
+        }, 1000 /* msec */, false /* crash */);
+    }
+    ASSERT_TRUE(event);
+    ASSERT_FALSE(timeoutRegistered);
+    ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+TEST(timecheck_tests, timeout) {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    std::atomic_bool event = false;  // seq-cst implies acquire-release
+
+    {
+        TimeCheck timeCheck("timeout",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true; // store-release, must be last.
+        }, 1 /* msec */, false /* crash */);
+        std::this_thread::sleep_for(100ms);
+    }
+    ASSERT_TRUE(event); // load-acquire, must be first.
+    ASSERT_TRUE(timeoutRegistered); // only called once on failure, not on dealloc.
+    ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+// Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
+// EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
+
+} // namespace
\ No newline at end of file
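
A usage sketch of the scoped pattern these tests rely on, with the constructor arguments they exercise: the handler runs either on the monitor thread when the timeout fires, or from the guard's destructor with the measured duration. callIntoHal() and the 500 ms budget are placeholders.

#define LOG_TAG "timecheck_example"

#include <mediautils/TimeCheck.h>
#include <utils/Log.h>

void callIntoHal() {
    android::mediautils::TimeCheck guard("callIntoHal",
            [](bool timeout, float elapsedMs) {
        if (timeout) {
            // Invoked from the monitor thread if the guard is still alive
            // when the timeout elapses.
            ALOGW("callIntoHal timed out after %.1f ms", elapsedMs);
        } else {
            // Invoked from the guard's destructor with the measured duration.
            ALOGV("callIntoHal completed in %.1f ms", elapsedMs);
        }
    }, 500 /* msec */, false /* crash */);

    // ... blocking HAL or binder work here ...
}   // guard destroyed: the non-timeout branch runs if no timeout fired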
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index eb3c164..2813f72 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -79,6 +79,7 @@
 #include <media/nbaio/PipeReader.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
 #include <private/android_filesystem_config.h>
@@ -158,6 +159,92 @@
     return sExternalVibratorService;
 }
 
+// Creates an association between Binder transaction codes and method names for IAudioFlinger.
+#define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(createTrack) \
+BINDER_METHOD_ENTRY(createRecord) \
+BINDER_METHOD_ENTRY(sampleRate) \
+BINDER_METHOD_ENTRY(format) \
+BINDER_METHOD_ENTRY(frameCount) \
+BINDER_METHOD_ENTRY(latency) \
+BINDER_METHOD_ENTRY(setMasterVolume) \
+BINDER_METHOD_ENTRY(setMasterMute) \
+BINDER_METHOD_ENTRY(masterVolume) \
+BINDER_METHOD_ENTRY(masterMute) \
+BINDER_METHOD_ENTRY(setStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamMute) \
+BINDER_METHOD_ENTRY(streamVolume) \
+BINDER_METHOD_ENTRY(streamMute) \
+BINDER_METHOD_ENTRY(setMode) \
+BINDER_METHOD_ENTRY(setMicMute) \
+BINDER_METHOD_ENTRY(getMicMute) \
+BINDER_METHOD_ENTRY(setRecordSilenced) \
+BINDER_METHOD_ENTRY(setParameters) \
+BINDER_METHOD_ENTRY(getParameters) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(getInputBufferSize) \
+BINDER_METHOD_ENTRY(openOutput) \
+BINDER_METHOD_ENTRY(openDuplicateOutput) \
+BINDER_METHOD_ENTRY(closeOutput) \
+BINDER_METHOD_ENTRY(suspendOutput) \
+BINDER_METHOD_ENTRY(restoreOutput) \
+BINDER_METHOD_ENTRY(openInput) \
+BINDER_METHOD_ENTRY(closeInput) \
+BINDER_METHOD_ENTRY(invalidateStream) \
+BINDER_METHOD_ENTRY(setVoiceVolume) \
+BINDER_METHOD_ENTRY(getRenderPosition) \
+BINDER_METHOD_ENTRY(getInputFramesLost) \
+BINDER_METHOD_ENTRY(newAudioUniqueId) \
+BINDER_METHOD_ENTRY(acquireAudioSessionId) \
+BINDER_METHOD_ENTRY(releaseAudioSessionId) \
+BINDER_METHOD_ENTRY(queryNumberEffects) \
+BINDER_METHOD_ENTRY(queryEffect) \
+BINDER_METHOD_ENTRY(getEffectDescriptor) \
+BINDER_METHOD_ENTRY(createEffect) \
+BINDER_METHOD_ENTRY(moveEffects) \
+BINDER_METHOD_ENTRY(loadHwModule) \
+BINDER_METHOD_ENTRY(getPrimaryOutputSamplingRate) \
+BINDER_METHOD_ENTRY(getPrimaryOutputFrameCount) \
+BINDER_METHOD_ENTRY(setLowRamDevice) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(getAudioHwSyncForSession) \
+BINDER_METHOD_ENTRY(systemReady) \
+BINDER_METHOD_ENTRY(audioPolicyReady) \
+BINDER_METHOD_ENTRY(frameCountHAL) \
+BINDER_METHOD_ENTRY(getMicrophones) \
+BINDER_METHOD_ENTRY(setMasterBalance) \
+BINDER_METHOD_ENTRY(getMasterBalance) \
+BINDER_METHOD_ENTRY(setEffectSuspended) \
+BINDER_METHOD_ENTRY(setAudioHalPids) \
+BINDER_METHOD_ENTRY(setVibratorInfos) \
+BINDER_METHOD_ENTRY(updateSecondaryOutputs) \
+BINDER_METHOD_ENTRY(getMmapPolicyInfos) \
+BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
+BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
+BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+
+// singleton for Binder Method Statistics for IAudioFlinger
+static auto& getIAudioFlingerStatistics() {
+    using Code = android::AudioFlingerServerAdapter::Delegate::TransactionCode;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+    {(Code)media::BnAudioFlingerService::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
+
 class DevicesFactoryHalCallbackImpl : public DevicesFactoryHalCallback {
   public:
     void onNewDevicesAvailable() override {
@@ -276,7 +363,7 @@
     mMediaLogNotifier->run("MediaLogNotifier");
     std::vector<pid_t> halPids;
     mDevicesFactoryHal->getHalPids(&halPids);
-    TimeCheck::setAudioHalPids(halPids);
+    mediautils::TimeCheck::setAudioHalPids(halPids);
 
     // Notify that we have started (also called when audioserver service restarts)
     mediametrics::LogItem(mMetricsId)
@@ -316,7 +403,7 @@
 }
 
 status_t AudioFlinger::setAudioHalPids(const std::vector<pid_t>& pids) {
-  TimeCheck::setAudioHalPids(pids);
+  mediautils::TimeCheck::setAudioHalPids(pids);
   return NO_ERROR;
 }
 
@@ -828,6 +915,36 @@
             std::string s = GetUnreachableMemoryString(true /* contents */, 100 /* limit */);
             write(fd, s.c_str(), s.size());
         }
+        {
+            std::string timeCheckStats = getIAudioFlingerStatistics().dump();
+            dprintf(fd, "\nIAudioFlinger binder call profile:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+            extern mediautils::MethodStatistics<int>& getIEffectStatistics();
+            timeCheckStats = getIEffectStatistics().dump();
+            dprintf(fd, "\nIEffect binder call profile:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+            // Automatically fetch HIDL statistics.
+            std::shared_ptr<std::vector<std::string>> hidlClassNames =
+                    mediautils::getStatisticsClassesForModule(
+                            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
+            if (hidlClassNames) {
+                for (const auto& className : *hidlClassNames) {
+                    auto stats = mediautils::getStatisticsForClass(className);
+                    if (stats) {
+                        timeCheckStats = stats->dump();
+                        dprintf(fd, "\n%s binder call profile:\n", className.c_str());
+                        write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+                    }
+                }
+            }
+
+            timeCheckStats = mediautils::TimeCheck::toString();
+            dprintf(fd, "\nTimeCheck:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+            dprintf(fd, "\n");
+        }
     }
     return NO_ERROR;
 }
@@ -4417,9 +4534,20 @@
             break;
     }
 
-    std::string tag("IAudioFlinger command " +
-                    std::to_string(static_cast<std::underlying_type_t<TransactionCode>>(code)));
-    TimeCheck check(tag.c_str());
+    const std::string methodName = getIAudioFlingerStatistics().getMethodForCode(code);
+    mediautils::TimeCheck check(
+            std::string("IAudioFlinger::").append(methodName),
+            [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+        if (timeout) {
+            mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+                .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+                .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+                .record();
+        } else {
+            getIAudioFlingerStatistics().event(code, elapsedMs);
+        }
+    });
 
     // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
     //  - AudioFlinger can call into Audio Policy Service with its global mutex held
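
IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST above is an X-macro list: BINDER_METHOD_ENTRY is redefined locally (bracketed by push_macro/pop_macro) so that one list of method names expands into {transaction code, name} pairs. A reduced, self-contained sketch of the same technique, using made-up transaction codes and a plain array instead of MethodStatistics:

#include <cstdio>

// Single source of truth for the method list.
#define EXAMPLE_BINDER_METHOD_MACRO_LIST \
METHOD_ENTRY(start) \
METHOD_ENTRY(stop) \
METHOD_ENTRY(reset)

// Stand-ins for the TRANSACTION_<name> constants normally generated by AIDL.
enum Transaction { TRANSACTION_start = 1, TRANSACTION_stop, TRANSACTION_reset };

struct CodeName { int code; const char* name; };

static const CodeName kExampleMethods[] = {
#pragma push_macro("METHOD_ENTRY")
#undef METHOD_ENTRY
#define METHOD_ENTRY(ENTRY) { TRANSACTION_##ENTRY, #ENTRY },
    EXAMPLE_BINDER_METHOD_MACRO_LIST
#pragma pop_macro("METHOD_ENTRY")
};

int main() {
    for (const auto& m : kExampleMethods) {
        printf("%d -> %s\n", m.code, m.name);  // 1 -> start, 2 -> stop, 3 -> reset
    }
    return 0;
}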
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index b748f9d..e6d7cf7 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -39,7 +39,9 @@
 #include <media/ShmemCompat.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/TimeCheck.h>
 
 #include "AudioFlinger.h"
 
@@ -1751,6 +1753,47 @@
     disconnect(false);
 }
 
+// Creates an association between Binder transaction codes and method names for IEffect.
+#define IEFFECT_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(enable) \
+BINDER_METHOD_ENTRY(disable) \
+BINDER_METHOD_ENTRY(command) \
+BINDER_METHOD_ENTRY(disconnect) \
+BINDER_METHOD_ENTRY(getCblk) \
+
+// singleton for Binder Method Statistics for IEffect
+mediautils::MethodStatistics<int>& getIEffectStatistics() {
+    using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+        {(Code)media::BnEffect::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IEFFECT_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
+
+status_t AudioFlinger::EffectHandle::onTransact(
+        uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+    const std::string methodName = getIEffectStatistics().getMethodForCode(code);
+    mediautils::TimeCheck check(
+            std::string("IEffect::").append(methodName),
+            [code](bool timeout, float elapsedMs) {
+        if (timeout) {
+            ; // we do not time out on the effect interface right now.
+        } else {
+            getIEffectStatistics().event(code, elapsedMs);
+        }
+    }, 0 /* timeoutMs */);
+    return BnEffect::onTransact(code, data, reply, flags);
+}
+
 status_t AudioFlinger::EffectHandle::initCheck()
 {
     return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
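
A sketch of the zero-timeout pattern used by EffectHandle::onTransact above: with timeoutMs == 0 the request is tracked (per the NoTimeoutMap comments in TimeCheck.h) but never expires, so the handler acts purely as a completion hook that feeds MethodStatistics. getMyStatistics() and timedOperation() are illustrative.

#include <mediautils/MethodStatistics.h>
#include <mediautils/TimeCheck.h>

static android::mediautils::MethodStatistics<int>& getMyStatistics() {
    static android::mediautils::MethodStatistics<int> stats{{1, "timedOperation"}};
    return stats;
}

void timedOperation(int code) {
    android::mediautils::TimeCheck check("timedOperation",
            [code](bool timeout, float elapsedMs) {
        if (!timeout) {  // always the case with a zero timeout
            getMyStatistics().event(code, elapsedMs);
        }
    }, 0 /* timeoutMs */);

    // ... the work being measured; the statistics update when check goes out of scope.
}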
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e2bea67..42614cc 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -356,6 +356,8 @@
             const sp<media::IEffectClient>& effectClient,
             int32_t priority, bool notifyFramesProcessed);
     virtual ~EffectHandle();
+    status_t onTransact(
+            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
     virtual status_t initCheck();
 
     // IEffect
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/services/audioflinger/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index 9b4cc8a..f130f7c 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -11,6 +11,14 @@
           "include-filter": "com.google.android.gts.audio.AudioHostTest#testTwoChannelCapturing"
         }
       ]
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
     }
   ]
 }
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 18cf0c1..64c7923 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -305,6 +305,7 @@
     {
         return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
     }
+    bool isRouted() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
 
     DeviceVector mDevices; /**< current devices this output is routed to */
     wp<AudioPolicyMix> mPolicyMix;  // non NULL when used by a dynamic policy
@@ -437,6 +438,8 @@
 
     uint32_t getRecommendedMuteDurationMs() const override;
 
+    void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index dc2403c..0431619 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -56,7 +56,13 @@
 
     virtual void dump(String8 *dst, int spaces) const;
     virtual std::string toShortString() const;
-
+    /**
+     * @brief isInternal
+     * @return true if the client corresponds to an audio patch created from the createAudioPatch
+     * API or for call audio routing; false if the client corresponds to an AudioTrack,
+     * AudioRecord or HW Audio Source.
+     */
+    virtual bool isInternal() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -69,8 +75,16 @@
     bool isPreferredDeviceForExclusiveUse() const { return mPreferredDeviceForExclusiveUse; }
     virtual void setActive(bool active) { mActive = active; }
     bool active() const { return mActive; }
+    /**
+     * @brief hasPreferredDevice Note that internal clients use a preferred device for convenience,
+     * so this behavior is hidden here to prevent regressions (such as invalidating the tracks of
+     * clients following the same strategies...).
+     * @param activeOnly if true, only report a preferred device when the client is active.
+     * @return true if a preferred device is set and the client is not internal.
+     */
     bool hasPreferredDevice(bool activeOnly = false) const {
-        return mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE && (!activeOnly || mActive);
+        return !isInternal() &&
+                mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE && (!activeOnly || mActive);
     }
 
 private:
@@ -143,6 +157,14 @@
     }
     uint32_t getActivityCount() const { return mActivityCount; }
 
+    bool isInvalid() const {
+        return mIsInvalid;
+    }
+
+    void setIsInvalid() {
+        mIsInvalid = true;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -155,6 +177,7 @@
      * involved in a duplication.
      */
     uint32_t mActivityCount = 0;
+    bool mIsInvalid = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
@@ -211,6 +234,11 @@
         mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
         mSinkDevice = nullptr;
     }
+    bool belongsToOutput(const sp<SwAudioOutputDescriptor> &swOutput) const {
+        return swOutput != nullptr && mSwOutput.promote() == swOutput;
+    }
+    void setUseSwBridge() { mUseSwBridge = true; }
+    bool useSwBridge() const { return mUseSwBridge; }
     bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
@@ -229,6 +257,35 @@
     sp<DeviceDescriptor> mSinkDevice;
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
+    bool mUseSwBridge = false;
+};
+
+/**
+ * @brief The InternalSourceClientDescriptor class
+ * Specialized Client Descriptor for either a raw patch created from the @see createAudioPatch API
+ * or for internal audio patches managed by APM (e.g. phone call patches).
+ * Whatever bridge is created (software or hardware), we need a client to track the activity
+ * and manage volumes.
+ * The requested sink of the Audio Patch is expressed as a preferred device, which allows routing
+ * of the SwOutput. APM then performs checks on the UID of the requester (against the UID of
+ * audioserver) to prevent rerouting a SwOutput involved in raw patches.
+ */
+class InternalSourceClientDescriptor: public SourceClientDescriptor
+{
+public:
+    InternalSourceClientDescriptor(
+            audio_port_handle_t portId, uid_t uid, audio_attributes_t attributes,
+            const struct audio_port_config &config, const sp<DeviceDescriptor>& srcDevice,
+             const sp<DeviceDescriptor>& sinkDevice,
+            product_strategy_t strategy, VolumeSource volumeSource) :
+        SourceClientDescriptor(
+            portId, uid, attributes, config, srcDevice, AUDIO_STREAM_PATCH, strategy,
+            volumeSource)
+    {
+        setPreferredDeviceId(sinkDevice->getId());
+    }
+    bool isInternal() const override { return true; }
+    ~InternalSourceClientDescriptor() override = default;
 };
 
 class SourceClientCollection :
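
The hasPreferredDevice()/isInternal() interaction above deliberately hides the preferred device of internal clients so that patch and call bridges do not trigger invalidation of tracks following the same strategy. A reduced, self-contained sketch of that pattern with simplified stand-in classes (not the real descriptors):

#include <cassert>

class Client {
public:
    explicit Client(int preferredDeviceId) : mPreferredDeviceId(preferredDeviceId) {}
    virtual ~Client() = default;
    virtual bool isInternal() const { return false; }

    // Internal clients use a preferred device purely for routing convenience,
    // so it is hidden from policy checks that would otherwise react to it.
    bool hasPreferredDevice() const {
        return !isInternal() && mPreferredDeviceId != 0 /* i.e. AUDIO_PORT_HANDLE_NONE */;
    }

private:
    const int mPreferredDeviceId;
};

class InternalClient : public Client {
public:
    using Client::Client;
    bool isInternal() const override { return true; }
};

int main() {
    Client track(42);           // an application track with an explicit device
    InternalClient bridge(42);  // a call/patch bridge routed via the same device
    assert(track.hasPreferredDevice());
    assert(!bridge.hasPreferredDevice());  // invisible to invalidation logic
    return 0;
}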
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 5c342a1..009fa82 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -726,6 +726,14 @@
     return mProfile->recommendedMuteDurationMs;
 }
 
+void SwAudioOutputDescriptor::setTracksInvalidatedStatusByStrategy(product_strategy_t strategy) {
+    for (const auto &client : getClientIterable()) {
+        if (strategy == client->strategy()) {
+            client->setIsInvalid();
+        }
+    }
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 1132a29..d1655ef 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -100,7 +100,8 @@
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
-        {} /* Sources do not support secondary outputs*/, nullptr), mSrcDevice(srcDevice)
+        {} /* Sources do not support secondary outputs*/, nullptr),
+    mSrcDevice(srcDevice)
 {
 }
 
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index faf15d6..9f6b703 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -62,4 +62,7 @@
         "libaudiopolicymanager_interface_headers",
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
+    fuzz_config: {
+       cc: ["mnaganov@google.com"],
+    },
 }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c3c9753..5720551 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -664,12 +664,8 @@
     ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
 
-    disconnectTelephonyRxAudioSource();
-    // release TX patch if any
-    if (mCallTxPatch != 0) {
-        releaseAudioPatchInternal(mCallTxPatch->getHandle());
-        mCallTxPatch.clear();
-    }
+    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    disconnectTelephonyAudioSource(mCallTxSourceClient);
 
     auto telephonyRxModule =
         mHwModules.getModuleForDeviceType(AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
@@ -727,7 +723,7 @@
                 closeActiveClients(activeDesc);
             }
         }
-        mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
+        connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -735,36 +731,6 @@
     return NO_ERROR;
 }
 
-sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
-        bool isRx, const sp<DeviceDescriptor> &device, uint32_t delayMs) {
-    PatchBuilder patchBuilder;
-
-    if (device == nullptr) {
-        return nullptr;
-    }
-
-    // @TODO: still ignoring the address, or not dealing platform with multiple telephony devices
-    if (isRx) {
-        patchBuilder.addSink(device).
-                addSource(mAvailableInputDevices.getDevice(
-                    AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT));
-    } else {
-        patchBuilder.addSource(device).
-                addSink(mAvailableOutputDevices.getDevice(
-                    AUDIO_DEVICE_OUT_TELEPHONY_TX, String8(), AUDIO_FORMAT_DEFAULT));
-    }
-
-    audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
-    status_t status =
-            createAudioPatchInternal(patchBuilder.patch(), &patchHandle, mUidCached, delayMs);
-    ssize_t index = mAudioPatches.indexOfKey(patchHandle);
-    if (status != NO_ERROR || index < 0) {
-        ALOGW("%s() error %d creating %s audio patch", __func__, status, isRx ? "RX" : "TX");
-        return nullptr;
-    }
-    return mAudioPatches.valueAt(index);
-}
-
 bool AudioPolicyManager::isDeviceOfModule(
         const sp<DeviceDescriptor>& devDesc, const char *moduleId) const {
     sp<HwModule> module = mHwModules.getModuleFromName(moduleId);
@@ -779,20 +745,55 @@
 
 void AudioPolicyManager::connectTelephonyRxAudioSource()
 {
-    disconnectTelephonyRxAudioSource();
+    disconnectTelephonyAudioSource(mCallRxSourceClient);
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-    status_t status = startAudioSource(&source, &aa, &mCallRxSourceClientPort, 0/*uid*/);
-    ALOGE_IF(status != NO_ERROR, "%s failed to start Telephony Rx AudioSource", __func__);
+    mCallRxSourceClient = startAudioSourceInternal(&source, &aa, 0/*uid*/);
+    ALOGE_IF(mCallRxSourceClient == nullptr,
+             "%s failed to start Telephony Rx AudioSource", __func__);
 }
 
-void AudioPolicyManager::disconnectTelephonyRxAudioSource()
+void AudioPolicyManager::disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc)
 {
-    stopAudioSource(mCallRxSourceClientPort);
-    mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+    if (clientDesc == nullptr) {
+        return;
+    }
+    ALOGW_IF(stopAudioSource(clientDesc->portId()) != NO_ERROR,
+            "%s error stopping audio source", __func__);
+    clientDesc.clear();
+}
+
+void AudioPolicyManager::connectTelephonyTxAudioSource(
+        const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
+        uint32_t delayMs)
+{
+    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (srcDevice == nullptr || sinkDevice == nullptr) {
+        ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
+        return;
+    }
+    PatchBuilder patchBuilder;
+    patchBuilder.addSource(srcDevice).addSink(sinkDevice);
+    ALOGV("%s between source %s and sink %s", __func__,
+            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+    auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
+    const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+    struct audio_port_config source = {};
+    srcDevice->toAudioPortConfig(&source);
+    mCallTxSourceClient = new InternalSourceClientDescriptor(
+                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, sinkDevice,
+                mCommunnicationStrategy, toVolumeSource(aa));
+    audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
+    status_t status = connectAudioSourceToSink(
+                mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
+                delayMs);
+    ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    if (status == NO_ERROR) {
+        mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
+    }
 }
 
 void AudioPolicyManager::setPhoneState(audio_mode_t state)
@@ -860,11 +861,8 @@
                 rxDevices = mPrimaryOutput->devices();
             }
             if (oldState == AUDIO_MODE_IN_CALL) {
-                disconnectTelephonyRxAudioSource();
-                if (mCallTxPatch != 0) {
-                    releaseAudioPatchInternal(mCallTxPatch->getHandle());
-                    mCallTxPatch.clear();
-                }
+                disconnectTelephonyAudioSource(mCallRxSourceClient);
+                disconnectTelephonyAudioSource(mCallTxSourceClient);
             }
             setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
         }
@@ -874,8 +872,10 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
-        if (state != AUDIO_MODE_IN_CALL || desc != mPrimaryOutput) {
-            setOutputDevices(desc, newDevices, !newDevices.isEmpty(), 0 /*delayMs*/);
+        if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
+            bool forceRouting = !newDevices.isEmpty();
+            setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
+                             true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
 
@@ -935,6 +935,32 @@
     ALOGV("setSystemProperty() property %s, value %s", property, value);
 }
 
+// Find an MSD output profile compatible with the parameters passed.
+// When "directOnly" is set, restrict search to profiles for direct outputs.
+sp<IOProfile> AudioPolicyManager::getMsdProfileForOutput(
+                                                   const DeviceVector& devices,
+                                                   uint32_t samplingRate,
+                                                   audio_format_t format,
+                                                   audio_channel_mask_t channelMask,
+                                                   audio_output_flags_t flags,
+                                                   bool directOnly)
+{
+    flags = getRelevantFlags(flags, directOnly);
+
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    if (msdModule != nullptr) {
+        // for the msd module check if there are patches to the output devices
+        if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+            HwModuleCollection modules;
+            modules.add(msdModule);
+            return searchCompatibleProfileHwModules(
+                    modules, getMsdAudioOutDevices(), samplingRate, format, channelMask,
+                    flags, directOnly);
+        }
+    }
+    return nullptr;
+}
+
 // Find an output profile compatible with the parameters passed. When "directOnly" is set, restrict
 // search to profiles for direct outputs.
 sp<IOProfile> AudioPolicyManager::getProfileForOutput(
@@ -945,45 +971,65 @@
                                                    audio_output_flags_t flags,
                                                    bool directOnly)
 {
+    flags = getRelevantFlags(flags, directOnly);
+
+    return searchCompatibleProfileHwModules(
+            mHwModules, devices, samplingRate, format, channelMask, flags, directOnly);
+}
+
+audio_output_flags_t AudioPolicyManager::getRelevantFlags(
+                                            audio_output_flags_t flags, bool directOnly) {
     if (directOnly) {
-        // only retain flags that will drive the direct output profile selection
-        // if explicitly requested
-        static const uint32_t kRelevantFlags =
+        // only retain flags that will drive the direct output profile selection
+        // if explicitly requested
+        static const uint32_t kRelevantFlags =
                 (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
-                 AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
-        flags =
-            (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
+                 AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+        flags = (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
     }
+    return flags;
+}
 
+sp<IOProfile> AudioPolicyManager::searchCompatibleProfileHwModules(
+                                        const HwModuleCollection& hwModules,
+                                        const DeviceVector& devices,
+                                        uint32_t samplingRate,
+                                        audio_format_t format,
+                                        audio_channel_mask_t channelMask,
+                                        audio_output_flags_t flags,
+                                        bool directOnly) {
     sp<IOProfile> profile;
-
-    for (const auto& hwModule : mHwModules) {
+    for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-            if (!curProfile->isCompatibleProfile(devices,
-                    samplingRate, NULL /*updatedSamplingRate*/,
-                    format, NULL /*updatedFormat*/,
-                    channelMask, NULL /*updatedChannelMask*/,
-                    flags)) {
+            if (!curProfile->isCompatibleProfile(devices,
+                    samplingRate, NULL /*updatedSamplingRate*/,
+                    format, NULL /*updatedFormat*/,
+                    channelMask, NULL /*updatedChannelMask*/,
+                    flags)) {
+                continue;
+            }
+            // reject profiles not corresponding to a device currently available
+            if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
+                continue;
+            }
+            // reject profiles if connected device does not support codec
+            if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
+                continue;
+            }
+            if (!directOnly) {
+                return curProfile;
+            }
+
+            // when searching for direct outputs, if several profiles are compatible, give priority
+            // to one with offload capability
+            if (profile != 0 &&
+                    ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
                 continue;
-            }
-            // reject profiles not corresponding to a device currently available
-            if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
-                continue;
-            }
-            // reject profiles if connected device does not support codec
-            if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
-                continue;
-            }
-            if (!directOnly) return curProfile;
-            // when searching for direct outputs, if several profiles are compatible, give priority
-            // to one with offload capability
-            if (profile != 0 && ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
-                continue;
-            }
-            profile = curProfile;
-            if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-                break;
-            }
+            }
+            profile = curProfile;
+            if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+                break;
+            }
         }
     }
     return profile;
@@ -1510,6 +1556,27 @@
     return msdPatches;
 }
 
+bool AudioPolicyManager::isMsdPatch(const audio_patch_handle_t &handle) const {
+    ssize_t index = mAudioPatches.indexOfKey(handle);
+    if (index < 0) {
+        return false;
+    }
+    const sp<AudioPatch> patch = mAudioPatches.valueAt(index);
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    if (msdModule == nullptr) {
+        return false;
+    }
+    const struct audio_port_config *sink = &patch->mPatch.sinks[0];
+    if (getMsdAudioOutDevices().contains(mAvailableOutputDevices.getDeviceFromId(sink->id))) {
+        return true;
+    }
+    index = getMsdOutputPatches().indexOfKey(handle);
+    if (index < 0) {
+        return false;
+    }
+    return true;
+}
+
 status_t AudioPolicyManager::getMsdProfiles(bool hwAvSync,
                                             const InputProfileCollection &inputProfiles,
                                             const OutputProfileCollection &outputProfiles,
@@ -1939,8 +2006,7 @@
 
     // force device change if the output is inactive and no audio patch is already present.
     // check active before incrementing usage count
-    bool force = !outputDesc->isActive() &&
-            (outputDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE);
+    bool force = !outputDesc->isActive() && !outputDesc->isRouted();
 
     DeviceVector devices;
     sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
@@ -3126,9 +3192,10 @@
             }
         }
     }
-    return mEffects.registerEffect(desc, io, session, id,
-                                   (strategy == streamToStrategy(AUDIO_STREAM_MUSIC) ||
-                                   strategy == PRODUCT_STRATEGY_NONE));
+    bool isMusicEffect = (session != AUDIO_SESSION_OUTPUT_STAGE)
+                            && ((strategy == streamToStrategy(AUDIO_STREAM_MUSIC)
+                                    || strategy == PRODUCT_STRATEGY_NONE));
+    return mEffects.registerEffect(desc, io, session, id, isMusicEffect);
 }
 
 status_t AudioPolicyManager::unregisterEffect(int id)
@@ -3511,11 +3578,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
-        if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (outputDesc != mPrimaryOutput)) {
+        if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
+                (outputDesc != mPrimaryOutput && !isTelephonyRxOrTx(outputDesc))) {
             // As done in setDeviceConnectionState, we could also fix default device issue by
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
-            waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
+            bool forceRouting = !newDevices.isEmpty();
+            waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
+                                      true /*requiresMuteCheck*/,
+                                      !forceRouting /*requiresVolumeCheck*/);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -3796,7 +3867,22 @@
         __FUNCTION__, profile != 0 ? "" : "NOT ",
         (profile != 0 ? profile->getTagName().c_str() : "null"),
         config.sample_rate, config.format, config.channel_mask, output_flags);
-    return (profile != 0);
+
+    // also try the MSD module if compatible profile not found
+    if (profile == nullptr) {
+        profile = getMsdProfileForOutput(outputDevices,
+                                              config.sample_rate,
+                                              config.format,
+                                              config.channel_mask,
+                                              output_flags,
+                                              true /* directOnly */);
+        ALOGV("%s() MSD profile %sfound with name: %s, "
+            "sample rate: %u, format: 0x%x, channel_mask: 0x%x, output flags: 0x%x",
+            __FUNCTION__, profile != 0 ? "" : "NOT ",
+            (profile != 0 ? profile->getTagName().c_str() : "null"),
+            config.sample_rate, config.format, config.channel_mask, output_flags);
+    }
+    return (profile != nullptr);
 }
 
 bool AudioPolicyManager::isOffloadPossible(const audio_offload_info_t &offloadInfo,
@@ -3877,8 +3963,16 @@
     }
     flags = (audio_output_flags_t)((flags & relevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
 
-    DeviceVector outputDevices = mEngine->getOutputDevicesForAttributes(*attr);
+    DeviceVector engineOutputDevices = mEngine->getOutputDevicesForAttributes(*attr);
     for (const auto& hwModule : mHwModules) {
+        DeviceVector outputDevices = engineOutputDevices;
+        // the MSD module checks for different conditions and output devices
+        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+            if (!msdHasPatchesToAllDevices(engineOutputDevices.toTypeAddrVector())) {
+                continue;
+            }
+            outputDevices = getMsdAudioOutDevices();
+        }
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (!curProfile->isCompatibleProfile(outputDevices,
                     config->sample_rate, nullptr /*updatedSamplingRate*/,
@@ -3905,11 +3999,10 @@
                             ~AUDIO_DIRECT_OFFLOAD_SUPPORTED) |
                             AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED);
                 } else {
-                    directMode = (audio_direct_mode_t)(directMode |AUDIO_DIRECT_OFFLOAD_SUPPORTED);
+                    directMode = (audio_direct_mode_t)(directMode | AUDIO_DIRECT_OFFLOAD_SUPPORTED);
                 }
             } else {
-                directMode = (audio_direct_mode_t) (directMode |
-                                                    AUDIO_DIRECT_BITSTREAM_SUPPORTED);
+                directMode = (audio_direct_mode_t) (directMode | AUDIO_DIRECT_BITSTREAM_SUPPORTED);
             }
         }
     }
@@ -4064,17 +4157,15 @@
     return BAD_VALUE;
 }
 
-status_t AudioPolicyManager::createAudioPatchInternal(const struct audio_patch *patch,
-                                                      audio_patch_handle_t *handle,
-                                                      uid_t uid, uint32_t delayMs,
-                                                      const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch,
+                                              audio_patch_handle_t *handle,
+                                              uid_t uid)
 {
     ALOGV("%s", __func__);
     if (handle == NULL || patch == NULL) {
         return BAD_VALUE;
     }
     ALOGV("%s num sources %d num sinks %d", __func__, patch->num_sources, patch->num_sinks);
-
     if (!audio_patch_is_valid(patch)) {
         return BAD_VALUE;
     }
@@ -4082,7 +4173,6 @@
     if (patch->num_sources > 1) {
         return INVALID_OPERATION;
     }
-
     if (patch->sources[0].role != AUDIO_PORT_ROLE_SOURCE) {
         return INVALID_OPERATION;
     }
@@ -4092,6 +4182,86 @@
         }
     }
 
+    sp<DeviceDescriptor> srcDevice = mAvailableInputDevices.getDeviceFromId(patch->sources[0].id);
+    sp<DeviceDescriptor> sinkDevice = mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id);
+    if (srcDevice == nullptr || sinkDevice == nullptr) {
+        ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s between source %s and sink %s", __func__,
+            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+    audio_port_handle_t portId = PolicyAudioPort::getNextUniqueId();
+    // Default attributes, default volume priority, not to interfere with non-raw audio patches.
+    audio_attributes_t attributes = attributes_initializer(AUDIO_USAGE_MEDIA);
+    const struct audio_port_config *source = &patch->sources[0];
+    sp<SourceClientDescriptor> sourceDesc =
+            new InternalSourceClientDescriptor(
+                portId, uid, attributes, *source, srcDevice, sinkDevice,
+                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes));
+
+    status_t status =
+            connectAudioSourceToSink(sourceDesc, sinkDevice, patch, *handle, uid, 0 /* delayMs */);
+
+    if (status != NO_ERROR) {
+        return INVALID_OPERATION;
+    }
+    mAudioSources.add(portId, sourceDesc);
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::connectAudioSourceToSink(
+        const sp<SourceClientDescriptor>& sourceDesc, const sp<DeviceDescriptor> &sinkDevice,
+        const struct audio_patch *patch,
+        audio_patch_handle_t &handle,
+        uid_t uid, uint32_t delayMs)
+{
+    status_t status = createAudioPatchInternal(patch, &handle, uid, delayMs, sourceDesc);
+    if (status != NO_ERROR || mAudioPatches.indexOfKey(handle) < 0) {
+        ALOGW("%s patch panel could not connect device patch, error %d", __func__, status);
+        return INVALID_OPERATION;
+    }
+    sourceDesc->connect(handle, sinkDevice);
+    if (isMsdPatch(handle)) {
+        return NO_ERROR;
+    }
+    // SW Bridge? (@todo: HW bridge, keep track of HwOutput for device selection "reconsideration")
+    sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
+    ALOG_ASSERT(swOutput != nullptr, "%s: a swOutput shall always be associated", __func__);
+    if (swOutput->getClient(sourceDesc->portId()) != nullptr) {
+        ALOGW("%s source portId has already been attached to outputDesc", __func__);
+        goto FailurePatchAdded;
+    }
+    status = swOutput->start();
+    if (status != NO_ERROR) {
+        goto FailureSourceAdded;
+    }
+    swOutput->addClient(sourceDesc);
+    status = startSource(swOutput, sourceDesc, &delayMs);
+    if (status != NO_ERROR) {
+        ALOGW("%s failed to start source, error %d", __FUNCTION__, status);
+        goto FailureSourceActive;
+    }
+    if (delayMs != 0) {
+        usleep(delayMs * 1000);
+    }
+    return NO_ERROR;
+
+FailureSourceActive:
+    swOutput->stop();
+    releaseOutput(sourceDesc->portId());
+FailureSourceAdded:
+    sourceDesc->setSwOutput(nullptr);
+FailurePatchAdded:
+    releaseAudioPatchInternal(handle);
+    return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::createAudioPatchInternal(const struct audio_patch *patch,
+                                                      audio_patch_handle_t *handle,
+                                                      uid_t uid, uint32_t delayMs,
+                                                      const sp<SourceClientDescriptor>& sourceDesc)
+{
+    ALOGV("%s num sources %d num sinks %d", __func__, patch->num_sources, patch->num_sinks);
     sp<AudioPatch> patchDesc;
     ssize_t index = mAudioPatches.indexOfKey(*handle);
 
@@ -4280,7 +4450,7 @@
                 // in config XML to reach the sink so that is can be declared as available.
                 audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
                 sp<SwAudioOutputDescriptor> outputDesc = nullptr;
-                if (sourceDesc != nullptr) {
+                if (!sourceDesc->isInternal()) {
                     // take care of dynamic routing for SwOutput selection,
                     audio_attributes_t attributes = sourceDesc->attributes();
                     audio_stream_type_t stream = sourceDesc->stream();
@@ -4308,44 +4478,49 @@
                         return INVALID_OPERATION;
                     }
                     sourceDesc->setSwOutput(outputDesc);
+                } else {
+                    // Same for "raw patches" aka created from createAudioPatch API
+                    SortedVector<audio_io_handle_t> outputs =
+                            getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
+                    // if the sink device is reachable via an opened output stream, request to
+                    // go via this output stream by adding a second source to the patch
+                    // description
+                    output = selectOutput(outputs);
+                    if (output == AUDIO_IO_HANDLE_NONE) {
+                        ALOGE("%s no output available for internal patch sink", __func__);
+                        return INVALID_OPERATION;
+                    }
+                    outputDesc = mOutputs.valueFor(output);
+                    if (outputDesc->isDuplicated()) {
+                        ALOGV("%s output for device %s is duplicated",
+                              __func__, sinkDevice->toString().c_str());
+                        return INVALID_OPERATION;
+                    }
+                    sourceDesc->setSwOutput(outputDesc);
                 }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
                 // - audio HAL version is < 3.0
                 // - audio HAL version is >= 3.0 but no route has been declared between devices
-                // - called from startAudioSource (aka sourceDesc != nullptr) and source device does
-                //   not have a gain controller
+                // - called from startAudioSource (aka sourceDesc is not internal) and source device
+                //   does not have a gain controller
                 if (!srcDevice->hasSameHwModuleAs(sinkDevice) ||
                         (srcDevice->getModuleVersionMajor() < 3) ||
                         !srcDevice->getModule()->supportsPatch(srcDevice, sinkDevice) ||
-                        (sourceDesc != nullptr &&
+                        (!sourceDesc->isInternal() &&
                          srcDevice->getAudioPort()->getGains().size() == 0)) {
                     // support only one sink device for now to simplify output selection logic
                     if (patch->num_sinks > 1) {
                         return INVALID_OPERATION;
                     }
-                    if (sourceDesc == nullptr) {
-                        SortedVector<audio_io_handle_t> outputs =
-                                getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
-                        // if the sink device is reachable via an opened output stream, request to
-                        // go via this output stream by adding a second source to the patch
-                        // description
-                        output = selectOutput(outputs);
-                        if (output != AUDIO_IO_HANDLE_NONE) {
-                            outputDesc = mOutputs.valueFor(output);
-                            if (outputDesc->isDuplicated()) {
-                                ALOGV("%s output for device %s is duplicated",
-                                      __FUNCTION__, sinkDevice->toString().c_str());
-                                return INVALID_OPERATION;
-                            }
-                        }
-                    }
+                    sourceDesc->setUseSwBridge();
                     if (outputDesc != nullptr) {
                         audio_port_config srcMixPortConfig = {};
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
-                        srcMixPortConfig.ext.mix.usecase.stream = sourceDesc != nullptr ?
-                                    sourceDesc->stream() : AUDIO_STREAM_PATCH;
+                        srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+                                    mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
+                                    AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
                     }
                 }
@@ -4368,11 +4543,9 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle,
-                                                  uid_t uid)
+status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle, uid_t uid)
 {
-    ALOGV("releaseAudioPatch() patch %d", handle);
-
+    ALOGV("%s patch %d", __func__, handle);
     ssize_t index = mAudioPatches.indexOfKey(handle);
 
     if (index < 0) {
@@ -4384,11 +4557,21 @@
     if (patchDesc->getUid() != mUidCached && uid != patchDesc->getUid()) {
         return INVALID_OPERATION;
     }
-    return releaseAudioPatchInternal(handle);
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    for (size_t i = 0; i < mAudioSources.size(); i++)  {
+        sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
+        if (sourceDesc != nullptr && sourceDesc->getPatchHandle() == handle) {
+            portId = sourceDesc->portId();
+            break;
+        }
+    }
+    return portId != AUDIO_PORT_HANDLE_NONE ?
+                stopAudioSource(portId) : releaseAudioPatchInternal(handle);
 }
 
 status_t AudioPolicyManager::releaseAudioPatchInternal(audio_patch_handle_t handle,
-                                                       uint32_t delayMs)
+                                                       uint32_t delayMs,
+                                                       const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s patch %d", __func__, handle);
     if (mAudioPatches.indexOfKey(handle) < 0) {
@@ -4429,26 +4612,29 @@
             removeAudioPatch(patchDesc->getHandle());
             nextAudioPortGeneration();
             mpClientInterface->onAudioPatchListUpdate();
-            // SW Bridge
+            // SW or HW Bridge
+            sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+            audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
             if (patch->num_sources > 1 && patch->sources[1].type == AUDIO_PORT_TYPE_MIX) {
-                sp<SwAudioOutputDescriptor> outputDesc =
-                        mOutputs.getOutputFromId(patch->sources[1].id);
-                if (outputDesc == NULL) {
-                    ALOGW("%s output not found for id %d", __func__, patch->sources[0].id);
-                    // releaseOutput has already called closeOuput in case of direct output
-                    return NO_ERROR;
-                }
-                if (patchDesc->getHandle() != outputDesc->getPatchHandle()) {
-                    // force SwOutput patch removal as AF counter part patch has already gone.
-                    ALOGV("%s reset patch handle on Output as different from SWBridge", __func__);
-                    removeAudioPatch(outputDesc->getPatchHandle());
-                }
-                outputDesc->setPatchHandle(AUDIO_PATCH_HANDLE_NONE);
+                outputDesc = mOutputs.getOutputFromId(patch->sources[1].id);
+            } else if (patch->num_sources == 1 && sourceDesc != nullptr) {
+                outputDesc = sourceDesc->swOutput().promote();
+            }
+            if (outputDesc == nullptr) {
+                ALOGW("%s no output for id %d", __func__, patch->sources[0].id);
+                // releaseOutput has already called closeOutput in case of direct output
+                return NO_ERROR;
+            }
+            if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
+                resetOutputDevice(outputDesc);
+            } else {
+                // Reuse patch handle if still valid / do not force rerouting if still routed
+                patchHandle = outputDesc->getPatchHandle();
                 setOutputDevices(outputDesc,
                                  getNewOutputDevices(outputDesc, true /*fromCache*/),
-                                 true, /*force*/
+                                 patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
                                  0,
-                                 NULL);
+                                 patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
             }
         } else {
             return BAD_VALUE;
@@ -4694,6 +4880,18 @@
     return status;
 }
 
+sp<SourceClientDescriptor> AudioPolicyManager::startAudioSourceInternal(
+        const struct audio_port_config *source, const audio_attributes_t *attributes, uid_t uid)
+{
+    ALOGV("%s", __FUNCTION__);
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+
+    status_t status = startAudioSource(source, attributes, &portId, uid);
+    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
+    return mAudioSources.valueFor(portId);
+}
+
+
 status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
@@ -4718,52 +4916,9 @@
     PatchBuilder patchBuilder;
     patchBuilder.addSink(sinkDevice).addSource(srcDevice);
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-    status_t status =
-            createAudioPatchInternal(patchBuilder.patch(), &handle, mUidCached, 0, sourceDesc);
-    if (status != NO_ERROR || mAudioPatches.indexOfKey(handle) < 0) {
-        ALOGW("%s patch panel could not connect device patch, error %d", __func__, status);
-        return INVALID_OPERATION;
-    }
-    sourceDesc->connect(handle, sinkDevice);
-    // SW Bridge? (@todo: HW bridge, keep track of HwOutput for device selection "reconsideration")
-    sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
-    if (swOutput != 0) {
-        status = swOutput->start();
-        if (status != NO_ERROR) {
-            goto FailureSourceAdded;
-        }
-        if (swOutput->getClient(sourceDesc->portId()) != nullptr) {
-            ALOGW("%s source portId has already been attached to outputDesc", __func__);
-            goto FailureReleasePatch;
-        }
-        swOutput->addClient(sourceDesc);
-        uint32_t delayMs = 0;
-        status = startSource(swOutput, sourceDesc, &delayMs);
-        if (status != NO_ERROR) {
-            ALOGW("%s failed to start source, error %d", __FUNCTION__, status);
-            goto FailureSourceActive;
-        }
-        if (delayMs != 0) {
-            usleep(delayMs * 1000);
-        }
-    } else {
-        sp<HwAudioOutputDescriptor> hwOutputDesc = sourceDesc->hwOutput().promote();
-        if (hwOutputDesc != 0) {
-          //   create Hwoutput and add to mHwOutputs
-        } else {
-            ALOGW("%s source has neither SW nor HW output", __FUNCTION__);
-        }
-    }
-    return NO_ERROR;
 
-FailureSourceActive:
-    swOutput->stop();
-    releaseOutput(sourceDesc->portId());
-FailureSourceAdded:
-    sourceDesc->setSwOutput(nullptr);
-FailureReleasePatch:
-    releaseAudioPatchInternal(handle);
-    return INVALID_OPERATION;
+    return connectAudioSourceToSink(
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -5089,7 +5244,7 @@
             ALOGW("%s source has neither SW nor HW output", __FUNCTION__);
         }
     }
-    status_t status = releaseAudioPatchInternal(sourceDesc->getPatchHandle());
+    status_t status = releaseAudioPatchInternal(sourceDesc->getPatchHandle(), 0, sourceDesc);
     sourceDesc->disconnect();
     return status;
 }
@@ -6061,7 +6216,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
             connectAudioSource(sourceDesc);
         }
     }
@@ -6174,7 +6329,7 @@
                                 newDevices.types());
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source)) {
+            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
                 connectAudioSource(source);
             }
         }
@@ -6188,6 +6343,12 @@
             for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
                 mpClientInterface->invalidateStream(stream);
             }
+            for (audio_io_handle_t srcOut : srcOutputs) {
+                sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
+                if (desc == nullptr) continue;
+
+                desc->setTracksInvalidatedStatusByStrategy(psId);
+            }
         }
     }
 }
@@ -6708,6 +6869,8 @@
         muteWaitMs = 0;
     }
 
+    bool outputRouted = outputDesc->isRouted();
+
     // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
     // output profile or if new device is not supported AND previous device(s) is(are) still
     // available (otherwise reset device must be done on the output)
@@ -6724,8 +6887,7 @@
     //  AND force is not specified
     //  AND the output is connected by a valid audio patch.
     // Doing this check here allows the caller to call setOutputDevices() without conditions
-    if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) &&
-            !force && outputDesc->getPatchHandle() != AUDIO_PATCH_HANDLE_NONE) {
+    if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) && !force && outputRouted) {
         ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
               filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
         if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
@@ -6765,6 +6927,9 @@
                                                audio_patch_handle_t *patchHandle)
 {
     ssize_t index;
+    if (patchHandle == nullptr && !outputDesc->isRouted()) {
+        return INVALID_OPERATION;
+    }
     if (patchHandle) {
         index = mAudioPatches.indexOfKey(*patchHandle);
     } else {
@@ -7509,7 +7674,10 @@
         routedDevices.add(device);
     }
     for (const auto& client : activeClients) {
-        // TODO: b/175343099 only travel the valid client
+        if (client->isInvalid()) {
+            // No need to take care about invalidated clients.
+            continue;
+        }
         sp<DeviceDescriptor> preferredDevice =
                 mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId());
         if (mEngine->getOutputDevicesForAttributes(
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 4d307cf..68ae8cb 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -263,10 +263,7 @@
         virtual status_t getAudioPort(struct audio_port_v7 *port);
         virtual status_t createAudioPatch(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle,
-                                           uid_t uid) {
-            return createAudioPatchInternal(patch, handle, uid);
-        }
-
+                                           uid_t uid);
         virtual status_t releaseAudioPatch(audio_patch_handle_t handle,
                                               uid_t uid);
         virtual status_t listAudioPatches(unsigned int *num_patches,
@@ -638,13 +635,22 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
 
         bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClientPort != AUDIO_PORT_HANDLE_NONE
-                && source == mAudioSources.valueFor(mCallRxSourceClientPort);
+            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
         }
 
         void connectTelephonyRxAudioSource();
 
-        void disconnectTelephonyRxAudioSource();
+        void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
+
+        void connectTelephonyTxAudioSource(const sp<DeviceDescriptor> &srcdevice,
+                                           const sp<DeviceDescriptor> &sinkDevice,
+                                           uint32_t delayMs);
+
+        bool isTelephonyRxOrTx(const sp<SwAudioOutputDescriptor>& desc) const {
+            return (mCallRxSourceClient != nullptr && mCallRxSourceClient->belongsToOutput(desc))
+                    || (mCallTxSourceClient != nullptr
+                    &&  mCallTxSourceClient->belongsToOutput(desc));
+        }
 
         /**
          * @brief updates routing for all inputs.
@@ -762,6 +768,15 @@
                                           audio_channel_mask_t channelMask,
                                           audio_output_flags_t flags,
                                           bool directOnly);
+        /**
+        * Same as getProfileForOutput, but it looks for an MSD profile
+        */
+        sp<IOProfile> getMsdProfileForOutput(const DeviceVector &devices,
+                                           uint32_t samplingRate,
+                                           audio_format_t format,
+                                           audio_channel_mask_t channelMask,
+                                           audio_output_flags_t flags,
+                                           bool directOnly);
 
         audio_io_handle_t selectOutputForMusicEffects();
 
@@ -851,6 +866,12 @@
         status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
+        status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
+                                          const sp<DeviceDescriptor> &sinkDevice,
+                                          const struct audio_patch *patch,
+                                          audio_patch_handle_t &handle,
+                                          uid_t uid, uint32_t delayMs);
+
         sp<SourceClientDescriptor> getSourceForAttributesOnOutput(audio_io_handle_t output,
                                                                   const audio_attributes_t &attr);
         void clearAudioSourcesForOutput(audio_io_handle_t output);
@@ -901,8 +922,6 @@
 
         SoundTriggerSessionCollection mSoundTriggerSessions;
 
-        sp<AudioPatch> mCallTxPatch;
-
         HwAudioOutputCollection mHwOutputs;
         SourceClientCollection mAudioSources;
 
@@ -943,7 +962,8 @@
 
         // The port handle of the hardware audio source created internally for the Call RX audio
         // end point.
-        audio_port_handle_t mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+        sp<SourceClientDescriptor> mCallRxSourceClient;
+        sp<SourceClientDescriptor> mCallTxSourceClient;
 
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
@@ -975,7 +995,13 @@
         // Called by setDeviceConnectionState()
         status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
                                    const char* device_name, media::AudioPort* aidPort);
+        bool isMsdPatch(const audio_patch_handle_t &handle) const;
+
 private:
+        sp<SourceClientDescriptor> startAudioSourceInternal(
+                const struct audio_port_config *source, const audio_attributes_t *attributes,
+                uid_t uid);
+
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
         // Add or remove AC3 DTS encodings based on user preferences.
@@ -1120,21 +1146,25 @@
          * @param[out] handle patch handle to be provided if patch installed correctly
          * @param[in] uid of the client
          * @param[in] delayMs if required
-         * @param[in] sourceDesc [optional] in case of external source, source client to be
-         * configured by the patch, i.e. assigning an Output (HW or SW)
+         * @param[in] sourceDesc source client to be configured when creating the patch, i.e.
+         *            assigning an Output (HW or SW) used for volume control.
          * @return NO_ERROR if patch installed correctly, error code otherwise.
          */
         status_t createAudioPatchInternal(const struct audio_patch *patch,
                                           audio_patch_handle_t *handle,
-                                          uid_t uid, uint32_t delayMs = 0,
-                                          const sp<SourceClientDescriptor>& sourceDesc = nullptr);
+                                          uid_t uid, uint32_t delayMs,
+                                          const sp<SourceClientDescriptor>& sourceDesc);
         /**
          * @brief releaseAudioPatchInternal internal function to remove an audio patch
          * @param[in] handle of the patch to be removed
          * @param[in] delayMs if required
+         * @param[in] sourceDesc [optional] in case of external source, source client to be
+         * unrouted from the patch, i.e. releasing its assigned Output (HW or SW)
          * @return NO_ERROR if patch removed correctly, error code otherwise.
          */
-        status_t releaseAudioPatchInternal(audio_patch_handle_t handle, uint32_t delayMs = 0);
+        status_t releaseAudioPatchInternal(audio_patch_handle_t handle,
+                                           uint32_t delayMs = 0,
+                                           const sp<SourceClientDescriptor>& sourceDesc = nullptr);
 
         status_t installPatch(const char *caller,
                 audio_patch_handle_t *patchHandle,
@@ -1180,6 +1210,21 @@
         // without duplicating them if already present
         void addPortProfilesToVector(sp<IOProfile> outputProfile,
                                     AudioProfileVector& audioProfilesVector);
+
+        // Searches for a compatible profile with the sample rate, audio format and channel mask
+        // in the list of passed HwModule(s).
+        // Returns a compatible profile if found, nullptr otherwise.
+        sp<IOProfile> searchCompatibleProfileHwModules (
+                                            const HwModuleCollection& hwModules,
+                                            const DeviceVector& devices,
+                                            uint32_t samplingRate,
+                                            audio_format_t format,
+                                            audio_channel_mask_t channelMask,
+                                            audio_output_flags_t flags,
+                                            bool directOnly);
+
+        // Filters only the relevant flags for getProfileForOutput
+        audio_output_flags_t getRelevantFlags (audio_output_flags_t flags, bool directOnly);
 };
 
 };
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 460eddb..c9cfbca 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -90,13 +90,13 @@
             if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
                     && ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
                 if (!callAudioInterceptionAllowed(attributionSource)) {
-                    ALOGE(("permission denied: modify audio routing not allowed "
-                           "for attributionSource %s"), attributionSource.toString().c_str());
+                    ALOGE("%s: call audio interception not allowed for attribution source: %s",
+                           __func__, attributionSource.toString().c_str());
                     return PERMISSION_DENIED;
                 }
             } else if (!modifyAudioRoutingAllowed(attributionSource)) {
-                ALOGE(("permission denied: modify audio routing not allowed "
-                       "for attributionSource %s"), attributionSource.toString().c_str());
+                ALOGE("%s: modify audio routing not allowed for attribution source: %s",
+                        __func__, attributionSource.toString().c_str());
                 return PERMISSION_DENIED;
             }
         } else {
@@ -485,6 +485,7 @@
     status_t status = mAudioPolicyManager->startOutput(portId);
     if (status == NO_ERROR) {
         client->active = true;
+        onUpdateActiveSpatializerTracks_l();
     }
     return binderStatusFromStatusT(status);
 }
@@ -522,6 +523,7 @@
     status_t status = mAudioPolicyManager->stopOutput(portId);
     if (status == NO_ERROR) {
         client->active = false;
+        onUpdateActiveSpatializerTracks_l();
     }
     return status;
 }
@@ -552,8 +554,10 @@
             client->io, client->stream, client->session);
     }
     Mutex::Autolock _l(mLock);
+    if (client != nullptr && client->active) {
+        onUpdateActiveSpatializerTracks_l();
+    }
     mAudioPlaybackClients.removeItem(portId);
-
     // called from internal thread: no need to clear caller identity
     mAudioPolicyManager->releaseOutput(portId);
 }
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 644d4be..c78a0b3 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -39,6 +39,7 @@
 #include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioParameter.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
@@ -60,6 +61,120 @@
 
 static const String16 sManageAudioPolicyPermission("android.permission.MANAGE_AUDIO_POLICY");
 
+// Creates an association between Binder code to name for IAudioPolicyService.
+#define IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(onNewAudioModulesAvailable) \
+BINDER_METHOD_ENTRY(setDeviceConnectionState) \
+BINDER_METHOD_ENTRY(getDeviceConnectionState) \
+BINDER_METHOD_ENTRY(handleDeviceConfigChange) \
+BINDER_METHOD_ENTRY(setPhoneState) \
+BINDER_METHOD_ENTRY(setForceUse) \
+BINDER_METHOD_ENTRY(getForceUse) \
+BINDER_METHOD_ENTRY(getOutput) \
+BINDER_METHOD_ENTRY(getOutputForAttr) \
+BINDER_METHOD_ENTRY(startOutput) \
+BINDER_METHOD_ENTRY(stopOutput) \
+BINDER_METHOD_ENTRY(releaseOutput) \
+BINDER_METHOD_ENTRY(getInputForAttr) \
+BINDER_METHOD_ENTRY(startInput) \
+BINDER_METHOD_ENTRY(stopInput) \
+BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(initStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(setVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMaxVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMinVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getStrategyForStream) \
+BINDER_METHOD_ENTRY(getDevicesForAttributes) \
+BINDER_METHOD_ENTRY(getOutputForEffect) \
+BINDER_METHOD_ENTRY(registerEffect) \
+BINDER_METHOD_ENTRY(unregisterEffect) \
+BINDER_METHOD_ENTRY(setEffectEnabled) \
+BINDER_METHOD_ENTRY(moveEffectsToIo) \
+BINDER_METHOD_ENTRY(isStreamActive) \
+BINDER_METHOD_ENTRY(isStreamActiveRemotely) \
+BINDER_METHOD_ENTRY(isSourceActive) \
+BINDER_METHOD_ENTRY(queryDefaultPreProcessing) \
+BINDER_METHOD_ENTRY(addSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(addStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(removeSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(removeStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(setSupportedSystemUsages) \
+BINDER_METHOD_ENTRY(setAllowedCapturePolicy) \
+BINDER_METHOD_ENTRY(getOffloadSupport) \
+BINDER_METHOD_ENTRY(isDirectOutputSupported) \
+BINDER_METHOD_ENTRY(listAudioPorts) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(setAudioPortCallbacksEnabled) \
+BINDER_METHOD_ENTRY(setAudioVolumeGroupCallbacksEnabled) \
+BINDER_METHOD_ENTRY(acquireSoundTriggerSession) \
+BINDER_METHOD_ENTRY(releaseSoundTriggerSession) \
+BINDER_METHOD_ENTRY(getPhoneState) \
+BINDER_METHOD_ENTRY(registerPolicyMixes) \
+BINDER_METHOD_ENTRY(setUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(setUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(startAudioSource) \
+BINDER_METHOD_ENTRY(stopAudioSource) \
+BINDER_METHOD_ENTRY(setMasterMono) \
+BINDER_METHOD_ENTRY(getMasterMono) \
+BINDER_METHOD_ENTRY(getStreamVolumeDB) \
+BINDER_METHOD_ENTRY(getSurroundFormats) \
+BINDER_METHOD_ENTRY(getReportedSurroundFormats) \
+BINDER_METHOD_ENTRY(getHwOffloadFormatsSupportedForBluetoothMedia) \
+BINDER_METHOD_ENTRY(setSurroundFormatEnabled) \
+BINDER_METHOD_ENTRY(setAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setActiveAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setA11yServicesUids) \
+BINDER_METHOD_ENTRY(setCurrentImeUid) \
+BINDER_METHOD_ENTRY(isHapticPlaybackSupported) \
+BINDER_METHOD_ENTRY(isUltrasoundSupported) \
+BINDER_METHOD_ENTRY(listAudioProductStrategies) \
+BINDER_METHOD_ENTRY(getProductStrategyFromAudioAttributes) \
+BINDER_METHOD_ENTRY(listAudioVolumeGroups) \
+BINDER_METHOD_ENTRY(getVolumeGroupFromAudioAttributes) \
+BINDER_METHOD_ENTRY(setRttEnabled) \
+BINDER_METHOD_ENTRY(isCallScreenModeSupported) \
+BINDER_METHOD_ENTRY(setDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndStrategy) \
+BINDER_METHOD_ENTRY(setDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(addDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(clearDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndCapturePreset) \
+BINDER_METHOD_ENTRY(registerSoundTriggerCaptureStateListener) \
+BINDER_METHOD_ENTRY(getSpatializer) \
+BINDER_METHOD_ENTRY(canBeSpatialized) \
+BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+
+// singleton for Binder Method Statistics for IAudioPolicyService
+static auto& getIAudioPolicyServiceStatistics() {
+    using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+        {(Code)media::BnAudioPolicyService::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
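
The list above is an X-macro: BINDER_METHOD_ENTRY is redefined locally (with push_macro/pop_macro preserving any prior definition) so that the same list expands into transaction-code/name pairs. A minimal, self-contained sketch of the technique, using hypothetical method names and a plain std::map instead of mediautils::MethodStatistics:

#include <cstdio>
#include <map>
#include <string>

// Single source of truth: one entry per method.
#define METHOD_LIST \
METHOD_ENTRY(openStream) \
METHOD_ENTRY(closeStream) \
METHOD_ENTRY(setVolume)

// Expansion 1: an enum of transaction codes.
#define METHOD_ENTRY(name) kCode_##name,
enum Code { METHOD_LIST };
#undef METHOD_ENTRY

// Expansion 2: a code-to-name table built from the very same list.
#define METHOD_ENTRY(name) {kCode_##name, #name},
static const std::map<int, std::string> kMethodNames{ METHOD_LIST };
#undef METHOD_ENTRY

int main() {
    std::printf("%s\n", kMethodNames.at(kCode_setVolume).c_str());  // prints "setVolume"
    return 0;
}
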
+
 // ----------------------------------------------------------------------------
 
 static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
@@ -114,6 +229,13 @@
 
 void AudioPolicyService::onFirstRef()
 {
+    // Log an AudioPolicy "constructor" mediametrics event on first ref.
+    // This records the time it takes to load the audio modules and devices.
+    mediametrics::Defer defer([beginNs = systemTime()] {
+        mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+            .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+            .record(); });
     {
         Mutex::Autolock _l(mLock);
 
@@ -397,6 +519,7 @@
             if (status == NO_ERROR && currentOutput == newOutput) {
                 return;
             }
+            size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
             mLock.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
@@ -404,7 +527,7 @@
                 mLock.lock();
                 return;
             }
-            status = mSpatializer->attachOutput(newOutput);
+            status = mSpatializer->attachOutput(newOutput, numActiveTracks);
             mLock.lock();
             if (status != NO_ERROR) {
                 mAudioPolicyManager->releaseSpatializerOutput(newOutput);
@@ -421,6 +544,34 @@
     }
 }
 
+size_t AudioPolicyService::countActiveClientsOnOutput_l(audio_io_handle_t output) REQUIRES(mLock) {
+    size_t count = 0;
+    for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
+        auto client = mAudioPlaybackClients.valueAt(i);
+        if (client->io == output && client->active) {
+            count++;
+        }
+    }
+    return count;
+}
+
+void AudioPolicyService::onUpdateActiveSpatializerTracks_l() {
+    if (mSpatializer == nullptr) {
+        return;
+    }
+    mOutputCommandThread->updateActiveSpatializerTracksCommand();
+}
+
+void AudioPolicyService::doOnUpdateActiveSpatializerTracks()
+{
+    Mutex::Autolock _l(mLock);
+    if (mSpatializer == nullptr) {
+        return;
+    }
+    mSpatializer->updateActiveTracks(countActiveClientsOnOutput_l(mSpatializer->getOutput()));
+}
+
+
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
                                                 audio_patch_handle_t *handle,
                                                 int delayMs)
@@ -1032,6 +1183,12 @@
         mPackageManager.dump(fd);
 
         dumpReleaseLock(mLock, locked);
+
+        {
+            std::string timeCheckStats = getIAudioPolicyServiceStatistics().dump();
+            dprintf(fd, "\nIAudioPolicyService binder call profile\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+        }
     }
     return NO_ERROR;
 }
@@ -1137,8 +1294,20 @@
             break;
     }
 
-    std::string tag("IAudioPolicyService command " + std::to_string(code));
-    TimeCheck check(tag.c_str());
+    const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
+    mediautils::TimeCheck check(
+            std::string("IAudioPolicyService::").append(methodName),
+            [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+        if (timeout) {
+            mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+                .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+                .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+                .record();
+        } else {
+            getIAudioPolicyServiceStatistics().event(code, elapsedMs);
+        }
+    });
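
The guard above ties each incoming binder call to a callback that, on return, either logs a timeout mediametrics item or feeds the per-method statistics. A minimal sketch of the underlying measure-on-destruction pattern, using only std::chrono (the real mediautils::TimeCheck additionally arms a watchdog on a separate thread, which is not reproduced here):

#include <chrono>
#include <cstdio>
#include <functional>
#include <utility>

// Calls the supplied callback with the elapsed milliseconds when it goes out of scope.
class ScopeTimer {
public:
    explicit ScopeTimer(std::function<void(float /*elapsedMs*/)> onDone)
        : mOnDone(std::move(onDone)), mStart(std::chrono::steady_clock::now()) {}
    ~ScopeTimer() {
        using namespace std::chrono;
        const float elapsedMs =
                duration<float, std::milli>(steady_clock::now() - mStart).count();
        mOnDone(elapsedMs);
    }
private:
    std::function<void(float)> mOnDone;
    std::chrono::steady_clock::time_point mStart;
};

int main() {
    {
        ScopeTimer check([](float elapsedMs) {
            std::printf("transaction took %.3f ms\n", elapsedMs);  // e.g. record statistics here
        });
        // ... dispatch the transaction ...
    }
    return 0;
}
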
 
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
@@ -1460,6 +1629,9 @@
     }
 }
 
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+}
+
 void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
     updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
 }
@@ -1952,8 +2124,8 @@
                     mLock.lock();
                     } break;
 
-                case CHECK_SPATIALIZER: {
-                    ALOGV("AudioCommandThread() processing updateUID states");
+                case CHECK_SPATIALIZER_OUTPUT: {
+                    ALOGV("AudioCommandThread() processing check spatializer");
                     svc = mService.promote();
                     if (svc == 0) {
                         break;
@@ -1963,6 +2135,17 @@
                     mLock.lock();
                     } break;
 
+                case UPDATE_ACTIVE_SPATIALIZER_TRACKS: {
+                    ALOGV("AudioCommandThread() processing update spatializer tracks");
+                    svc = mService.promote();
+                    if (svc == 0) {
+                        break;
+                    }
+                    mLock.unlock();
+                    svc->doOnUpdateActiveSpatializerTracks();
+                    mLock.lock();
+                    } break;
+
                 default:
                     ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
                 }
@@ -2273,11 +2456,19 @@
 void AudioPolicyService::AudioCommandThread::checkSpatializerCommand()
 {
     sp<AudioCommand>command = new AudioCommand();
-    command->mCommand = CHECK_SPATIALIZER;
+    command->mCommand = CHECK_SPATIALIZER_OUTPUT;
     ALOGV("AudioCommandThread() adding check spatializer");
     sendCommand(command);
 }
 
+void AudioPolicyService::AudioCommandThread::updateActiveSpatializerTracksCommand()
+{
+    sp<AudioCommand>command = new AudioCommand();
+    command->mCommand = UPDATE_ACTIVE_SPATIALIZER_TRACKS;
+    ALOGV("AudioCommandThread() adding update active spatializer tracks");
+    sendCommand(command);
+}
+
 status_t AudioPolicyService::AudioCommandThread::sendCommand(sp<AudioCommand>& command, int delayMs)
 {
     {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index c6fd465..7a4b80a 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -350,9 +350,13 @@
      * by audio policy manager and attach/detach the spatializer effect accordingly.
      */
     void onCheckSpatializer() override;
-    void onCheckSpatializer_l();
+    void onCheckSpatializer_l() REQUIRES(mLock);
     void doOnCheckSpatializer();
 
+    void onUpdateActiveSpatializerTracks_l() REQUIRES(mLock);
+    void doOnUpdateActiveSpatializerTracks();
+
+
     void setEffectSuspended(int effectId,
                             audio_session_t sessionId,
                             bool suspended);
@@ -446,7 +450,8 @@
         void onUidGone(uid_t uid, bool disabled) override;
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
-                int32_t capability);
+                int32_t capability) override;
+        void onUidProcAdjChanged(uid_t uid) override;
 
         void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
         void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -524,7 +529,8 @@
             AUDIO_MODULES_UPDATE,
             ROUTING_UPDATED,
             UPDATE_UID_STATES,
-            CHECK_SPATIALIZER
+            CHECK_SPATIALIZER_OUTPUT, // verify if spatializer effect should be created or moved
+            UPDATE_ACTIVE_SPATIALIZER_TRACKS // Update active track counts on spatializer output
         };
 
         AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -574,6 +580,8 @@
                     void        routingChangedCommand();
                     void        updateUidStatesCommand();
                     void        checkSpatializerCommand();
+                    void        updateActiveSpatializerTracksCommand();
+
                     void        insertCommand_l(AudioCommand *command, int delayMs = 0);
     private:
         class AudioCommandData;
@@ -998,6 +1006,8 @@
     void loadAudioPolicyManager();
     void unloadAudioPolicyManager();
 
+    size_t countActiveClientsOnOutput_l(audio_io_handle_t output) REQUIRES(mLock);
+
     mutable Mutex mLock;    // prevents concurrent access to AudioPolicy manager functions changing
                             // device connection state  or routing
     // Note: lock acquisition order is always mLock > mEffectsLock:
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 54d9094..579b852 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -300,6 +300,7 @@
         if (levelChanged && mEngine != nullptr) {
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL, std::vector<SpatializationLevel>{level});
         }
+        checkSensorsState_l();
     }
 
     if (levelChanged) {
@@ -374,6 +375,7 @@
 
     if (mPoseController != nullptr) {
         mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
+        checkSensorsState_l();
     }
 
     return Status::ok();
@@ -447,9 +449,7 @@
     }
     std::lock_guard lock(mLock);
     mHeadSensor = sensorHandle;
-    if (mPoseController != nullptr) {
-        mPoseController->setHeadSensor(mHeadSensor);
-    }
+    checkSensorsState_l();
     return Status::ok();
 }
 
@@ -460,9 +460,7 @@
     }
     std::lock_guard lock(mLock);
     mScreenSensor = sensorHandle;
-    if (mPoseController != nullptr) {
-        mPoseController->setScreenSensor(mScreenSensor);
-    }
+    checkSensorsState_l();
     return Status::ok();
 }
 
@@ -557,7 +555,6 @@
     auto vec = headToStage.toVector();
     LOG_ALWAYS_FATAL_IF(vec.size() != sHeadPoseKeys.size(),
             "%s invalid head to stage vector size %zu", __func__, vec.size());
-
     sp<AMessage> msg =
             new AMessage(EngineCallbackHandler::kWhatOnHeadToStagePose, mHandler);
     for (size_t i = 0 ; i < sHeadPoseKeys.size(); i++) {
@@ -571,6 +568,9 @@
     sp<media::ISpatializerHeadTrackingCallback> callback;
     {
         std::lock_guard lock(mLock);
+        if (mActualHeadTrackingMode == SpatializerHeadTrackingMode::DISABLED) {
+            return;
+        }
         callback = mHeadTrackingCallback;
         if (mEngine != nullptr) {
             setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
@@ -621,7 +621,7 @@
     }
 }
 
-status_t Spatializer::attachOutput(audio_io_handle_t output) {
+status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
     std::shared_ptr<SpatializerPoseController> poseController;
     bool outputChanged = false;
     sp<media::INativeSpatializerCallback> callback;
@@ -634,6 +634,7 @@
             // remove FX instance
             mEngine->setEnabled(false);
             mEngine.clear();
+            mPoseController.reset();
         }
         // create FX instance on output
         AttributionSourceState attributionSource = AttributionSourceState();
@@ -663,8 +664,8 @@
                                 "%s could not allocate pose controller", __func__);
 
             mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
-            mPoseController->setHeadSensor(mHeadSensor);
-            mPoseController->setScreenSensor(mScreenSensor);
+            mNumActiveTracks = numActiveTracks;
+            checkSensorsState_l();
             mPoseController->setDisplayOrientation(mDisplayOrientation);
             poseController = mPoseController;
         }
@@ -697,7 +698,6 @@
         output = mOutput;
         mOutput = AUDIO_IO_HANDLE_NONE;
         mPoseController.reset();
-
         callback = mSpatializerCallback;
     }
 
@@ -707,6 +707,26 @@
     return output;
 }
 
+void Spatializer::updateActiveTracks(size_t numActiveTracks) {
+    std::lock_guard lock(mLock);
+    mNumActiveTracks = numActiveTracks;
+    checkSensorsState_l();
+}
+
+void Spatializer::checkSensorsState_l() {
+    if (mSupportsHeadTracking && mPoseController != nullptr) {
+        if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+            && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+            && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+            mPoseController->setHeadSensor(mHeadSensor);
+            mPoseController->setScreenSensor(mScreenSensor);
+        } else {
+            mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+            mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+        }
+    }
+}
+
 void Spatializer::calculateHeadPose() {
     ALOGV("%s", __func__);
     std::lock_guard lock(mLock);
@@ -723,11 +743,11 @@
     switch (event) {
         case AudioEffect::EVENT_FRAMES_PROCESSED: {
             int frames = info == nullptr ? 0 : *(int*)info;
-            ALOGD("%s frames processed %d for me %p", __func__, frames, me);
+            ALOGV("%s frames processed %d for me %p", __func__, frames, me);
             me->postFramesProcessedMsg(frames);
         } break;
         default:
-            ALOGD("%s event %d", __func__, event);
+            ALOGV("%s event %d", __func__, event);
             break;
     }
 }
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 4d77b78..1382124 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -135,7 +135,7 @@
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
-    status_t attachOutput(audio_io_handle_t output);
+    status_t attachOutput(audio_io_handle_t output, size_t numActiveTracks);
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is closed and the spatializer engine must be released.
      */
@@ -143,6 +143,8 @@
     /** Returns the output stream the spatializer is attached to. */
     audio_io_handle_t getOutput() const { std::lock_guard lock(mLock); return mOutput; }
 
+    void updateActiveTracks(size_t numActiveTracks);
+
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
     audio_config_base_t getAudioInConfig() const;
 
@@ -274,6 +276,13 @@
 
     void postFramesProcessedMsg(int frames);
 
+    /**
+     * Checks if head and screen sensors must be actively monitored based on
+     * spatializer state and playback activity and configures the pose controller
+     * accordingly.
+     */
+    void checkSensorsState_l() REQUIRES(mLock);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
@@ -328,6 +337,8 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
+    size_t mNumActiveTracks GUARDED_BY(mLock) = 0;
+
     static const std::vector<const char *> sHeadPoseKeys;
 };
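
The REQUIRES(mLock) and GUARDED_BY(mLock) annotations added in these headers drive Clang's -Wthread-safety static analysis. A minimal, self-contained sketch of the pattern; the attribute macros are spelled out inline here instead of including Android's thread annotation header:

// Build with: clang++ -Wthread-safety -c thread_safety_sketch.cpp
#include <cstddef>
#include <mutex>

#define CAPABILITY(x)  __attribute__((capability(x)))
#define GUARDED_BY(x)  __attribute__((guarded_by(x)))
#define REQUIRES(...)  __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))

// std::mutex is not annotated as a capability, so wrap it for the analysis.
class CAPABILITY("mutex") Mutex {
public:
    void lock() ACQUIRE() { mImpl.lock(); }
    void unlock() RELEASE() { mImpl.unlock(); }
private:
    std::mutex mImpl;
};

class TrackCounter {
public:
    void update(size_t n) {
        mLock.lock();
        updateLocked(n);   // OK: mLock is held here
        mLock.unlock();
    }
private:
    // The analysis reports an error for any caller that does not hold mLock.
    void updateLocked(size_t n) REQUIRES(mLock) { mNumActiveTracks = n; }

    Mutex mLock;
    size_t mNumActiveTracks GUARDED_BY(mLock) = 0;  // only accessed with mLock held
};

int main() {
    TrackCounter counter;
    counter.update(3);
    return 0;
}
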
 
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 551f5e9..da42ab4 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -699,6 +699,158 @@
     ASSERT_EQ(countDirectProfilesPrimary, getDirectProfilesForAttributes(attr).size());
 }
 
+TEST_P(AudioPolicyManagerTestMsd, IsDirectPlaybackSupportedWithMsd) {
+    const audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    audio_config_base_t directConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    directConfig.format = AUDIO_FORMAT_DTS;
+    directConfig.sample_rate = 48000;
+    directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_base_t nonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t nonExistentConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t msdDirectConfig1 = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_base_t msdDirectConfig2 = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t msdNonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    msdNonDirectConfig.sample_rate = 96000;
+    msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // before setting MSD patches the direct MSD configs return false
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+    DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    outputDevices.remove(mMsdOutputDevice);
+    mManager->setMsdOutputPatches(&outputDevices);
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // after setting MSD patches the direct MSD configs return true
+    ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+    mManager->releaseMsdOutputPatches(outputDevices);
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // AFTER releasing MSD patches the direct MSD configs return false
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+}
+
+TEST_P(AudioPolicyManagerTestMsd, GetDirectPlaybackSupportWithMsd) {
+    const audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    audio_config_t directConfig = AUDIO_CONFIG_INITIALIZER;
+    directConfig.format = AUDIO_FORMAT_DTS;
+    directConfig.sample_rate = 48000;
+    directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_t nonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+    nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t nonExistentConfig = AUDIO_CONFIG_INITIALIZER;
+    nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t msdDirectConfig1 = AUDIO_CONFIG_INITIALIZER;
+    msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_t msdDirectConfig2 = AUDIO_CONFIG_INITIALIZER;
+    msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t msdNonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+    msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    msdNonDirectConfig.sample_rate = 96000;
+    msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // before setting MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+    DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    outputDevices.remove(mMsdOutputDevice);
+    mManager->setMsdOutputPatches(&outputDevices);
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // after setting MSD patches the direct MSD configs return values according to their flags
+    ASSERT_EQ(AUDIO_DIRECT_OFFLOAD_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+    mManager->releaseMsdOutputPatches(outputDevices);
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // after releasing MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+}
+
 class AudioPolicyManagerTestWithConfigurationFile : public AudioPolicyManagerTest {
 protected:
     void SetUpManagerConfig() override;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 69300be..bf7d0c2 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -42,6 +42,7 @@
 
     srcs: [
         "CameraService.cpp",
+        "CameraServiceWatchdog.cpp",
         "CameraFlashlight.cpp",
         "common/Camera2ClientBase.cpp",
         "common/CameraDeviceBase.cpp",
@@ -81,7 +82,6 @@
         "device3/Camera3OutputUtils.cpp",
         "device3/Camera3DeviceInjectionMethods.cpp",
         "device3/UHRCropAndMeteringRegionMapper.cpp",
-        "device3/PreviewFrameScheduler.cpp",
         "device3/hidl/HidlCamera3Device.cpp",
         "device3/hidl/HidlCamera3OfflineSession.cpp",
         "device3/hidl/HidlCamera3OutputUtils.cpp",
@@ -112,7 +112,6 @@
     ],
 
     shared_libs: [
-        "libandroid",
         "libbase",
         "libdl",
         "libexif",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 0ba1b28..e4fb815 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -2139,10 +2139,14 @@
                     id.string());
                 errorCode = ERROR_CAMERA_IN_USE;
                 break;
+            case -EINVAL:
+                msg = String8::format("Torch strength level %d is not within the "
+                        "valid range.", torchStrength);
+                errorCode = ERROR_ILLEGAL_ARGUMENT;
+                break;
             default:
                 msg = String8::format("Changing torch strength level failed.");
                 errorCode = ERROR_INVALID_OPERATION;
-
         }
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(errorCode, msg.string());
@@ -3666,7 +3670,8 @@
     status_t res = mAm.linkToDeath(this);
     mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
             | ActivityManager::UID_OBSERVER_IDLE
-            | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
+            | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
+            | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
             String16("cameraserver"));
     if (res == OK) {
@@ -3715,9 +3720,9 @@
     bool procStateChange = false;
     {
         Mutex::Autolock _l(mUidLock);
-        if ((mMonitoredUids.find(uid) != mMonitoredUids.end()) &&
-                (mMonitoredUids[uid].first != procState)) {
-            mMonitoredUids[uid].first = procState;
+        if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
+                mMonitoredUids[uid].procState != procState) {
+            mMonitoredUids[uid].procState = procState;
             procStateChange = true;
         }
     }
@@ -3730,15 +3735,33 @@
     }
 }
 
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
+    bool procAdjChange = false;
+    {
+        Mutex::Autolock _l(mUidLock);
+        if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+            procAdjChange = true;
+        }
+    }
+
+    if (procAdjChange) {
+        sp<CameraService> service = mService.promote();
+        if (service != nullptr) {
+            service->notifyMonitoredUids();
+        }
+    }
+}
+
 void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
-        it->second.second++;
+        it->second.refCount++;
     } else {
-        mMonitoredUids.emplace(
-                std::pair<uid_t, std::pair<int32_t, size_t>> (uid,
-                    std::pair<int32_t, size_t> (ActivityManager::PROCESS_STATE_NONEXISTENT, 1)));
+        MonitoredUid monitoredUid;
+        monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+        monitoredUid.refCount = 1;
+        mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
     }
 }
 
@@ -3746,8 +3769,8 @@
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
-        it->second.second--;
-        if (it->second.second == 0) {
+        it->second.refCount--;
+        if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
         }
     } else {
@@ -3825,7 +3848,7 @@
 int32_t CameraService::UidPolicy::getProcStateLocked(uid_t uid) {
     int32_t procState = ActivityManager::PROCESS_STATE_UNKNOWN;
     if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
-        procState = mMonitoredUids[uid].first;
+        procState = mMonitoredUids[uid].procState;
     }
     return procState;
 }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6346f50..89a537d 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -698,11 +698,13 @@
         bool isUidActive(uid_t uid, String16 callingPackage);
         int32_t getProcState(uid_t uid);
 
-        void onUidGone(uid_t uid, bool disabled);
-        void onUidActive(uid_t uid);
-        void onUidIdle(uid_t uid, bool disabled);
+        // IUidObserver
+        void onUidGone(uid_t uid, bool disabled) override;
+        void onUidActive(uid_t uid) override;
+        void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
-                int32_t capability);
+                int32_t capability) override;
+        void onUidProcAdjChanged(uid_t uid) override;
 
         void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
         void removeOverrideUid(uid_t uid, String16 callingPackage);
@@ -717,13 +719,18 @@
         int32_t getProcStateLocked(uid_t uid);
         void updateOverrideUid(uid_t uid, String16 callingPackage, bool active, bool insert);
 
+        struct MonitoredUid {
+            int32_t procState;
+            size_t refCount;
+        };
+
         Mutex mUidLock;
         bool mRegistered;
         ActivityManager mAm;
         wp<CameraService> mService;
         std::unordered_set<uid_t> mActiveUids;
-        // Monitored uid map to cached procState and refCount pair
-        std::unordered_map<uid_t, std::pair<int32_t, size_t>> mMonitoredUids;
+        // Monitored uid map
+        std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
         std::unordered_map<uid_t, bool> mOverrideUids;
     }; // class UidPolicy
 
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
new file mode 100644
index 0000000..fcd6ebe
--- /dev/null
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraServiceWatchdog"
+
+#include "CameraServiceWatchdog.h"
+
+namespace android {
+
+bool CameraServiceWatchdog::threadLoop()
+{
+    {
+        AutoMutex _l(mWatchdogLock);
+
+        while (mPause) {
+            mWatchdogCondition.wait(mWatchdogLock);
+        }
+    }
+
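+    // Sleep for one cycle length, then advance the cycle counter of every
+    // monitored thread; a counter that reaches mMaxCycles indicates a stuck
+    // call, and the service process is killed.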
+    std::this_thread::sleep_for(std::chrono::milliseconds(mCycleLengthMs));
+
+    {
+        AutoMutex _l(mWatchdogLock);
+
+        for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+            uint32_t currentThreadId = it->first;
+
+            tidToCycleCounterMap[currentThreadId]++;
+
+            if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
+                ALOGW("CameraServiceWatchdog triggering kill for pid: %d", getpid());
+                kill(getpid(), SIGKILL);
+            }
+        }
+    }
+
+    return true;
+}
+
+void CameraServiceWatchdog::requestExit()
+{
+    Thread::requestExit();
+
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap.clear();
+
+    if (mPause) {
+        mPause = false;
+        mWatchdogCondition.signal();
+    }
+}
+
+void CameraServiceWatchdog::stop(uint32_t tid)
+{
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap.erase(tid);
+
+    if (tidToCycleCounterMap.empty()) {
+        mPause = true;
+    }
+}
+
+void CameraServiceWatchdog::start(uint32_t tid)
+{
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap[tid] = 0;
+
+    if (mPause) {
+        mPause = false;
+        mWatchdogCondition.signal();
+    }
+}
+
+}   // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
new file mode 100644
index 0000000..f4955e2
--- /dev/null
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The CameraService watchdog is used to help detect bad states in the
+ * Camera HAL. The threadloop uses cycle counters, assigned to each calling
+ * thread, to monitor the elapsed time and kills the process once the
+ * expected duration has been exceeded.
+ * Notes on multi-threaded behaviors:
+ *   - The threadloop is blocked/paused when there are no calls being
+ *     monitored.
+ *   - The start and stop functions handle simultaneous call monitoring
+ *     and single call monitoring differently. See the function documentation
+ *     for more details.
+ */
+
+#include <chrono>
+#include <thread>
+#include <time.h>
+#include <utils/Thread.h>
+#include <utils/Log.h>
+#include <unordered_map>
+
+// Used to wrap the call of interest in start and stop calls
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
+        watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
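+
+// Illustrative usage (mirrors Camera3Device later in this change):
+//     sp<CameraServiceWatchdog> watchdog = new CameraServiceWatchdog();
+//     watchdog->run("CameraServiceWatchdog");
+//     status_t res = watchdog->WATCH(interface->close());
+// WATCH_CUSTOM_TIMER is used the same way, with an explicit cycle count and
+// cycle length (see Camera3Device::flush()).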
+
+// Default cycles and cycle length values used to calculate permitted elapsed time
+const static size_t   kMaxCycles     = 100;
+const static uint32_t kCycleLengthMs = 100;
+
+namespace android {
+
+class CameraServiceWatchdog : public Thread {
+
+public:
+    explicit CameraServiceWatchdog() : mPause(true), mMaxCycles(kMaxCycles),
+            mCycleLengthMs(kCycleLengthMs) {}
+
+    explicit CameraServiceWatchdog(size_t maxCycles, uint32_t cycleLengthMs) :
+            mPause(true), mMaxCycles(maxCycles), mCycleLengthMs(cycleLengthMs) {}
+
+    virtual ~CameraServiceWatchdog() {}
+
+    virtual void requestExit();
+
+    /** Used to wrap monitored calls in start and stop functions using custom timer values */
+    template<typename T>
+    auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
+        if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
+            // Create another instance of the watchdog to prevent disruption
+            // of the timer for currently monitored calls
+            sp<CameraServiceWatchdog> tempWatchdog =
+                    new CameraServiceWatchdog(cycles, cycleLength);
+            tempWatchdog->run("CameraServiceWatchdog");
+            auto res = tempWatchdog->watchThread(func, tid);
+            tempWatchdog->requestExit();
+            tempWatchdog.clear();
+            return res;
+        } else {
+            // If the custom timer values are equivalent to the class timer
+            // values, monitor on the current watchdog instance
+            return watchThread(func, tid);
+        }
+    }
+
+    /** Used to wrap monitored calls in start and stop functions using class timer values */
+    template<typename T>
+    auto watchThread(T func, uint32_t tid) {
+        start(tid);
+        auto res = func();
+        stop(tid);
+
+        return res;
+    }
+
+private:
+
+    /**
+     * Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
+     * start() unblocks and starts the watchdog
+     */
+    void start(uint32_t tid);
+
+    /**
+     * If there are no calls left to be monitored, stop blocks/pauses threadloop
+     * otherwise stop() erases the cycle counter to end watchdog for the calling thread
+     */
+    void stop(uint32_t tid);
+
+    virtual bool    threadLoop();
+
+    Mutex           mWatchdogLock;        // Lock for condition variable
+    Condition       mWatchdogCondition;   // Condition variable for stop/start
+    bool            mPause;               // True if thread is currently paused
+    uint32_t        mMaxCycles;           // Max cycles a monitored call may take before the process is killed
+    uint32_t        mCycleLengthMs;       // Length of time elapsed per cycle
+
+    std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+};
+
+}   // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index bcba80e..701206a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -864,7 +864,7 @@
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
-    int streamUseCase = outputConfiguration.getStreamUseCase();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
 
@@ -1260,7 +1260,7 @@
     }
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
-    int streamUseCase = outputConfiguration.getStreamUseCase();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
     int mirrorMode = outputConfiguration.getMirrorMode();
@@ -1629,7 +1629,7 @@
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
-    int streamUseCase= outputConfiguration.getStreamUseCase();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
     for (auto& bufferProducer : bufferProducers) {
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 10fa33f..9303fd2 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -287,7 +287,7 @@
     }
 }
 
-status_t CameraOfflineSessionClient::notifyActive() {
+status_t CameraOfflineSessionClient::notifyActive(float maxPreviewFps __unused) {
     return startCameraStreamingOps();
 }
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index ef1d2de..f2c42d8 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -94,7 +94,7 @@
     // NotificationListener API
     void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
-    status_t notifyActive() override;
+    status_t notifyActive(float maxPreviewFps) override;
     void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..d32b71c 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -117,6 +117,41 @@
     // Composite streams should behave accordingly.
     void enableErrorState();
 
+    // Utility class to lock and unlock a GraphicBuffer
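+    // (the buffer is unlocked automatically in the destructor). Illustrative
+    // usage, mirroring DepthCompositeStream/HeicCompositeStream later in this
+    // change:
+    //     GraphicBufferLocker gbLocker(gb);
+    //     status_t res = gbLocker.lockAsync(&dstBuffer, fenceFd);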
+    class GraphicBufferLocker {
+    public:
+        GraphicBufferLocker(sp<GraphicBuffer> buffer) : _buffer(buffer) {}
+
+        status_t lockAsync(void** dstBuffer, int fenceFd) {
+            if (_buffer == nullptr) return BAD_VALUE;
+
+            status_t res = OK;
+            if (!_locked) {
+                res = _buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN,
+                        dstBuffer, fenceFd);
+                if (res == OK) {
+                    _locked = true;
+                }
+            }
+            return res;
+        }
+
+        ~GraphicBufferLocker() {
+            if (_locked && _buffer != nullptr) {
+                auto res = _buffer->unlock();
+                if (res != OK) {
+                    ALOGE("%s: Error trying to unlock buffer: %s (%d)", __FUNCTION__,
+                            strerror(-res), res);
+                }
+            }
+        }
+
+    private:
+        sp<GraphicBuffer> _buffer;
+        bool _locked = false;
+    };
+
+
     wp<CameraDeviceBase>   mDevice;
     wp<camera3::StatusTracker> mStatusTracker;
     wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index a66a592..aa057c7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -297,7 +297,8 @@
     }
 
     sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
-    res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a73ffb9..6058429 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1130,7 +1130,8 @@
     // Copy the content of the file to memory.
     sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
     void* dstBuffer;
-    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a29f3a6..6ed3c02 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -315,7 +315,7 @@
 }
 
 template <typename TClientBase>
-status_t Camera2ClientBase<TClientBase>::notifyActive() {
+status_t Camera2ClientBase<TClientBase>::notifyActive(float maxPreviewFps) {
     if (!mDeviceActive) {
         status_t res = TClientBase::startCameraStreamingOps();
         if (res != OK) {
@@ -323,7 +323,7 @@
                     TClientBase::mCameraIdStr.string(), res);
             return res;
         }
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
+        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
     }
     mDeviceActive = true;
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 182e6ef..6b90f5e 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -73,7 +73,8 @@
 
     virtual void          notifyError(int32_t errorCode,
                                       const CaptureResultExtras& resultExtras);
-    virtual status_t      notifyActive();  // Returns errors on app ops permission failures
+    // Returns errors on app ops permission failures
+    virtual status_t      notifyActive(float maxPreviewFps);
     virtual void          notifyIdle(int64_t requestCount, int64_t resultErrorCount,
                                      bool deviceError,
                                      const std::vector<hardware::CameraStreamStats>& streamStats);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 5883988..05edd6a 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -184,7 +184,7 @@
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
 
@@ -205,7 +205,7 @@
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
 
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 54e42a6..f39b92a 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -40,7 +40,8 @@
     // Required for API 1 and 2
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras &resultExtras) = 0;
-    virtual status_t notifyActive() = 0; // May return an error since it checks appops
+    // May return an error since it checks appops
+    virtual status_t notifyActive(float maxPreviewFps) = 0;
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index c337eda..4cc03f0 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -642,7 +642,7 @@
         removeRef(DeviceMode::CAMERA, id);
         ALOGE("%s: Transaction error opening a session for camera device %s: %s",
                 __FUNCTION__, id.c_str(), ret.getMessage());
-        return DEAD_OBJECT;
+        return AidlProviderInfo::mapToStatusT(ret);
     }
     return OK;
 }
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index b2a7fee..6f35e56 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2021 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -45,10 +45,32 @@
 using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
 using StatusListener = CameraProviderManager::StatusListener;
 
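+// Map a non-service-specific binder exception code to the closest status_t.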
+static status_t mapExceptionCodeToStatusT(binder_exception_t binderException) {
+    switch (binderException) {
+        case EX_NONE:
+            return OK;
+        case EX_ILLEGAL_ARGUMENT:
+        case EX_NULL_POINTER:
+        case EX_BAD_PARCELABLE:
+        case EX_ILLEGAL_STATE:
+            return BAD_VALUE;
+        case EX_UNSUPPORTED_OPERATION:
+            return INVALID_OPERATION;
+        case EX_TRANSACTION_FAILED:
+            return DEAD_OBJECT;
+        default:
+            return UNKNOWN_ERROR;
+    }
+}
+
 status_t AidlProviderInfo::mapToStatusT(const ndk::ScopedAStatus& s) {
     using Status = aidl::android::hardware::camera::common::Status;
+    auto exceptionCode = s.getExceptionCode();
+    if (exceptionCode != EX_SERVICE_SPECIFIC) {
+        return mapExceptionCodeToStatusT(exceptionCode);
+    }
     Status st = static_cast<Status>(s.getServiceSpecificError());
-    switch(st) {
+    switch (st) {
         case Status::OK:
             return OK;
         case Status::ILLEGAL_ARGUMENT:
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 688b6df..f5c6d6f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -115,6 +115,10 @@
 
 status_t Camera3Device::initializeCommonLocked() {
 
+    /** Start watchdog thread */
+    mCameraServiceWatchdog = new CameraServiceWatchdog();
+    mCameraServiceWatchdog->run("CameraServiceWatchdog");
+
     /** Start up status tracker thread */
     mStatusTracker = new StatusTracker(this);
     status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
@@ -316,7 +320,7 @@
 
         // Call close without internal mutex held, as the HAL close may need to
         // wait on assorted callbacks,etc, to complete before it can return.
-        interface->close();
+        mCameraServiceWatchdog->WATCH(interface->close());
 
         flushInflightRequests();
 
@@ -339,6 +343,12 @@
         }
     }
     ALOGI("%s: X", __FUNCTION__);
+
+    if (mCameraServiceWatchdog != NULL) {
+        mCameraServiceWatchdog->requestExit();
+        mCameraServiceWatchdog.clear();
+    }
+
     return res;
 }
 
@@ -978,7 +988,7 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-            uint64_t consumerUsage, int64_t dynamicRangeProfile, int streamUseCase,
+            uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
             int timestampBase, int mirrorMode) {
     ATRACE_CALL();
 
@@ -1013,8 +1023,8 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
         const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-        uint64_t consumerUsage, int64_t dynamicRangeProfile, int streamUseCase, int timestampBase,
-        int mirrorMode) {
+        uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
+        int timestampBase, int mirrorMode) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
@@ -1022,7 +1032,8 @@
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
             " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
-            " dynamicRangeProfile %" PRIx64 ", streamUseCase %d, timestampBase %d, mirrorMode %d",
+            " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
+            " mirrorMode %d",
             mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
             consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
             dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
@@ -1718,7 +1729,12 @@
         mSessionStatsBuilder.stopCounter();
     }
 
-    return mRequestThread->flush();
+    // Calculate expected duration for flush with additional buffer time in ms for watchdog
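+    // (kBaseGetBufferWait is 3s, so with kCycleLengthMs = 100ms the watchdog
+    // budget is always at least 30 cycles.)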
+    uint64_t maxExpectedDuration = (getExpectedInFlightDuration() + kBaseGetBufferWait) / 1e6;
+    status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
+            maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
+
+    return res;
 }
 
 status_t Camera3Device::prepare(int streamId) {
@@ -1787,6 +1803,20 @@
     return OK;
 }
 
+float Camera3Device::getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream) {
+    camera_metadata_entry minDurations =
+            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
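+    // Each entry of ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS is a
+    // (format, width, height, minFrameDuration in ns) tuple, hence the
+    // stride of 4.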
+    for (size_t i = 0; i < minDurations.count; i += 4) {
+        if (minDurations.data.i64[i] == stream->getFormat()
+                && minDurations.data.i64[i+1] == stream->getWidth()
+                && minDurations.data.i64[i+2] == stream->getHeight()) {
+            int64_t minFrameDuration = minDurations.data.i64[i+3];
+            return 1e9f / minFrameDuration;
+        }
+    }
+    return 0.0f;
+}
+
 /**
  * Methods called by subclasses
  */
@@ -1795,6 +1825,7 @@
     ATRACE_CALL();
     std::vector<int> streamIds;
     std::vector<hardware::CameraStreamStats> streamStats;
+    float sessionMaxPreviewFps = 0.0f;
 
     {
         // Need mLock to safely update state and synchronize to current
@@ -1814,21 +1845,25 @@
         // state changes
         if (mPauseStateNotify) return;
 
-        // Populate stream statistics in case of Idle
-        if (idle) {
-            for (size_t i = 0; i < mOutputStreams.size(); i++) {
-                auto stream = mOutputStreams[i];
-                if (stream.get() == nullptr) continue;
+        for (size_t i = 0; i < mOutputStreams.size(); i++) {
+            auto stream = mOutputStreams[i];
+            if (stream.get() == nullptr) continue;
+
+            float streamMaxPreviewFps = getMaxPreviewFps(stream);
+            sessionMaxPreviewFps = std::max(sessionMaxPreviewFps, streamMaxPreviewFps);
+
+            // Populate stream statistics in case of Idle
+            if (idle) {
                 streamIds.push_back(stream->getId());
                 Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
                 int64_t usage = 0LL;
-                int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+                int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
                 if (camera3Stream != nullptr) {
                     usage = camera3Stream->getUsage();
                     streamUseCase = camera3Stream->getStreamUseCase();
                 }
                 streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
-                    stream->getFormat(), stream->getDataSpace(), usage,
+                    stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
                     stream->getMaxHalBuffers(),
                     stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
                     stream->getDynamicRangeProfile(), streamUseCase);
@@ -1869,7 +1904,7 @@
             }
             listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
         } else {
-            res = listener->notifyActive();
+            res = listener->notifyActive(sessionMaxPreviewFps);
         }
     }
     if (res != OK) {
@@ -2629,7 +2664,7 @@
 
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-        bool hasAppCallback, nsecs_t maxExpectedDuration,
+        bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
         const std::set<std::set<String8>>& physicalCameraIds,
         bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
         const std::set<std::string>& cameraIdsWithZoom,
@@ -2639,8 +2674,9 @@
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
-            hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
-            rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs, outputSurfaces));
+            hasAppCallback, minExpectedDuration, maxExpectedDuration, physicalCameraIds,
+            isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
+            outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -3196,13 +3232,16 @@
     return true;
 }
 
-nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
-    nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
+std::pair<nsecs_t, nsecs_t> Camera3Device::RequestThread::calculateExpectedDurationRange(
+        const camera_metadata_t *request) {
+    std::pair<nsecs_t, nsecs_t> expectedRange(
+            InFlightRequest::kDefaultMinExpectedDuration,
+            InFlightRequest::kDefaultMaxExpectedDuration);
     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
     find_camera_metadata_ro_entry(request,
             ANDROID_CONTROL_AE_MODE,
             &e);
-    if (e.count == 0) return maxExpectedDuration;
+    if (e.count == 0) return expectedRange;
 
     switch (e.data.u8[0]) {
         case ANDROID_CONTROL_AE_MODE_OFF:
@@ -3210,13 +3249,15 @@
                     ANDROID_SENSOR_EXPOSURE_TIME,
                     &e);
             if (e.count > 0) {
-                maxExpectedDuration = e.data.i64[0];
+                expectedRange.first = e.data.i64[0];
+                expectedRange.second = expectedRange.first;
             }
             find_camera_metadata_ro_entry(request,
                     ANDROID_SENSOR_FRAME_DURATION,
                     &e);
             if (e.count > 0) {
-                maxExpectedDuration = std::max(e.data.i64[0], maxExpectedDuration);
+                expectedRange.first = std::max(e.data.i64[0], expectedRange.first);
+                expectedRange.second = expectedRange.first;
             }
             break;
         default:
@@ -3224,12 +3265,13 @@
                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                     &e);
             if (e.count > 1) {
-                maxExpectedDuration = 1e9 / e.data.u8[0];
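+                // e.g. an AE target FPS range of [15, 30] maps to an expected
+                // duration range of [1e9/30, 1e9/15] ns, i.e. roughly [33ms, 66ms]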
+                expectedRange.first = 1e9 / e.data.i32[1];
+                expectedRange.second = 1e9 / e.data.i32[0];
             }
             break;
     }
 
-    return maxExpectedDuration;
+    return expectedRange;
 }
 
 bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
@@ -3844,11 +3886,13 @@
                 isZslCapture = true;
             }
         }
+        auto expectedDurationRange = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
                 /*hasInput*/halRequest->input_buffer != NULL,
                 hasCallback,
-                calculateMaxExpectedDuration(settings),
+                /*min*/expectedDurationRange.first,
+                /*max*/expectedDurationRange.second,
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 453ac3a..0fd0a2d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -32,6 +32,7 @@
 #include <camera/CaptureResult.h>
 
 #include "android/hardware/camera/metadata/3.8/types.h"
+#include "CameraServiceWatchdog.h"
 #include "common/CameraDeviceBase.h"
 #include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
@@ -98,6 +99,9 @@
 
     metadata_vendor_id_t getVendorTagId() const override { return mVendorTagId; }
 
+    // Watchdog thread
+    sp<CameraServiceWatchdog> mCameraServiceWatchdog;
+
     // Transitions to idle state on success.
     virtual status_t initialize(sp<CameraProviderManager> /*manager*/,
             const String8& /*monitorTags*/) = 0;
@@ -141,7 +145,7 @@
             uint64_t consumerUsage = 0,
             int64_t dynamicRangeProfile =
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
 
@@ -156,7 +160,7 @@
             uint64_t consumerUsage = 0,
             int64_t dynamicRangeProfile =
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
 
@@ -294,13 +298,14 @@
     status_t disconnectImpl();
     static status_t removeFwkOnlyRegionKeys(CameraMetadata *request);
 
+    float getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream);
+
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
     static const nsecs_t       kActiveTimeout     = 500000000;  // 500 ms
     static const nsecs_t       kMinWarnInflightDuration = 5000000000; // 5 s
     static const size_t        kInFlightWarnLimit = 30;
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
-    static const nsecs_t       kDefaultExpectedDuration = 100000000; // 100 ms
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
     static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 sec.
     // SCHED_FIFO priority for request submission thread in HFR mode
@@ -954,8 +959,9 @@
         // send request in mNextRequests to HAL in a batch. Return true = sucssess
         bool sendRequestsBatch();
 
-        // Calculate the expected maximum duration for a request
-        nsecs_t calculateMaxExpectedDuration(const camera_metadata_t *request);
+        // Calculate the expected (minimum, maximum) duration range for a request
+        std::pair<nsecs_t, nsecs_t> calculateExpectedDurationRange(
+                const camera_metadata_t *request);
 
         // Check and update latest session parameters based on the current request settings.
         bool updateSessionParameters(const CameraMetadata& settings);
@@ -1070,7 +1076,7 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            bool callback, nsecs_t maxExpectedDuration,
+            bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
             const std::set<std::set<String8>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
@@ -1321,6 +1327,9 @@
     // performance class.
     bool mOverrideForPerfClass;
 
+    // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+    nsecs_t mMinExpectedDuration = 0;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 48e44dc..8cecabd 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -100,6 +100,7 @@
 
     virtual status_t setBatchSize(size_t batchSize) override;
 
+    virtual void onMinDurationChanged(nsecs_t /*duration*/) {}
   protected:
 
     /**
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 2497c22..b5d0746 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,7 +34,7 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int streamUseCase,
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
         bool deviceTimeBaseIsRealtime, int timestampBase) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
@@ -91,7 +91,7 @@
     }
     lines.appendFormat("      Dynamic Range Profile: 0x%" PRIx64,
             camera_stream::dynamic_range_profile);
-    lines.appendFormat("      Stream use case: %d\n", camera_stream::use_case);
+    lines.appendFormat("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
     lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index e757ec6..f389d53 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -39,7 +39,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
 
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 0f7d145..a799719 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -246,6 +246,9 @@
     // For client methods such as disconnect/dump
     std::mutex mInterfaceLock;
 
+    // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+    nsecs_t mMinExpectedDuration = 0;
+
     // SetErrorInterface
     void setErrorState(const char *fmt, ...) override;
     void setErrorStateLocked(const char *fmt, ...) override;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 8ae16e5..b822178 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -47,7 +47,7 @@
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
         int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
@@ -80,7 +80,7 @@
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
         int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
@@ -118,7 +118,7 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
         int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
@@ -163,7 +163,7 @@
                                          const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
                                          int setId, bool isMultiResolution,
-                                         int64_t dynamicRangeProfile, int streamUseCase,
+                                         int64_t dynamicRangeProfile, int64_t streamUseCase,
                                          bool deviceTimeBaseIsRealtime, int timestampBase,
                                          int mirrorMode) :
         Camera3IOStreamBase(id, type, width, height,
@@ -376,32 +376,26 @@
             dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
         }
 
-        nsecs_t t = mPreviewFrameScheduler != nullptr ? readoutTimestamp : timestamp;
-        t -= mTimestampOffset;
-        if (mPreviewFrameScheduler != nullptr) {
-            res = mPreviewFrameScheduler->queuePreviewBuffer(t, transform,
-                    anwBuffer, anwReleaseFence);
-            if (res != OK) {
-                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer scheduler: %s (%d)",
-                        __FUNCTION__, mId, strerror(-res), res);
-                return res;
-            }
-        } else {
-            setTransform(transform, true/*mayChangeMirror*/);
-            res = native_window_set_buffers_timestamp(mConsumer.get(), t);
-            if (res != OK) {
-                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
-                      __FUNCTION__, mId, strerror(-res), res);
-                return res;
-            }
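+        // When syncing to the display, re-time the buffer using the readout
+        // timestamp and map it to a vsync-aligned presentation time;
+        // otherwise pass the (offset-adjusted) capture timestamp through.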
+        nsecs_t captureTime = (mSyncToDisplay ? readoutTimestamp : timestamp) - mTimestampOffset;
+        nsecs_t presentTime = mSyncToDisplay ?
+                syncTimestampToDisplayLocked(captureTime) : captureTime;
+        mLastCaptureTime = captureTime;
+        mLastPresentTime = presentTime;
 
-            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);
+        setTransform(transform, true/*mayChangeMirror*/);
+        res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
+        if (res != OK) {
+            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+                  __FUNCTION__, mId, strerror(-res), res);
+            return res;
+        }
 
-            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
-            if (shouldLogError(res, state)) {
-                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
-                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
-            }
+        queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);
+
+        res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
+        if (shouldLogError(res, state)) {
+            ALOGE("%s: Stream %d: Error queueing buffer to native window:"
+                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
         }
     }
     mLock.lock();
@@ -476,7 +470,7 @@
         return res;
     }
 
-    if ((res = configureConsumerQueueLocked(true /*allowPreviewScheduler*/)) != OK) {
+    if ((res = configureConsumerQueueLocked(true /*allowDisplaySync*/)) != OK) {
         return res;
     }
 
@@ -500,7 +494,7 @@
     return OK;
 }
 
-status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewScheduler) {
+status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowDisplaySync) {
     status_t res;
 
     mTraceFirstBuffer = true;
@@ -590,16 +584,17 @@
     int timestampBase = getTimestampBase();
     bool isDefaultTimeBase = (timestampBase ==
             OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
-    if (allowPreviewScheduler)  {
+    if (allowDisplaySync)  {
         // We cannot distinguish between a SurfaceView and an ImageReader of
-        // preview buffer format. The PreviewFrameScheduler needs to handle both.
+        // preview buffer format. Frames are synchronized to display in both
+        // cases.
         bool forceChoreographer = (timestampBase ==
                 OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
         bool defaultToChoreographer = (isDefaultTimeBase && isConsumedByHWComposer() &&
                 !property_get_bool("camera.disable_preview_scheduler", false));
         if (forceChoreographer || defaultToChoreographer) {
-            mPreviewFrameScheduler = std::make_unique<PreviewFrameScheduler>(*this, mConsumer);
-            mTotalBufferCount += PreviewFrameScheduler::kQueueDepthWatermark;
+            mSyncToDisplay = true;
+            mTotalBufferCount += kDisplaySyncExtraBuffer;
         }
     }
 
@@ -1244,6 +1239,11 @@
     return OK;
 }
 
+void Camera3OutputStream::onMinDurationChanged(nsecs_t duration) {
+    Mutex::Autolock l(mLock);
+    mMinExpectedDuration = duration;
+}
+
 void Camera3OutputStream::returnPrefetchedBuffersLocked() {
     std::vector<Surface::BatchBuffer> batchedBuffers;
 
@@ -1261,9 +1261,52 @@
     }
 }
 
-bool Camera3OutputStream::shouldLogError(status_t res) {
-    Mutex::Autolock l(mLock);
-    return shouldLogError(res, mState);
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+    ParcelableVsyncEventData parcelableVsyncEventData;
+    auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
+                __FUNCTION__, mId, strerror(-res), res);
+        return t;
+    }
+
+    const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
+    nsecs_t currentTime = systemTime();
+
+    // Reset capture to present time offset if more than 1 second
+    // between frames.
+    if (t - mLastCaptureTime > kSpacingResetIntervalNs) {
+        for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+            if (vsyncEventData.frameTimelines[i].deadlineTimestamp >= currentTime) {
+                mCaptureToPresentOffset =
+                    vsyncEventData.frameTimelines[i].expectedPresentationTime - t;
+                break;
+            }
+        }
+    }
+
+    nsecs_t idealPresentT = t + mCaptureToPresentOffset;
+    nsecs_t expectedPresentT = 0;
+    nsecs_t minDiff = INT64_MAX;
+    // Derive minimum intervals between presentation times based on minimal
+    // expected duration.
+    ssize_t minVsyncs = (mMinExpectedDuration + vsyncEventData.frameInterval - 1) /
+            vsyncEventData.frameInterval - 1;
+    if (minVsyncs < 0) minVsyncs = 0;
+    nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval + kTimelineThresholdNs;
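+    // For example, a 30fps request (mMinExpectedDuration ~33ms) on a 60Hz
+    // display (frameInterval ~16.7ms) yields minVsyncs = 1, keeping
+    // consecutive presentation times at least one vsync (plus threshold) apart.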
+    // Find best timestamp in the vsync timeline:
+    // - closest to the ideal present time,
+    // - deadline timestamp is greater than the current time, and
+    // - the candidate present time is at least minInterval in the future
+    //   compared to last present time.
+    for (const auto& vsyncTime : vsyncEventData.frameTimelines) {
+        if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
+                vsyncTime.deadlineTimestamp >= currentTime &&
+                vsyncTime.expectedPresentationTime > mLastPresentTime + minInterval) {
+            expectedPresentT = vsyncTime.expectedPresentationTime;
+            minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
+        }
+    }
+    return expectedPresentT;
 }
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index e777e85..6ea7ef7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -21,13 +21,13 @@
 #include <utils/RefBase.h>
 #include <gui/IProducerListener.h>
 #include <gui/Surface.h>
+#include <gui/DisplayEventReceiver.h>
 
 #include "utils/LatencyHistogram.h"
 #include "Camera3Stream.h"
 #include "Camera3IOStreamBase.h"
 #include "Camera3OutputStreamInterface.h"
 #include "Camera3BufferManager.h"
-#include "PreviewFrameScheduler.h"
 
 namespace android {
 
@@ -91,7 +91,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -108,7 +108,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -124,7 +124,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -240,12 +240,16 @@
     virtual status_t setBatchSize(size_t batchSize = 1) override;
 
     /**
+     * Notify the stream of a change in the minimum frame duration.
+     */
+    virtual void onMinDurationChanged(nsecs_t duration) override;
+
+    /**
      * Apply ZSL related consumer usage quirk.
      */
     static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
 
     void setImageDumpMask(int mask) { mImageDumpMask = mask; }
-    bool shouldLogError(status_t res);
 
   protected:
     Camera3OutputStream(int id, camera_stream_type_t type,
@@ -256,7 +260,7 @@
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -278,7 +282,7 @@
 
     status_t getEndpointUsageForSurface(uint64_t *usage,
             const sp<Surface>& surface) const;
-    status_t configureConsumerQueueLocked(bool allowPreviewScheduler);
+    status_t configureConsumerQueueLocked(bool allowDisplaySync);
 
     // Consumer as the output of camera HAL
     sp<Surface> mConsumer;
@@ -392,13 +396,24 @@
 
     void returnPrefetchedBuffersLocked();
 
+    // Synchronize the camera timestamp to the display vsync timeline; the
+    // return value can be used as the presentation timestamp
+    nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+
     static const int32_t kDequeueLatencyBinSize = 5; // in ms
     CameraLatencyHistogram mDequeueBufferLatency;
 
     int mImageDumpMask = 0;
 
-    // The preview stream scheduler for re-timing frames
-    std::unique_ptr<PreviewFrameScheduler> mPreviewFrameScheduler;
+    nsecs_t mMinExpectedDuration = 0;
+    bool mSyncToDisplay = false;
+    DisplayEventReceiver mDisplayEventReceiver;
+    nsecs_t mLastCaptureTime = 0;
+    nsecs_t mLastPresentTime = 0;
+    nsecs_t mCaptureToPresentOffset = 0;
+    static constexpr size_t kDisplaySyncExtraBuffer = 2;
+    static constexpr nsecs_t kSpacingResetIntervalNs = 1000000000LL; // 1 second
+    static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
 }; // class Camera3OutputStream
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index e44e795..a6d4b96 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -108,6 +108,14 @@
      * instead.
      */
     virtual status_t setBatchSize(size_t batchSize = 1) = 0;
+
+    /**
+     * Notify the output stream that the minimum frame duration has changed.
+     *
+     * The minimum frame duration is calculated based on the upper bound of
+     * AE_TARGET_FPS_RANGE in the capture request.
+     */
+    virtual void onMinDurationChanged(nsecs_t duration) = 0;
 };
 
 // Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index ab25322..ed66df0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -853,6 +853,13 @@
 
             r.shutterTimestamp = msg.timestamp;
             r.shutterReadoutTimestamp = msg.readout_timestamp;
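+            // Propagate a changed minimum frame duration to all output
+            // streams so that display-sync presentation spacing can adapt.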
+            if (r.minExpectedDuration != states.minFrameDuration) {
+                for (size_t i = 0; i < states.outputStreams.size(); i++) {
+                    auto outputStream = states.outputStreams[i];
+                    outputStream->onMinDurationChanged(r.minExpectedDuration);
+                }
+                states.minFrameDuration = r.minExpectedDuration;
+            }
             if (r.hasCallback) {
                 ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                     states.cameraId.string(), __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 4d1eb75..dd01408 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -107,6 +107,7 @@
         InflightRequestUpdateInterface& inflightIntf;
         BufferRecordsInterface& bufferRecordsIntf;
         bool legacyClient;
+        nsecs_t& minFrameDuration;
     };
 
     void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 198e32f..d24b527 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -34,7 +34,7 @@
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool useHalBufManager, int64_t dynamicProfile,
-        int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
         int mirrorMode) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
@@ -251,7 +251,7 @@
         return res;
     }
 
-    res = configureConsumerQueueLocked(false/*allowPreviewScheduler*/);
+    res = configureConsumerQueueLocked(false/*allowDisplaySync*/);
     if (res != OK) {
         ALOGE("Failed to configureConsumerQueueLocked: %s(%d)", strerror(-res), res);
         return res;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 9be0c86..8f7f00b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -42,7 +42,7 @@
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
-            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 396b316..7ad6649 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,7 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -179,7 +179,7 @@
     return camera_stream::max_buffers;
 }
 
-int Camera3Stream::getStreamUseCase() const {
+int64_t Camera3Stream::getStreamUseCase() const {
     return camera_stream::use_case;
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d1545cc..d429e6c 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -178,7 +178,7 @@
     android_dataspace getOriginalDataSpace() const;
     int               getMaxHalBuffers() const;
     const String8&    physicalCameraId() const;
-    int               getStreamUseCase() const;
+    int64_t           getStreamUseCase() const;
     int               getTimestampBase() const;
     bool              isDeviceTimeBaseRealtime() const;
 
@@ -509,7 +509,7 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-            int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 77c6483..5c333a4 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -66,7 +66,7 @@
 
     std::unordered_set<int32_t> sensor_pixel_modes_used;
     int64_t dynamic_range_profile;
-    int use_case;
+    int64_t use_case;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -111,7 +111,7 @@
         bool supportsOffline = false;
         std::unordered_set<int32_t> sensorPixelModesUsed;
         int64_t dynamicRangeProfile;
-        int streamUseCase;
+        int64_t streamUseCase;
         int timestampBase;
         int mirrorMode;
         OutputStreamInfo() :
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 0c97f3e..493a9e2 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -143,6 +143,11 @@
     // is not for constrained high speed recording, this flag will also be true.
     bool hasCallback;
 
+    // Minimum expected frame duration for this request.
+    // For manual captures, equal to the max of requested exposure time and frame duration
+    // For auto-exposure modes, equal to 1/(higher end of target FPS range)
+    nsecs_t minExpectedDuration;
+
     // Maximum expected frame duration for this request.
     // For manual captures, equal to the max of requested exposure time and frame duration
     // For auto-exposure modes, equal to 1/(lower end of target FPS range)
@@ -187,8 +192,8 @@
     // Current output transformation
     int32_t transform;
 
-    // TODO: dedupe
-    static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+    static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
+    static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
 
     // Default constructor needed by KeyedVector
     InFlightRequest() :
@@ -199,7 +204,8 @@
             numBuffersLeft(0),
             hasInputBuffer(false),
             hasCallback(true),
-            maxExpectedDuration(kDefaultExpectedDuration),
+            minExpectedDuration(kDefaultMinExpectedDuration),
+            maxExpectedDuration(kDefaultMaxExpectedDuration),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
             stillCapture(false),
@@ -210,7 +216,7 @@
     }
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
-            bool hasAppCallback, nsecs_t maxDuration,
+            bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration,
             const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
             nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
@@ -222,6 +228,7 @@
             resultExtras(extras),
             hasInputBuffer(hasInput),
             hasCallback(hasAppCallback),
+            minExpectedDuration(minDuration),
             maxExpectedDuration(maxDuration),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
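To make the min/max duration semantics above concrete, a worked example (the FPS range and exposure values are assumptions for illustration):

    // Assumed AE_TARGET_FPS_RANGE = [30, 60]:
    //   minExpectedDuration = 1e9 / 60 ~= 16.7 ms (about 16666667 ns)
    //   maxExpectedDuration = 1e9 / 30 ~= 33.3 ms (about 33333333 ns)
    // For a manual capture with a 50 ms exposure and a 33 ms frame duration,
    // both collapse to max(50 ms, 33 ms) = 50 ms.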
diff --git a/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp b/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp
deleted file mode 100644
index 80f27ed..0000000
--- a/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera3-PreviewFrameScheduler"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-
-#include <android/looper.h>
-#include "PreviewFrameScheduler.h"
-#include "Camera3OutputStream.h"
-
-namespace android {
-
-namespace camera3 {
-
-/**
- * Internal Choreographer thread implementation for polling and handling callbacks
- */
-
-// Callback function for Choreographer
-static void frameCallback(const AChoreographerFrameCallbackData* callbackData, void* data) {
-    PreviewFrameScheduler* parent = static_cast<PreviewFrameScheduler*>(data);
-    if (parent == nullptr) {
-        ALOGE("%s: Invalid data for Choreographer callback!", __FUNCTION__);
-        return;
-    }
-
-    size_t length = AChoreographerFrameCallbackData_getFrameTimelinesLength(callbackData);
-    std::vector<nsecs_t> timeline(length);
-    for (size_t i = 0; i < length; i++) {
-        nsecs_t timestamp = AChoreographerFrameCallbackData_getFrameTimelineExpectedPresentationTimeNanos(
-                callbackData, i);
-        timeline[i] = timestamp;
-    }
-
-    parent->onNewPresentationTime(timeline);
-
-    AChoreographer_postVsyncCallback(AChoreographer_getInstance(), frameCallback, data);
-}
-
-struct ChoreographerThread : public Thread {
-    ChoreographerThread();
-    status_t start(PreviewFrameScheduler* parent);
-    virtual status_t readyToRun() override;
-    virtual bool threadLoop() override;
-
-protected:
-    virtual ~ChoreographerThread() {}
-
-private:
-    ChoreographerThread &operator=(const ChoreographerThread &);
-
-    // This only impacts the shutdown time. It won't impact the choreographer
-    // callback frequency.
-    static constexpr nsecs_t kPollingTimeoutMs = 5;
-    PreviewFrameScheduler* mParent = nullptr;
-};
-
-ChoreographerThread::ChoreographerThread() : Thread(false /*canCallJava*/) {
-}
-
-status_t ChoreographerThread::start(PreviewFrameScheduler* parent) {
-    mParent = parent;
-    return run("PreviewChoreographer");
-}
-
-status_t ChoreographerThread::readyToRun() {
-    ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
-    if (AChoreographer_getInstance() == NULL) {
-        return NO_INIT;
-    }
-
-    AChoreographer_postVsyncCallback(
-            AChoreographer_getInstance(), frameCallback, mParent);
-    return OK;
-}
-
-bool ChoreographerThread::threadLoop() {
-    if (exitPending()) {
-        return false;
-    }
-    ALooper_pollOnce(kPollingTimeoutMs, nullptr, nullptr, nullptr);
-    return true;
-}
-
-/**
- * PreviewFrameScheduler implementation
- */
-
-PreviewFrameScheduler::PreviewFrameScheduler(Camera3OutputStream& parent, sp<Surface> consumer) :
-        mParent(parent),
-        mConsumer(consumer),
-        mChoreographerThread(new ChoreographerThread()) {
-}
-
-PreviewFrameScheduler::~PreviewFrameScheduler() {
-    {
-        Mutex::Autolock l(mLock);
-        mChoreographerThread->requestExit();
-    }
-    mChoreographerThread->join();
-}
-
-status_t PreviewFrameScheduler::queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
-        ANativeWindowBuffer* anwBuffer, int releaseFence) {
-    // Start choreographer thread if it's not already running.
-    if (!mChoreographerThread->isRunning()) {
-        status_t res = mChoreographerThread->start(this);
-        if (res != OK) {
-            ALOGE("%s: Failed to init choreographer thread!", __FUNCTION__);
-            return res;
-        }
-    }
-
-    {
-        Mutex::Autolock l(mLock);
-        mPendingBuffers.emplace(timestamp, transform, anwBuffer, releaseFence);
-
-        // Queue buffer to client right away if pending buffers are more than
-        // the queue depth watermark.
-        if (mPendingBuffers.size() > kQueueDepthWatermark) {
-            auto oldBuffer = mPendingBuffers.front();
-            mPendingBuffers.pop();
-
-            status_t res = queueBufferToClientLocked(oldBuffer, oldBuffer.timestamp);
-            if (res != OK) {
-                return res;
-            }
-
-            // Reset the last capture and presentation time
-            mLastCameraCaptureTime = 0;
-            mLastCameraPresentTime = 0;
-        } else {
-            ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());
-        }
-    }
-    return OK;
-}
-
-void PreviewFrameScheduler::onNewPresentationTime(const std::vector<nsecs_t>& timeline) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mLock);
-    if (mPendingBuffers.size() > 0) {
-        auto nextBuffer = mPendingBuffers.front();
-        mPendingBuffers.pop();
-
-        // Find the best presentation time by finding the element in the
-        // choreographer timeline that's closest to the ideal presentation time.
-        // The ideal presentation time is the last presentation time + frame
-        // interval.
-        nsecs_t cameraInterval = nextBuffer.timestamp - mLastCameraCaptureTime;
-        nsecs_t idealPresentTime = (cameraInterval < kSpacingResetIntervalNs) ?
-                (mLastCameraPresentTime + cameraInterval) : nextBuffer.timestamp;
-        nsecs_t presentTime = *std::min_element(timeline.begin(), timeline.end(),
-                [idealPresentTime](nsecs_t p1, nsecs_t p2) {
-                        return std::abs(p1 - idealPresentTime) < std::abs(p2 - idealPresentTime);
-                });
-
-        status_t res = queueBufferToClientLocked(nextBuffer, presentTime);
-        ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());
-
-        if (mParent.shouldLogError(res)) {
-            ALOGE("%s: Preview Stream: Error queueing buffer to native window:"
-                    " %s (%d)", __FUNCTION__, strerror(-res), res);
-        }
-
-        mLastCameraCaptureTime = nextBuffer.timestamp;
-        mLastCameraPresentTime = presentTime;
-    }
-}
-
-status_t PreviewFrameScheduler::queueBufferToClientLocked(
-        const BufferHolder& bufferHolder, nsecs_t timestamp) {
-    mParent.setTransform(bufferHolder.transform, true/*mayChangeMirror*/);
-
-    status_t res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
-    if (res != OK) {
-        ALOGE("%s: Preview Stream: Error setting timestamp: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return res;
-    }
-
-    Camera3Stream::queueHDRMetadata(bufferHolder.anwBuffer.get()->handle, mConsumer,
-            mParent.getDynamicRangeProfile());
-
-    res = mConsumer->queueBuffer(mConsumer.get(), bufferHolder.anwBuffer.get(),
-            bufferHolder.releaseFence);
-    if (res != OK) {
-        close(bufferHolder.releaseFence);
-    }
-
-    return res;
-}
-
-}; // namespace camera3
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device3/PreviewFrameScheduler.h b/services/camera/libcameraservice/device3/PreviewFrameScheduler.h
deleted file mode 100644
index c0574fd..0000000
--- a/services/camera/libcameraservice/device3/PreviewFrameScheduler.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESCHEDULER_H
-#define ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESCHEDULER_H
-
-#include <queue>
-
-#include <android/choreographer.h>
-#include <gui/Surface.h>
-#include <gui/ISurfaceComposer.h>
-#include <utils/Condition.h>
-#include <utils/Mutex.h>
-#include <utils/Looper.h>
-#include <utils/Thread.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-namespace camera3 {
-
-class Camera3OutputStream;
-struct ChoreographerThread;
-
-/***
- * Preview stream scheduler for better preview display synchronization
- *
- * The ideal viewfinder user experience is that frames are presented to the
- * user in the same cadence as outputed by the camera sensor. However, the
- * processing latency between frames could vary, due to factors such
- * as CPU load, differences in request settings, etc. This frame processing
- * latency results in variation in presentation of frames to the user.
- *
- * The PreviewFrameScheduler improves the viewfinder user experience by:
- * 1. Cache preview buffers in the scheduler
- * 2. For each choreographer callback, queue the oldest cached buffer with
- *    the best matching presentation timestamp. Frame N's presentation timestamp
- *    is the choreographer timeline timestamp closest to (Frame N-1's
- *    presentation time + camera capture interval between frame N-1 and frame N).
- * 3. Maintain at most 2 queue-able buffers. If the 3rd preview buffer becomes
- *    available, queue the oldest cached buffer to the buffer queue.
- */
-class PreviewFrameScheduler {
-  public:
-    explicit PreviewFrameScheduler(Camera3OutputStream& parent, sp<Surface> consumer);
-    virtual ~PreviewFrameScheduler();
-
-    // Queue preview buffer locally
-    status_t queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
-            ANativeWindowBuffer* anwBuffer, int releaseFence);
-
-    // Callback function with a new presentation timeline from choreographer. This
-    // will trigger a locally queued buffer be sent to the buffer queue.
-    void onNewPresentationTime(const std::vector<nsecs_t>& presentationTimeline);
-
-    // Maintain at most 2 queue-able buffers
-    static constexpr int32_t kQueueDepthWatermark = 2;
-
-  private:
-    // structure holding cached preview buffer info
-    struct BufferHolder {
-        nsecs_t timestamp;
-        int32_t transform;
-        sp<ANativeWindowBuffer> anwBuffer;
-        int releaseFence;
-
-        BufferHolder(nsecs_t t, int32_t tr, ANativeWindowBuffer* anwb, int rf) :
-                timestamp(t), transform(tr), anwBuffer(anwb), releaseFence(rf) {}
-    };
-
-    status_t queueBufferToClientLocked(const BufferHolder& bufferHolder,
-            nsecs_t presentTime);
-
-    static constexpr char kPendingBufferTraceName[] = "pending_preview_buffers";
-
-    // Camera capture interval for resetting frame spacing between preview sessions
-    static constexpr nsecs_t kSpacingResetIntervalNs = 1000000000L; // 1 second
-
-    Camera3OutputStream& mParent;
-    sp<ANativeWindow> mConsumer;
-    mutable Mutex mLock;
-
-    std::queue<BufferHolder> mPendingBuffers;
-    nsecs_t mLastCameraCaptureTime = 0;
-    nsecs_t mLastCameraPresentTime = 0;
-
-    // Choreographer related
-    sp<Looper> mLooper;
-    sp<ChoreographerThread> mChoreographerThread;
-};
-
-}; //namespace camera3
-}; //namespace android
-
-#endif
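For reference on what is being retired here: the scheduler's core step was picking, from the choreographer's expected-presentation timeline, the entry closest to the ideal presentation time (last presentation time plus the camera capture interval). A self-contained sketch of that selection, assuming a non-empty timeline:

    #include <algorithm>
    #include <cstdlib>
    #include <vector>
    #include <utils/Timers.h>  // nsecs_t

    // Return the timeline entry closest to the ideal presentation time.
    static nsecs_t pickClosestPresentTime(const std::vector<nsecs_t>& timeline,
                                          nsecs_t idealPresentTime) {
        return *std::min_element(timeline.begin(), timeline.end(),
                [idealPresentTime](nsecs_t a, nsecs_t b) {
                    return std::abs(a - idealPresentTime) < std::abs(b - idealPresentTime);
                });
    }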
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 529c9f0..f05520f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -189,7 +189,6 @@
         return res;
     }
     if (session == nullptr) {
-      ALOGE("JCLog: null session returned");
       SET_ERR("Session iface returned is null");
       return INVALID_OPERATION;
     }
@@ -372,7 +371,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient}, mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -413,7 +412,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient}, mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 895ce56..336719d 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -123,7 +123,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -168,7 +168,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index ad4a480..33de2c5 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -75,7 +75,7 @@
     // See explanation for why we need a separate class for this in
     // AidlCamera3Device::AidlCameraDeviceCallbacks in AidlCamera3Device.h
     class AidlCameraDeviceCallbacks :
-            virtual public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
+            public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
       public:
 
         AidlCameraDeviceCallbacks(wp<AidlCamera3OfflineSession> parent) : mParent(parent)  { }
@@ -112,7 +112,9 @@
                     offlineSession) :
       Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
               offlineReqs, offlineStates),
-      mSession(offlineSession) { mCallbacks = std::make_shared<AidlCameraDeviceCallbacks>(this);};
+      mSession(offlineSession) {
+        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+      };
 
     /**
      * End of CameraOfflineSessionBase interface
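A note on the constructor change above: NDK-backend AIDL classes derive from ndk::SharedRefBase, and such objects are expected to be created through ndk::SharedRefBase::make() so the class's internal weak-reference bookkeeping is initialized; constructing them with std::make_shared skips that step. A minimal sketch of the assumed pattern (generic class name, not the camera code):

    #include <android/binder_interface_utils.h>  // ndk::SharedRefBase

    class MyCallbacks : public ndk::SharedRefBase { /* ... */ };

    // SharedRefBase::make() sets up the object's internal weak reference,
    // which ref() relies on; std::make_shared<MyCallbacks>() would skip that.
    std::shared_ptr<MyCallbacks> cb = ndk::SharedRefBase::make<MyCallbacks>();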
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4894ba9..cf6d462 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -369,7 +369,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     //HidlCaptureOutputStates hidlStates {
@@ -431,7 +431,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -483,7 +483,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -972,7 +972,7 @@
         }
         if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT &&
                 mHidlSession_3_8 == nullptr) {
-            ALOGE("%s: Camera device doesn't support non-default stream use case %d!",
+            ALOGE("%s: Camera device doesn't support non-default stream use case %" PRId64 "!",
                     __FUNCTION__, src->use_case);
             return BAD_VALUE;
         }
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index d517c8d..5c97f0e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,7 +105,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -145,7 +145,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -180,7 +180,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
index 1563dcf..60e4e42 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
@@ -219,6 +219,7 @@
         tmpBuffers[i].buffer = convertToHidl(aBuf.buffer, handlesCreated);
         tmpBuffers[i].acquireFence = convertToHidl(aBuf.acquireFence, handlesCreated);
         tmpBuffers[i].releaseFence = convertToHidl(aBuf.releaseFence, handlesCreated);
+        i++;
     }
     hBuffersVal.buffers(std::move(tmpBuffers));
 }
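The added i++ above suggests the surrounding loop walks the source buffers with a range-for while writing into tmpBuffers[i]; without the increment every conversion would land in element 0. A minimal self-contained illustration of that bug class, using generic types rather than the HIDL structures:

    #include <cstddef>
    #include <vector>

    // Fill a pre-sized output vector from a range-for loop: the index must be
    // advanced manually, otherwise every iteration overwrites element 0.
    std::vector<int> convertAll(const std::vector<int>& src) {
        std::vector<int> dst(src.size());
        size_t i = 0;
        for (int v : src) {
            dst[i] = v + 1;  // stand-in for the per-buffer handle conversion
            i++;             // the increment the original loop was missing
        }
        return dst;
    }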
diff --git a/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp b/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp
deleted file mode 100644
index 025521a..0000000
--- a/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "PreviewSchedulerTest"
-
-#include <chrono>
-#include <thread>
-#include <utility>
-
-#include <gtest/gtest.h>
-#include <utils/Errors.h>
-#include <utils/Log.h>
-#include <utils/Mutex.h>
-
-#include <gui/BufferItemConsumer.h>
-#include <gui/BufferQueue.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/IGraphicBufferConsumer.h>
-#include <gui/Surface.h>
-
-#include "../device3/Camera3OutputStream.h"
-#include "../device3/PreviewFrameScheduler.h"
-
-using namespace android;
-using namespace android::camera3;
-
-// Consumer buffer available listener
-class SimpleListener : public BufferItemConsumer::FrameAvailableListener {
-public:
-    SimpleListener(size_t frameCount): mFrameCount(frameCount) {}
-
-    void waitForFrames() {
-        Mutex::Autolock lock(mMutex);
-        while (mFrameCount > 0) {
-            mCondition.wait(mMutex);
-        }
-    }
-
-    void onFrameAvailable(const BufferItem& /*item*/) override {
-        Mutex::Autolock lock(mMutex);
-        if (mFrameCount > 0) {
-            mFrameCount--;
-            mCondition.signal();
-        }
-    }
-
-    void reset(size_t frameCount) {
-        Mutex::Autolock lock(mMutex);
-        mFrameCount = frameCount;
-    }
-private:
-    size_t mFrameCount;
-    Mutex mMutex;
-    Condition mCondition;
-};
-
-// Test the PreviewFrameScheduler functionatliy of re-timing buffers
-TEST(PreviewSchedulerTest, BasicPreviewSchedulerTest) {
-    const int ID = 0;
-    const int FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    const uint32_t WIDTH = 640;
-    const uint32_t HEIGHT = 480;
-    const int32_t TRANSFORM = 0;
-    const nsecs_t T_OFFSET = 0;
-    const android_dataspace DATASPACE = HAL_DATASPACE_UNKNOWN;
-    const camera_stream_rotation_t ROTATION = CAMERA_STREAM_ROTATION_0;
-    const String8 PHY_ID;
-    const std::unordered_set<int32_t> PIX_MODES;
-    const int BUFFER_COUNT = 4;
-    const int TOTAL_BUFFER_COUNT = BUFFER_COUNT * 2;
-
-    // Create buffer queue
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    ASSERT_NE(producer, nullptr);
-    ASSERT_NE(consumer, nullptr);
-    ASSERT_EQ(NO_ERROR, consumer->setDefaultBufferSize(WIDTH, HEIGHT));
-
-    // Set up consumer
-    sp<BufferItemConsumer> bufferConsumer = new BufferItemConsumer(consumer,
-            GRALLOC_USAGE_HW_COMPOSER, BUFFER_COUNT);
-    ASSERT_NE(bufferConsumer, nullptr);
-    sp<SimpleListener> consumerListener = new SimpleListener(BUFFER_COUNT);
-    bufferConsumer->setFrameAvailableListener(consumerListener);
-
-    // Set up producer
-    sp<Surface> surface = new Surface(producer);
-    sp<StubProducerListener> listener = new StubProducerListener();
-    ASSERT_EQ(NO_ERROR, surface->connect(NATIVE_WINDOW_API_CPU, listener));
-    sp<ANativeWindow> anw(surface);
-    ASSERT_EQ(NO_ERROR, native_window_set_buffer_count(anw.get(), TOTAL_BUFFER_COUNT));
-
-    // Create Camera3OutputStream and PreviewFrameScheduler
-    sp<Camera3OutputStream> stream = new Camera3OutputStream(ID, surface, WIDTH, HEIGHT,
-            FORMAT, DATASPACE, ROTATION, T_OFFSET, PHY_ID, PIX_MODES);
-    ASSERT_NE(stream, nullptr);
-    std::unique_ptr<PreviewFrameScheduler> scheduler =
-            std::make_unique<PreviewFrameScheduler>(*stream, surface);
-    ASSERT_NE(scheduler, nullptr);
-
-    // The pair of nsecs_t: camera timestamp delta (negative means in the past) and frame interval
-    const std::pair<nsecs_t, nsecs_t> inputTimestamps[][BUFFER_COUNT] = {
-        // 30fps, 33ms interval
-        {{-100000000LL, 33333333LL}, {-66666667LL, 33333333LL},
-          {-33333333LL, 33333333LL}, {0, 0}},
-        // 30fps, variable interval
-        {{-100000000LL, 16666667LL}, {-66666667LL, 33333333LL},
-          {-33333333LL, 50000000LL}, {0, 0}},
-        // 60fps, 16.7ms interval
-        {{-50000000LL, 16666667LL}, {-33333333LL, 16666667LL},
-          {-16666667LL, 16666667LL}, {0, 0}},
-        // 60fps, variable interval
-        {{-50000000LL, 8666667LL}, {-33333333LL, 19666667LL},
-          {-16666667LL, 20666667LL}, {0, 0}},
-    };
-
-    // Go through different use cases, and check the buffer timestamp
-    size_t iterations = sizeof(inputTimestamps)/sizeof(inputTimestamps[0]);
-    for (size_t i = 0; i < iterations; i++) {
-        // Space out different test sets to reset the frame scheduler
-        nsecs_t timeBase = systemTime() - s2ns(1) * (iterations - i);
-        nsecs_t lastQueueTime = 0;
-        nsecs_t duration = 0;
-        for (size_t j = 0; j < BUFFER_COUNT; j++) {
-            ANativeWindowBuffer* buffer = nullptr;
-            int fenceFd;
-            ASSERT_EQ(NO_ERROR, anw->dequeueBuffer(anw.get(), &buffer, &fenceFd));
-
-            // Sleep to space out queuePreviewBuffer
-            nsecs_t currentTime = systemTime();
-            if (duration > 0 && duration > currentTime - lastQueueTime) {
-                std::this_thread::sleep_for(
-                        std::chrono::nanoseconds(duration + lastQueueTime - currentTime));
-            }
-            nsecs_t timestamp = timeBase + inputTimestamps[i][j].first;
-            ASSERT_EQ(NO_ERROR,
-                    scheduler->queuePreviewBuffer(timestamp, TRANSFORM, buffer, fenceFd));
-
-            lastQueueTime = systemTime();
-            duration = inputTimestamps[i][j].second;
-        }
-
-        // Collect output timestamps, making sure they are either set by
-        // producer, or set by the scheduler.
-        consumerListener->waitForFrames();
-        nsecs_t outputTimestamps[BUFFER_COUNT];
-        for (size_t j = 0; j < BUFFER_COUNT; j++) {
-            BufferItem bufferItem;
-            ASSERT_EQ(NO_ERROR, bufferConsumer->acquireBuffer(&bufferItem, 0/*presentWhen*/));
-
-            outputTimestamps[j] = bufferItem.mTimestamp;
-            ALOGV("%s: [%zu][%zu]: input: %" PRId64 ", output: %" PRId64, __FUNCTION__,
-                  i, j, timeBase + inputTimestamps[i][j].first, bufferItem.mTimestamp);
-            ASSERT_GT(bufferItem.mTimestamp, inputTimestamps[i][j].first);
-
-            ASSERT_EQ(NO_ERROR, bufferConsumer->releaseBuffer(bufferItem));
-        }
-
-        // Check the output timestamp intervals are aligned with input intervals
-        const nsecs_t SHIFT_THRESHOLD = ms2ns(2);
-        for (size_t j = 0; j < BUFFER_COUNT - 1; j ++) {
-            nsecs_t interval_shift = outputTimestamps[j+1] - outputTimestamps[j] -
-                    (inputTimestamps[i][j+1].first - inputTimestamps[i][j].first);
-            ASSERT_LE(std::abs(interval_shift), SHIFT_THRESHOLD);
-        }
-
-        consumerListener->reset(BUFFER_COUNT);
-    }
-
-    // Disconnect the surface
-    ASSERT_EQ(NO_ERROR, surface->disconnect(NATIVE_WINDOW_API_CPU));
-}
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 8699543..82d58e0 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -66,10 +66,11 @@
     }
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive() {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
+    mSessionStats.mMaxPreviewFps = maxPreviewFps;
     updateProxyDeviceState(mSessionStats);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
@@ -158,7 +159,7 @@
     sessionStats->onStreamConfigured(operatingMode, internalConfig, latencyMs);
 }
 
-void CameraServiceProxyWrapper::logActive(const String8& id) {
+void CameraServiceProxyWrapper::logActive(const String8& id, float maxPreviewFps) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -171,7 +172,7 @@
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
-    sessionStats->onActive();
+    sessionStats->onActive(maxPreviewFps);
 }
 
 void CameraServiceProxyWrapper::logIdle(const String8& id,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index f701e94..037316d 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -48,7 +48,7 @@
         void onOpen();
         void onClose(int32_t latencyMs);
         void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
-        void onActive();
+        void onActive(float maxPreviewFps);
         void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
     };
@@ -81,7 +81,7 @@
             int32_t latencyMs);
 
     // Session state becomes active
-    static void logActive(const String8& id);
+    static void logActive(const String8& id, float maxPreviewFps);
 
     // Session state becomes idle
     static void logIdle(const String8& id,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index af00e81..4090dae 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -313,7 +313,7 @@
     }
 }
 
-bool isStreamUseCaseSupported(int streamUseCase,
+bool isStreamUseCaseSupported(int64_t streamUseCase,
         const CameraMetadata &deviceInfo) {
     camera_metadata_ro_entry_t availableStreamUseCases =
             deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
@@ -324,7 +324,7 @@
     }
 
     for (size_t i = 0; i < availableStreamUseCases.count; i++) {
-        if (availableStreamUseCases.data.i32[i] == streamUseCase) {
+        if (availableStreamUseCases.data.i64[i] == streamUseCase) {
             return true;
         }
     }
@@ -336,7 +336,7 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
-        int streamUseCase, int timestampBase, int mirrorMode) {
+        int64_t streamUseCase, int timestampBase, int mirrorMode) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -452,7 +452,7 @@
     }
     if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
             physicalCameraMetadata)) {
-        String8 msg = String8::format("Camera %s: stream use case %d not supported,"
+        String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
                 " failed to create output stream", logicalCameraId.string(), streamUseCase);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
@@ -718,7 +718,7 @@
             return res;
         }
 
-        int streamUseCase = it.getStreamUseCase();
+        int64_t streamUseCase = it.getStreamUseCase();
         int timestampBase = it.getTimestampBase();
         int mirrorMode = it.getMirrorMode();
         if (deferredConsumer) {
@@ -1040,7 +1040,7 @@
             // image
             return false;
         }
-        if (static_cast<int32_t>(streamConfigV38.streams[i].useCase) !=
+        if (static_cast<int64_t>(streamConfigV38.streams[i].useCase) !=
                 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             // ICameraDevice older than 3.8 doesn't support stream use case
             return false;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 406510f..8abcc95 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -99,7 +99,7 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
-        int streamUseCase, int timestampBase, int mirrorMode);
+        int64_t streamUseCase, int timestampBase, int mirrorMode);
 
 void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
         camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
@@ -114,7 +114,7 @@
 // Check if the device supports a given dynamicRangeProfile
 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
 
-bool isStreamUseCaseSupported(int streamUseCase, const CameraMetadata &deviceInfo);
+bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
         camera3::camera_stream_rotation_t rotation, String8 physicalId,
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index aacc2be..99e3691 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -297,33 +297,35 @@
     ALOGD("%s", __func__);
 
     // Add action to save AnalyticsState if audioserver is restarted.
-    // This triggers on an item of "audio.flinger"
-    // with a property "event" set to "AudioFlinger" (the constructor).
+    // This triggers on AudioFlinger or AudioPolicy ctors and onFirstRef,
+    // as well as TimeCheck events.
     mActions.addAction(
         AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
         std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
         std::make_shared<AnalyticsActions::Function>(
             [this](const std::shared_ptr<const android::mediametrics::Item> &item){
-                ALOGW("(key=%s) Audioflinger constructor event detected", item->getKey().c_str());
-                mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
-                        *mAnalyticsState.get()));
-                // Note: get returns shared_ptr temp, whose lifetime is extended
-                // to end of full expression.
-                mAnalyticsState->clear();  // TODO: filter the analytics state.
-                // Perhaps report this.
-
-                // Set up a timer to expire the previous audio state to save space.
-                // Use the transaction log size as a cookie to see if it is the
-                // same as before.  A benign race is possible where a state is cleared early.
-                const size_t size = mPreviousAnalyticsState->transactionLog().size();
-                mTimedAction.postIn(
-                        std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
-                    if (mPreviousAnalyticsState->transactionLog().size() == size) {
-                        ALOGD("expiring previous audio state after %d seconds.",
-                                PREVIOUS_STATE_EXPIRE_SEC);
-                        mPreviousAnalyticsState->clear();  // removes data from the state.
-                    }
-                });
+                mHealth.onAudioServerStart(Health::Module::AUDIOFLINGER, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerStart(Health::Module::AUDIOPOLICY, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerTimeout(Health::Module::AUDIOFLINGER, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerTimeout(Health::Module::AUDIOPOLICY, item);
             }));
 
     // Handle legacy aaudio playback stream statistics
@@ -1390,4 +1392,138 @@
     }
 }
 
+// Create new state, typically occurs after an AudioFlinger ctor event.
+void AudioAnalytics::newState()
+{
+    mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
+            *mAnalyticsState.get()));
+    // Note: get returns shared_ptr temp, whose lifetime is extended
+    // to end of full expression.
+    mAnalyticsState->clear();  // TODO: filter the analytics state.
+    // Perhaps report this.
+
+    // Set up a timer to expire the previous audio state to save space.
+    // Use the transaction log size as a cookie to see if it is the
+    // same as before.  A benign race is possible where a state is cleared early.
+    const size_t size = mPreviousAnalyticsState->transactionLog().size();
+    mTimedAction.postIn(
+            std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
+        if (mPreviousAnalyticsState->transactionLog().size() == size) {
+            ALOGD("expiring previous audio state after %d seconds.",
+                    PREVIOUS_STATE_EXPIRE_SEC);
+            mPreviousAnalyticsState->clear();  // removes data from the state.
+        }
+    });
+}
+
+void AudioAnalytics::Health::onAudioServerStart(Module module,
+        const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+    const auto nowTime = std::chrono::system_clock::now();
+    if (module == Module::AUDIOFLINGER) {
+        {
+            std::lock_guard lg(mLock);
+            // reset state on AudioFlinger construction.
+            // AudioPolicy is created after AudioFlinger.
+            mAudioFlingerCtorTime = nowTime;
+            mSimpleLog.log("AudioFlinger ctor");
+        }
+        mAudioAnalytics.newState();
+        return;
+    }
+    if (module == Module::AUDIOPOLICY) {
+        // A start event occurs when audioserver
+        //
+        // (1) Starts the first time
+        // (2) Restarts because of the TimeCheck watchdog
+        // (3) Restarts not because of the TimeCheck watchdog.
+        int64_t executionTimeNs = 0;
+        (void)item->get(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, &executionTimeNs);
+        const float loadTimeMs = executionTimeNs * 1e-6f;
+        std::lock_guard lg(mLock);
+        const int64_t restarts = mStartCount;
+        if (mStopCount == mStartCount) {
+            mAudioPolicyCtorTime = nowTime;
+            ++mStartCount;
+            if (mStopCount == 0) {
+                // (1) First time initialization.
+                ALOGW("%s: (key=%s) AudioPolicy ctor, loadTimeMs:%f",
+                        __func__, item->getKey().c_str(), loadTimeMs);
+                mSimpleLog.log("AudioPolicy ctor, loadTimeMs:%f", loadTimeMs);
+            } else {
+                // (2) Previous failure caught due to TimeCheck.  We know how long restart takes.
+                const float restartMs =
+                        std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                                mAudioFlingerCtorTime - mStopTime).count();
+                ALOGW("%s: (key=%s) AudioPolicy ctor, "
+                        "restarts:%lld restartMs:%f loadTimeMs:%f",
+                        __func__, item->getKey().c_str(),
+                        (long long)restarts, restartMs, loadTimeMs);
+                mSimpleLog.log("AudioPolicy ctor restarts:%lld restartMs:%f loadTimeMs:%f",
+                        (long long)restarts, restartMs, loadTimeMs);
+            }
+        } else {
+            // (3) Previous failure is NOT due to TimeCheck, so we don't know the restart time.
+            // However we can estimate the uptime from the delta time from previous ctor.
+            const float uptimeMs =
+                    std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                            nowTime - mAudioFlingerCtorTime).count();
+            mStopCount = mStartCount;
+            mAudioPolicyCtorTime = nowTime;
+            ++mStartCount;
+
+            ALOGW("%s: (key=%s) AudioPolicy ctor after uncaught failure, "
+                    "mStartCount:%lld mStopCount:%lld uptimeMs:%f loadTimeMs:%f",
+                    __func__, item->getKey().c_str(),
+                    (long long)mStartCount, (long long)mStopCount, uptimeMs, loadTimeMs);
+            mSimpleLog.log("AudioPolicy ctor after uncaught failure, "
+                    "restarts:%lld uptimeMs:%f loadTimeMs:%f",
+                    (long long)restarts, uptimeMs, loadTimeMs);
+        }
+    }
+}
+
+void AudioAnalytics::Health::onAudioServerTimeout(Module module,
+        const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+    std::string moduleName = getModuleName(module);
+    int64_t methodCode{};
+    std::string methodName;
+    (void)item->get(AMEDIAMETRICS_PROP_METHODCODE, &methodCode);
+    (void)item->get(AMEDIAMETRICS_PROP_METHODNAME, &methodName);
+
+    std::lock_guard lg(mLock);
+    if (mStopCount >= mStartCount) {
+        ALOGD("%s: (key=%s) %s timeout %s(%lld) "
+            "unmatched mStopCount(%lld) >= mStartCount(%lld), ignoring",
+            __func__, item->getKey().c_str(), moduleName.c_str(),
+            methodName.c_str(), (long long)methodCode,
+            (long long)mStopCount, (long long)mStartCount);
+        return;
+    }
+
+    const int64_t restarts = mStartCount - 1;
+    ++mStopCount;
+    mStopTime = std::chrono::system_clock::now();
+    const float uptimeMs = std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+            mStopTime - mAudioFlingerCtorTime).count();
+    ALOGW("%s: (key=%s) %s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+         __func__, item->getKey().c_str(), moduleName.c_str(),
+         methodName.c_str(), (long long)methodCode,
+         (long long)restarts, uptimeMs);
+    mSimpleLog.log("%s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+            moduleName.c_str(), methodName.c_str(), (long long)methodCode,
+            (long long)restarts, uptimeMs);
+}
+
+std::pair<std::string, int32_t> AudioAnalytics::Health::dump(
+        int32_t lines, const char *prefix) const
+{
+    std::lock_guard lg(mLock);
+    std::string s = mSimpleLog.dumpToString(prefix == nullptr ? "" : prefix, lines);
+    size_t n = std::count(s.begin(), s.end(), '\n');
+    return { s, n };
+}
+
+
 } // namespace android::mediametrics
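The Health bookkeeping above timestamps the AudioFlinger/AudioPolicy constructor and TimeCheck stop events with std::chrono::system_clock and reports the elapsed spans (uptimeMs, restartMs) as float milliseconds. A minimal sketch of that conversion:

    #include <chrono>

    // Elapsed time between two system_clock time points, in float milliseconds,
    // matching the duration_cast pattern used by Health.
    static float elapsedMs(std::chrono::system_clock::time_point start,
                           std::chrono::system_clock::time_point stop) {
        return std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
                stop - start).count();
    }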
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 636b343..ff16b9e 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -327,6 +327,15 @@
                 result << "-- some lines may be truncated --\n";
             }
 
+            const int32_t healthLinesToDump = all ? INT32_MAX : 15;
+            result << "\nHealth Message Log:";
+            const auto [ healthDumpString, healthLines ] =
+                    mAudioAnalytics.dumpHealth(healthLinesToDump);
+            result << "\n" << healthDumpString;
+            if (healthLines == healthLinesToDump) {
+                result << "-- some lines may be truncated --\n";
+            }
+
             result << "\nLogSessionId:\n"
                    << mediametrics::ValidateId::get()->dump();
 
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index a44fcc1..5ee8c30 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -83,6 +83,15 @@
         return mHeatMap.dump(lines);
     }
 
+    /**
+     * Returns a pair consisting of the dump string and the number of lines in the string.
+     *
+     * Health dump.
+     */
+    std::pair<std::string, int32_t> dumpHealth(int32_t lines = INT32_MAX) const {
+        return mHealth.dump(lines);
+    }
+
     void clear() {
         // underlying state is locked.
         mPreviousAnalyticsState->clear();
@@ -247,6 +256,67 @@
         AudioAnalytics &mAudioAnalytics;
     } mAAudioStreamInfo{*this};
 
+    // Create new state, typically occurs after an AudioFlinger ctor event.
+    void newState();
+
+    // Health is a nested class that tracks audioserver health properties
+    class Health {
+    public:
+        explicit Health(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        enum class Module {
+            AUDIOFLINGER,
+            AUDIOPOLICY,
+        };
+
+        const char *getModuleName(Module module) {
+            switch (module) {
+                case Module::AUDIOFLINGER: return "AudioFlinger";
+                case Module::AUDIOPOLICY: return "AudioPolicy";
+            }
+            return "Unknown";
+        }
+
+        // Called when we believe audioserver starts (AudioFlinger ctor)
+        void onAudioServerStart(Module module,
+                const std::shared_ptr<const android::mediametrics::Item> &item);
+
+        // Called when we believe audioserver crashes (TimeCheck timeouts).
+        void onAudioServerTimeout(Module module,
+                const std::shared_ptr<const android::mediametrics::Item> &item);
+
+        std::pair<std::string, int32_t> dump(
+                int32_t lines = INT32_MAX, const char *prefix = nullptr) const;
+
+    private:
+        AudioAnalytics& mAudioAnalytics;
+
+        mutable std::mutex mLock;
+
+        // Life cycle of AudioServer
+        // mAudioFlingerCtorTime
+        // mAudioPolicyCtorTime
+        // mAudioPolicyCtorDoneTime
+        // ...
+        // possibly mStopTime  (if TimeCheck thread)
+        //
+        // Uptime is measured as mStopTime - mAudioFlingerCtorTime.
+        //
+        // The stop events come from TimeCheck timeout aborts.  There may be other
+        // uncaught signals, e.g. SIGSEGV, that cause missing stop events.
+        std::chrono::system_clock::time_point mAudioFlingerCtorTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mAudioPolicyCtorTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mAudioPolicyCtorDoneTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mStopTime GUARDED_BY(mLock);
+
+        // mStartCount and mStopCount track the audioserver start and stop events.
+        int64_t mStartCount GUARDED_BY(mLock) = 0;
+        int64_t mStopCount GUARDED_BY(mLock) = 0;
+
+        SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
+    } mHealth{*this};
+
     AudioPowerUsage mAudioPowerUsage;
 };
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 57be435..1c09544 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -696,11 +696,11 @@
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kNonSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
             if (nonSecureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
         }
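With the change above, the fourth reclaim pass keeps the subtype of the requesting codec, so a pending secure video codec only probes for non-secure video codecs (and vice versa) instead of any codec of the opposite security type. A short sketch of the probe, with the header path assumed:

    #include <media/MediaResource.h>  // assumed header location

    // Probe used by the fourth pass: opposite security type, same subtype.
    MediaResource probe(MediaResource::Type::kNonSecureCodec,
                        MediaResource::SubType::kVideoCodec, 1);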
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index acd9df1..003569d 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -116,6 +116,26 @@
 
 const EventTracker::Event EventTracker::NoEvent;
 
+static MediaResource createSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
+static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
 // Operators for GTest macros.
 bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
     return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
@@ -233,30 +253,30 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
 
     // Add non-secure video codec.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
 
     // Add additional audio codecs, should be ignored.
-    resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
-    resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+    resources.push_back(createSecureAudioCodecResource());
+    resources.push_back(createNonSecureAudioCodecResource());
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
@@ -276,9 +296,9 @@
 
     // Add multiple secure & non-secure video codecs.
     // Multiple entries of the same type should be merged, count should be propagated correctly.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(3)};
     observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
@@ -300,7 +320,7 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec to client1.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -322,7 +342,7 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add non-secure video codec to client2.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
@@ -344,24 +364,24 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add secure & non-secure video codecs, plus audio codecs (that's ignored).
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, should have no event.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+    resources = {createSecureAudioCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove the other audio codec and the secure video codec, only secure video codec
     // removal should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -386,10 +406,10 @@
 
     // Add multiple secure & non-secure video codecs, plus audio codecs (that's ignored).
     // (ResourceManager will merge these internally.)
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(4),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
@@ -400,10 +420,10 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
     // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(2)};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
@@ -443,8 +463,8 @@
     std::vector<MediaResourceParcel> resources;
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index efd1d80..bbfa0b7 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -228,9 +228,6 @@
     if (mMmapStream != nullptr) {
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
-        // Apparently the above close is asynchronous. An attempt to open a new device
-        // right after a close can fail. Also some callbacks may still be in flight!
-        // FIXME Make closing synchronous.
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }