Merge tag 'android-16.0.0_r1' of https://android.googlesource.com/platform/frameworks/av into HEAD

Android 16.0.0 release 1

Change-Id: Ia831874c37e6cb3338110b60d997d227eb79b099
diff --git a/Android.bp b/Android.bp
index c716a06..bcddb3e 100644
--- a/Android.bp
+++ b/Android.bp
@@ -60,6 +60,7 @@
     double_loadable: true,
     local_include_dir: "aidl",
     srcs: [
+        "aidl/android/media/IAudioManagerNative.aidl",
         "aidl/android/media/InterpolatorConfig.aidl",
         "aidl/android/media/InterpolatorType.aidl",
         "aidl/android/media/MicrophoneInfoFw.aidl",
@@ -78,7 +79,7 @@
             min_sdk_version: "29",
             apex_available: [
                 "//apex_available:platform",
-                "com.android.btservices",
+                "com.android.bt",
                 "com.android.media",
                 "com.android.media.swcodec",
             ],
@@ -122,7 +123,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index e9b757b..0114625 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -33,5 +33,6 @@
                media/libaudioclient/tests/
                media/libaudiohal/tests/
                media/libmediatranscoding/
+               services/audioparameterparser/
                services/camera/virtualcamera/
                services/mediatranscoding/
diff --git a/aidl/android/media/IAudioManagerNative.aidl b/aidl/android/media/IAudioManagerNative.aidl
new file mode 100644
index 0000000..5350238
--- /dev/null
+++ b/aidl/android/media/IAudioManagerNative.aidl
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * Native accessible interface for AudioService.
+ * Note this interface has a mix of oneway and non-oneway methods. This is intentional for certain
+ * calls intended to come from audioserver.
+ * {@hide}
+ */
+interface IAudioManagerNative {
+    enum HardeningType {
+        // Restricted due to OP_CONTROL_AUDIO_PARTIAL
+        // This OP is more permissive than OP_CONTROL_AUDIO: it allows apps in a
+        // foreground state not associated with an FGS to access audio
+        PARTIAL,
+        // Restricted due to OP_CONTROL_AUDIO
+        FULL,
+    }
+
+    /**
+     * audioserver is muting playback due to hardening.
+     * Calls which aren't from uid 1041 are dropped.
+     * @param uid - the uid whose playback is restricted
+     * @param type - the level of playback restriction which was hit (full or partial)
+     * @param bypassed - true if the client should be muted but was exempted (for example due to a
+     * certain audio usage to prevent regressions)
+     */
+    oneway void playbackHardeningEvent(in int uid, in HardeningType type, in boolean bypassed);
+
+    /**
+     * Block until AudioService synchronizes pending permission state with audioserver.
+     */
+    void permissionUpdateBarrier();
+
+    /**
+     * Update mute state event for port
+     * @param portId Port id to update
+     * @param event the mute event containing info about the mute
+     */
+    oneway void portMuteEvent(in int portId, in int event);
+}
diff --git a/apex/Android.bp b/apex/Android.bp
index 356bf03..30b359d 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -173,10 +173,10 @@
         "mediaswcodec",
     ],
     native_shared_libs: [
-        "libapexcodecs",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl@1.2",
+        "libcom.android.media.swcodec.apexcodecs",
         "libstagefright_foundation",
     ],
     prebuilts: [
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 0bc735f..1da4fa6 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -26,8 +26,6 @@
 #include <Camera.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICamera.h>
-
-#include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 
 namespace android {
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 424923a..05d078b 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -132,9 +132,11 @@
     clear();
     mBuffer = buffer;
 
-    ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != OK,
-             "%s: Failed to validate metadata structure %p",
-             __FUNCTION__, buffer);
+    IF_ALOGV() {
+        ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != OK,
+                 "%s: Failed to validate metadata structure %p",
+                 __FUNCTION__, buffer);
+    }
 }
 
 void CameraMetadata::acquire(CameraMetadata &other) {
@@ -712,9 +714,11 @@
 
         // Not too big of a problem since receiving side does hard validation
         // Don't check the size since the compact size could be larger
-        if (validate_camera_metadata_structure(metadata, /*size*/NULL) != OK) {
-            ALOGW("%s: Failed to validate metadata %p before writing blob",
-                   __FUNCTION__, metadata);
+        IF_ALOGV() {
+            if (validate_camera_metadata_structure(metadata, /*size*/NULL) != OK) {
+                ALOGW("%s: Failed to validate metadata %p before writing blob",
+                       __FUNCTION__, metadata);
+            }
         }
 
     } while(false);
diff --git a/camera/OWNERS b/camera/OWNERS
index b705548..7bb77e3 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -2,7 +2,6 @@
 etalvala@google.com
 arakesh@google.com
 borgera@google.com
-bkukreja@google.com
 epeev@google.com
 jchowdhary@google.com
 rdhanjal@google.com
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 6431737..779c4a2 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -261,14 +261,6 @@
      */
     @utf8InCpp String getLegacyParameters(int cameraId);
 
-    /**
-     * apiVersion constants for supportsCameraApi
-     */
-    const int API_VERSION_1 = 1;
-    const int API_VERSION_2 = 2;
-
-    // Determines if a particular API version is supported directly for a cameraId.
-    boolean supportsCameraApi(@utf8InCpp String cameraId, int apiVersion);
     // Determines if a cameraId is a hidden physical camera of a logical multi-camera.
     boolean isHiddenPhysicalCamera(@utf8InCpp String cameraId);
     // Inject the external camera to replace the internal camera session.
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 887a68b..dea0588 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -64,4 +64,9 @@
      * Checks if the camera has been disabled via device policy.
      */
     boolean isCameraDisabled(int userId);
+
+    /**
+     * Notify a cameraserver watchdog.
+     */
+    oneway void notifyWatchdog(int /* pid_t */ pid, boolean isNative);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index a9191eb..2145edd 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -36,6 +36,20 @@
 
     SubmitInfo submitRequest(in CaptureRequest request, boolean streaming);
     SubmitInfo submitRequestList(in CaptureRequest[] requestList, boolean streaming);
+    /**
+     * When a camera device is opened in shared mode, only the primary client can change capture
+     * parameters and submit capture requests. Secondary clients can use the startStreaming API to
+     * provide the stream and surface IDs they want to stream on. If the primary client has an
+     * ongoing repeating request, camera service will attach these surfaces to it. Otherwise,
+     * camera service will create a default capture request with a preview template.
+     *
+     * @param streamIdxArray stream ids of the target surfaces
+     * @param surfaceIdxArray surface ids of the target surfaces
+     * @return SubmitInfo data structure containing the request id of the capture request and the
+     *         frame number of the last request, of the previous batch of repeating requests, if
+     *         any. If there is no previous batch, the frame number returned will be -1.
+     */
+    SubmitInfo startStreaming(in int[] streamIdxArray, in int[] surfaceIdxArray);
 
     /**
      * Cancel the repeating request specified by requestId
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index d2fcde6..fee322a 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -118,7 +118,7 @@
 #else
         sp<Surface> surface;
         if (surfaceShim.graphicBufferProducer != NULL) {
-            surface = new Surface(surfaceShim.graphicBufferProducer);
+            surface = sp<Surface>::make(surfaceShim.graphicBufferProducer);
         }
 #endif
         mSurfaceList.push_back(surface);
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index f67214b..2f16483 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -343,9 +343,18 @@
     mMirrorModeForProducers = std::move(mirrorModeForProducers);
     mUseReadoutTimestamp = useReadoutTimestamp != 0;
     for (auto& surface : surfaceShims) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+        IF_ALOGV() {
+            uint64_t bufferID;
+            surface.getUniqueId(&bufferID);
+            ALOGV("%s: OutputConfiguration: %" PRIu64 ", name %s", __FUNCTION__,
+                    bufferID, toString8(surface.name).c_str());
+        }
+#else
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
                 toString8(surface.name).c_str());
+#endif
         mSurfaces.push_back(flagtools::toParcelableSurfaceType(surface));
     }
 
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 627b225..2b425b1 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -225,7 +225,7 @@
     name: "zoom_method"
     is_exported: true
     description: "Gives apps explicit control on reflects zoom via ZOOM_RATIO capture result"
-    bug: "298899993"
+    bug: "350076823"
 }
 
 flag {
@@ -256,16 +256,6 @@
 
 flag {
     namespace: "camera_platform"
-    name: "query_process_state"
-    description: "In opChanged, query the process state from AM instead of relying on mUidPolicy"
-    bug: "378016494"
-    metadata {
-        purpose: PURPOSE_BUGFIX
-    }
-}
-
-flag {
-    namespace: "camera_platform"
     name: "fmq_metadata"
     description: "Allow CameraMetadata transfer for ndk / sdk clients."
     bug: "362791857"
@@ -280,3 +270,13 @@
     description: "Applies system controlled effects targetting video conferencing"
     bug: "376797335"
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "metadata_resize_fix"
+    description: "metadata resize during update needs to consider existing entry"
+    bug: "379388099"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index fc1e547..275e131 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -78,7 +78,6 @@
     ],
     shared_libs: [
         "android.companion.virtual.virtualdevice_aidl-cpp",
-        "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.common-V2-cpp",
         "android.hardware.common.fmq-V1-cpp",
         "camera_platform_flags_c_lib",
@@ -119,7 +118,6 @@
 
 cc_library_shared {
     name: "libcamera2ndk_vendor",
-    cpp_std: "gnu++17",
     vendor: true,
     srcs: [
         "NdkCameraCaptureSession.cpp",
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 58370e5..06ee714 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -178,30 +178,39 @@
 
 EXPORT
 camera_status_t ACameraCaptureSessionShared_startStreaming(
-    ACameraCaptureSession* /*session*/, ACameraCaptureSession_captureCallbacksV2* /*callbacks*/,
-    int /*numOutputWindows*/, ANativeWindow** /*window*/,
-    int* /*captureSequenceId*/) {
+    ACameraCaptureSession* session,
+    /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+    int numOutputWindows, ANativeWindow** windows,
+    /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    // Todo: need to add implementation
-    return  ACAMERA_OK;
+    return startStreamingTemplate(session, callbacks, numOutputWindows, windows,
+            captureSequenceId);
 }
 
 EXPORT
 camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
-    ACameraCaptureSession* /*session*/,
-    ACameraCaptureSession_logicalCamera_captureCallbacksV2* /*callbacks*/,
-    int /*numOutputWindows*/, ANativeWindow** /*windows*/,
-    int* /*captureSequenceId*/) {
+    ACameraCaptureSession* session,
+    /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+    int numOutputWindows, ANativeWindow** windows,
+    /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    // Todo: need to add implementation
-    return  ACAMERA_OK;
+    return  startStreamingTemplate(session, callbacks, numOutputWindows, windows,
+            captureSequenceId);
 }
 
 EXPORT
-camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* /*session*/) {
+camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* session) {
     ATRACE_CALL();
-    // Todo: need to add implementation
-    return  ACAMERA_OK;
+    if (session == nullptr) {
+        ALOGE("%s: Error: session is null", __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    return session->stopStreaming();
 }
 
 EXPORT
diff --git a/camera/ndk/NdkCameraCaptureSession.inc b/camera/ndk/NdkCameraCaptureSession.inc
index 258e20d..3112735 100644
--- a/camera/ndk/NdkCameraCaptureSession.inc
+++ b/camera/ndk/NdkCameraCaptureSession.inc
@@ -68,3 +68,24 @@
 
     return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
 }
+
+template <class CallbackType>
+camera_status_t startStreamingTemplate(ACameraCaptureSession* session,
+    /*optional*/CallbackType* callbacks,
+    int numOutputWindows, ANativeWindow** windows,
+    /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    if (session == nullptr || windows == nullptr || numOutputWindows < 1) {
+        ALOGE("%s: Error: invalid input: session %p, numOutputWindows %d, windows %p",
+                __FUNCTION__, session, numOutputWindows, windows);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        if (captureSequenceId) {
+            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+        }
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    return session->startStreaming(callbacks, numOutputWindows, windows, captureSequenceId);
+}
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index f2ec573..bc6b87a 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -59,6 +59,9 @@
                 __FUNCTION__, device, request);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
+    if (device->isSharedMode() && !device->isPrimaryClient()) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
     switch (templateId) {
         case TEMPLATE_PREVIEW:
         case TEMPLATE_STILL_CAPTURE:
@@ -86,6 +89,9 @@
                 __FUNCTION__, device, request, physicalCameraIdList);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
+    if (device->isSharedMode() && !device->isPrimaryClient()) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
     switch (templateId) {
         case TEMPLATE_PREVIEW:
         case TEMPLATE_STILL_CAPTURE:
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 28cc9af..a2c34e3 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -227,6 +227,11 @@
                 __FUNCTION__, mgr, cameraId, callback, device, primaryClient);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
+    bool sharedMode;
+    camera_status_t status = mgr->isCameraDeviceSharingSupported(cameraId, &sharedMode);
+    if ((status != ACAMERA_OK) || !sharedMode) {
+         return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
     return mgr->openCamera(cameraId, /*sharedMode*/true, callback, device, primaryClient);
 }
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 449c0b4..8e7264a 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -96,6 +96,10 @@
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+            dev->unlockDevice();
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->stopRepeatingLocked();
     }
@@ -103,6 +107,27 @@
     return ret;
 }
 
+camera_status_t ACameraCaptureSession::stopStreaming() {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+    sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+    if (dev == nullptr) {
+        ALOGE("Error: Device associated with session %p has been closed!", this);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    camera_status_t ret;
+    dev->lockDeviceForSessionOps();
+    {
+        Mutex::Autolock _l(mSessionLock);
+        ret = dev->stopStreamingLocked();
+    }
+    dev->unlockDevice();
+    return ret;
+}
+
 camera_status_t
 ACameraCaptureSession::abortCaptures() {
 #ifdef __ANDROID_VNDK__
@@ -114,10 +139,13 @@
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
     }
-
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+            dev->unlockDevice();
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->flushLocked(this);
     }
@@ -139,6 +167,10 @@
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode()) {
+            dev->unlockDevice();
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->updateOutputConfigurationLocked(output);
     }
@@ -160,6 +192,10 @@
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode()) {
+            dev->unlockDevice();
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->prepareLocked(window);
     }
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 0d7a2c1..eb13b96 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -150,6 +150,12 @@
 
     ACameraDevice* getDevice();
 
+    template<class T>
+    camera_status_t startStreaming(/*optional*/T* callbacks,
+            int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
+    camera_status_t stopStreaming();
+
   private:
     friend class android::acam::CameraDevice;
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.inc b/camera/ndk/impl/ACameraCaptureSession.inc
index da535f8..695eb37 100644
--- a/camera/ndk/impl/ACameraCaptureSession.inc
+++ b/camera/ndk/impl/ACameraCaptureSession.inc
@@ -42,6 +42,9 @@
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->setRepeatingRequestsLocked(
                 this, cbs, numRequests, requests, captureSequenceId);
@@ -67,9 +70,37 @@
     camera_status_t ret;
     dev->lockDeviceForSessionOps();
     {
+        if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+        }
         Mutex::Autolock _l(mSessionLock);
         ret = dev->captureLocked(this, cbs, numRequests, requests, captureSequenceId);
     }
     dev->unlockDevice();
     return ret;
 }
+
+template <class T>
+camera_status_t ACameraCaptureSession::startStreaming(
+        /*optional*/T* callbacks, int numOutputWindows, ANativeWindow** windows,
+	/*optional*/int* captureSequenceId)  {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+    sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+    if (dev == nullptr) {
+        ALOGE("Error: Device associated with session %p has been closed!", this);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    camera_status_t ret;
+    dev->lockDeviceForSessionOps();
+    {
+        Mutex::Autolock _l(mSessionLock);
+        ret = dev->startStreamingLocked(this, callbacks, numOutputWindows, windows,
+                captureSequenceId);
+    }
+    dev->unlockDevice();
+    return ret;
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 4d21467..fc21172 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -272,6 +272,28 @@
     }
 }
 
+camera_status_t CameraDevice::stopStreamingLocked() {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+        return ret;
+    }
+    ret = stopRepeatingLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: error when trying to stop streaming %d", __FUNCTION__, ret);
+        return ret;
+    }
+    for (auto& outputTarget : mPreviewRequestOutputs) {
+        ACameraOutputTarget_free(outputTarget);
+    }
+    mPreviewRequestOutputs.clear();
+    if (mPreviewRequest) {
+        ACaptureRequest_free(mPreviewRequest);
+        mPreviewRequest = nullptr;
+    }
+    return ACAMERA_OK;
+}
+
 camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
@@ -689,6 +711,11 @@
         if (ret != ACAMERA_OK) {
             return ret;
         }
+        // Surface sharing cannot be enabled when a camera has been opened
+        // in shared mode.
+        if (flags::camera_multi_client() && mSharedMode && outConfig.mIsShared) {
+            return ACAMERA_ERROR_INVALID_PARAMETER;
+        }
         ParcelableSurfaceType pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
         outputSet.insert(std::make_pair(
                 anw,
@@ -715,10 +742,14 @@
         return ret;
     }
 
-    ret = waitUntilIdleLocked();
-    if (ret != ACAMERA_OK) {
-        ALOGE("Camera device %s wait until idle failed, ret %d", getId(), ret);
-        return ret;
+    // If device is opened in shared mode, there can be multiple clients accessing the
+    // camera device. So do not wait for idle if the device is opened in shared mode.
+    if ((!flags::camera_multi_client()) || (!mSharedMode)) {
+        ret = waitUntilIdleLocked();
+        if (ret != ACAMERA_OK) {
+            ALOGE("Camera device %s wait until idle failed, ret %d", getId(), ret);
+            return ret;
+        }
     }
 
     // Send onReady to previous session
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index ea7d9b6..067923c 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -127,6 +127,7 @@
 
     void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
     bool isPrimaryClient() {return mIsPrimaryClient;};
+    bool isSharedMode() {return mSharedMode;};
 
   private:
     friend ACameraCaptureSession;
@@ -143,6 +144,15 @@
 
     camera_status_t waitUntilIdleLocked();
 
+    camera_status_t stopStreamingLocked();
+
+    template<class T>
+    camera_status_t startStreamingLocked(ACameraCaptureSession* session,
+            /*optional*/T* callbacks,
+            int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
+    ACaptureRequest* mPreviewRequest = nullptr;
+    std::vector<ACameraOutputTarget*> mPreviewRequestOutputs;
 
     template<class T>
     camera_status_t captureLocked(sp<ACameraCaptureSession> session,
@@ -476,11 +486,15 @@
         mDevice->setPrimaryClient(isPrimary);
     }
 
-    inline bool isPrimaryClient() {
+    inline bool isPrimaryClient() const {
         return mDevice->isPrimaryClient();
     }
 
-  private:
+    inline bool isSharedMode() const{
+        return mDevice->isSharedMode();
+    }
+
+ private:
     android::sp<android::acam::CameraDevice> mDevice;
 };
 
diff --git a/camera/ndk/impl/ACameraDevice.inc b/camera/ndk/impl/ACameraDevice.inc
index 1fc5352..7e70d39 100644
--- a/camera/ndk/impl/ACameraDevice.inc
+++ b/camera/ndk/impl/ACameraDevice.inc
@@ -126,5 +126,102 @@
     return ACAMERA_OK;
 }
 
+template<class T>
+camera_status_t CameraDevice::startStreamingLocked(ACameraCaptureSession* session,
+        /*optional*/T* callbacks, int numOutputWindows,
+        ANativeWindow** windows, /*optional*/int* captureSequenceId) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+        return ret;
+    }
+    CameraMetadata rawPreviewRequest;
+    binder::Status remoteRet = mRemote->createDefaultRequest(TEMPLATE_PREVIEW, &rawPreviewRequest);
+    if (!remoteRet.isOk()) {
+        ALOGE("%s: Create capture request failed: %s", __FUNCTION__, remoteRet.toString8().c_str());
+        return ACAMERA_ERROR_UNKNOWN;
+    }
+    // ToDo: Check if the memory allocation can be freed automatically using either default_delete
+    // or ScopedAResource.
+    mPreviewRequest = new ACaptureRequest();
+    mPreviewRequest->settings = new ACameraMetadata(rawPreviewRequest.release(),
+            ACameraMetadata::ACM_REQUEST);
+    mPreviewRequest->targets  = new ACameraOutputTargets();
+    for (int i = 0; i < numOutputWindows ; i++) {
+        ACameraOutputTarget* outputTarget = nullptr;
+        ret = ACameraOutputTarget_create(windows[i], &outputTarget);
+        if (ret != ACAMERA_OK) {
+            ALOGE("%s: error while ACameraOutputTarget_create %d", __FUNCTION__, ret);
+            return ret;
+        }
+        ret = ACaptureRequest_addTarget(mPreviewRequest, outputTarget);
+        if (ret != ACAMERA_OK) {
+            ALOGE("%s: error while ACaptureRequest_addTarget %d", __FUNCTION__, ret);
+            return ret;
+        }
+        mPreviewRequestOutputs.push_back(outputTarget);
+    }
+
+    sp<CaptureRequest> req;
+    ret = allocateCaptureRequest(mPreviewRequest, req);
+    if (ret != ACAMERA_OK) {
+        ALOGE("Convert capture request to internal format failure! ret %d", ret);
+        return ret;
+    }
+    if (req->mSurfaceList.empty()) {
+        ALOGE("Capture request without output target cannot be submitted!");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    // In shared session mode, if there are other active clients streaming then
+    // stopRepeating does not actually send a request to HAL to cancel the request.
+    // Cameraservice will use this call to remove this client's surfaces provided in its
+    // previous streaming request. If this is the only client for the shared camera device
+    // then cameraservice will ask HAL to cancel the previous repeating request.
+    ret = stopRepeatingLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+        return ret;
+    }
+
+    hardware::camera2::utils::SubmitInfo info;
+    std::vector<int> streamIds(req->mStreamIdxList.begin(), req->mStreamIdxList.end());
+    std::vector<int> surfaceIds(req->mSurfaceIdxList.begin(), req->mSurfaceIdxList.end());
+    remoteRet = mRemote->startStreaming(streamIds, surfaceIds, &info);
+    int sequenceId = info.mRequestId;
+    int64_t lastFrameNumber = info.mLastFrameNumber;
+    if (sequenceId < 0) {
+        ALOGE("Camera %s start streaming remote failure: ret %d", getId(), sequenceId);
+        return ACAMERA_ERROR_UNKNOWN;
+    }
+
+    Vector<sp<CaptureRequest> > requestsV;
+    requestsV.push_back(req);
+    CallbackHolder cbHolder(session, requestsV, true, callbacks);
+    mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+
+    // stopRepeating above should have cleaned up the repeating sequence id
+    if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+        setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+        return ACAMERA_ERROR_CAMERA_DEVICE;
+    }
+    mRepeatingSequenceId = sequenceId;
+
+    if (mIdle) {
+        sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+        msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+        msg->setObject(kSessionSpKey, session);
+        msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+        postSessionMsgAndCleanup(msg);
+    }
+    mIdle = false;
+    mBusySession = session;
+
+    if (captureSequenceId) {
+        *captureSequenceId = sequenceId;
+    }
+    return ACAMERA_OK;
+}
+
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index acd7917..26c16cc 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "ACameraManager"
 
 #include "ACameraManager.h"
-#include <android_companion_virtualdevice_flags.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera/VendorTagDescriptor.h>
@@ -31,7 +30,6 @@
 #include <com_android_internal_camera_flags.h>
 
 using namespace android::acam;
-namespace vd_flags = android::companion::virtualdevice::flags;
 namespace flags = com::android::internal::camera::flags;
 
 namespace android {
@@ -62,10 +60,6 @@
 // Returns device id calling process is running on.
 // If the process cannot be attributed to single virtual device id, returns default device id.
 int getCurrentDeviceId() {
-    if (!vd_flags::camera_device_awareness()) {
-        return kDefaultDeviceId;
-    }
-
     auto vdm = getVirtualDeviceManager();
     if (vdm == nullptr) {
         return kDefaultDeviceId;
@@ -91,7 +85,7 @@
 
 // Returns device policy for POLICY_TYPE_CAMERA corresponding to deviceId.
 DevicePolicy getDevicePolicyForDeviceId(const int deviceId) {
-    if (!vd_flags::camera_device_awareness() || deviceId == kDefaultDeviceId) {
+    if (deviceId == kDefaultDeviceId) {
         return DevicePolicy::DEVICE_POLICY_DEFAULT;
     }
 
@@ -113,8 +107,7 @@
 
 // Returns true if camera owned by device cameraDeviceId can be accessed within deviceContext.
 bool isCameraAccessible(const DeviceContext deviceContext, const int cameraDeviceId) {
-    if (!vd_flags::camera_device_awareness() ||
-        deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
+    if (deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
         return cameraDeviceId == kDefaultDeviceId;
     }
     return deviceContext.deviceId == cameraDeviceId;
@@ -376,17 +369,12 @@
     const auto& [_, newlyRegistered] = mCallbacks.insert(cb);
     // Send initial callbacks if callback is newly registered
     if (newlyRegistered) {
-        for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+        for (auto& [key, logicalAndPhysicalStatus] : mDeviceStatusMap) {
             if (!isCameraAccessible(deviceContext, key.deviceId)) {
                 continue;
             }
             const std::string& cameraId = key.cameraId;
-            int32_t status = statusAndHAL3Support.getStatus();
-            // Don't send initial callbacks for camera ids which don't support
-            // camera2
-            if (!statusAndHAL3Support.supportsHAL3) {
-                continue;
-            }
+            int32_t status = logicalAndPhysicalStatus.getStatus();
 
             // Camera available/unavailable callback
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
@@ -400,7 +388,7 @@
 
             // Physical camera unavailable callback
             std::set<std::string> unavailablePhysicalCameras =
-                    statusAndHAL3Support.getUnavailablePhysicalIds();
+                    logicalAndPhysicalStatus.getUnavailablePhysicalIds();
             for (const auto& physicalCameraId : unavailablePhysicalCameras) {
                 sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
                 ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
@@ -416,42 +404,23 @@
     }
 }
 
-bool CameraManagerGlobal::supportsCamera2ApiLocked(const std::string &cameraId) {
-    bool camera2Support = false;
-    auto cs = getCameraServiceLocked();
-    if (cs == nullptr) {
-        return false;
-    }
-    binder::Status serviceRet =
-        cs->supportsCameraApi(cameraId,
-                hardware::ICameraService::API_VERSION_2, &camera2Support);
-    if (!serviceRet.isOk()) {
-        ALOGE("%s: supportsCameraApi2Locked() call failed for cameraId  %s",
-                __FUNCTION__, cameraId.c_str());
-        return false;
-    }
-    return camera2Support;
-}
-
 void CameraManagerGlobal::getCameraIdList(const DeviceContext& context,
         std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     Mutex::Autolock _l(mLock);
     // Needed to make sure we're connected to cameraservice
     getCameraServiceLocked();
-    for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+    for (auto& [key, logicalAndPhysicalStatus] : mDeviceStatusMap) {
         if (!isCameraAccessible(context, key.deviceId)) {
             continue;
         }
 
-        int32_t status = statusAndHAL3Support.getStatus();
+        int32_t status = logicalAndPhysicalStatus.getStatus();
         if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
                 status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
             continue;
         }
-        if (!statusAndHAL3Support.supportsHAL3) {
-            continue;
-        }
+
         cameraIds->push_back(key.cameraId);
     }
 }
@@ -649,28 +618,25 @@
         return;
     }
 
-    bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
     if (firstStatus) {
         mDeviceStatusMap.emplace(std::piecewise_construct, std::forward_as_tuple(key),
-                                 std::forward_as_tuple(status, supportsHAL3));
+                std::forward_as_tuple(status));
     } else {
         mDeviceStatusMap[key].updateStatus(status);
     }
     // Iterate through all registered callbacks
-    if (supportsHAL3) {
-        for (auto cb : mCallbacks) {
-            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
-                continue;
-            }
-            sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
-            ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
-                    cb.mAvailable : cb.mUnavailable;
-            msg->setPointer(kCallbackFpKey, (void *) cbFp);
-            msg->setPointer(kContextKey, cb.mContext);
-            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
-            mPendingCallbackCnt++;
-            msg->post();
+    for (auto cb : mCallbacks) {
+        if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+            continue;
         }
+        sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
+        ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
+                cb.mAvailable : cb.mUnavailable;
+        msg->setPointer(kCallbackFpKey, (void *) cbFp);
+        msg->setPointer(kContextKey, cb.mContext);
+        msg->setString(kCameraIdKey, AString(cameraId.c_str()));
+        mPendingCallbackCnt++;
+        msg->post();
     }
     if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT) {
         mDeviceStatusMap.erase(key);
@@ -705,8 +671,6 @@
         return;
     }
 
-    bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
-
     bool updated = false;
     if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
         updated = mDeviceStatusMap[key].removeUnavailablePhysicalId(physicalCameraId);
@@ -715,7 +679,7 @@
     }
 
     // Iterate through all registered callbacks
-    if (supportsHAL3 && updated) {
+    if (updated) {
         for (auto cb : mCallbacks) {
             if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
                 continue;
@@ -733,31 +697,31 @@
     }
 }
 
-int32_t CameraManagerGlobal::StatusAndHAL3Support::getStatus() {
+int32_t CameraManagerGlobal::Status::getStatus() {
     std::lock_guard<std::mutex> lock(mLock);
     return status;
 }
 
-void CameraManagerGlobal::StatusAndHAL3Support::updateStatus(int32_t newStatus) {
+void CameraManagerGlobal::Status::updateStatus(int32_t newStatus) {
     std::lock_guard<std::mutex> lock(mLock);
     status = newStatus;
 }
 
-bool CameraManagerGlobal::StatusAndHAL3Support::addUnavailablePhysicalId(
+bool CameraManagerGlobal::Status::addUnavailablePhysicalId(
         const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto result = unavailablePhysicalIds.insert(physicalCameraId);
     return result.second;
 }
 
-bool CameraManagerGlobal::StatusAndHAL3Support::removeUnavailablePhysicalId(
+bool CameraManagerGlobal::Status::removeUnavailablePhysicalId(
         const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto count = unavailablePhysicalIds.erase(physicalCameraId);
     return count > 0;
 }
 
-std::set<std::string> CameraManagerGlobal::StatusAndHAL3Support::getUnavailablePhysicalIds() {
+std::set<std::string> CameraManagerGlobal::Status::getUnavailablePhysicalIds() {
     std::lock_guard<std::mutex> lock(mLock);
     return unavailablePhysicalIds;
 }
@@ -881,7 +845,10 @@
     ret = ACameraMetadata_getConstEntry(chars, ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
             &entry);
     if (ret != ACAMERA_OK) {
-        return ret;
+        // If shared session metadata is not found return with sharing
+        // supported as false.
+        *isSharingSupported = false;
+        return ACAMERA_OK;
     }
     *isSharingSupported =  (entry.count > 0) ? true : false;
     return ACAMERA_OK;
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index fffe037..04f1ac4 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -260,18 +260,16 @@
     // Utils for status
     static bool validStatus(int32_t status);
     static bool isStatusAvailable(int32_t status);
-    bool supportsCamera2ApiLocked(const std::string &cameraId);
 
-    struct StatusAndHAL3Support {
+    struct Status {
       private:
         int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
         mutable std::mutex mLock;
         std::set<std::string> unavailablePhysicalIds;
       public:
-        const bool supportsHAL3 = false;
-        StatusAndHAL3Support(int32_t st, bool HAL3support):
-                status(st), supportsHAL3(HAL3support) { };
-        StatusAndHAL3Support() = default;
+        Status(int32_t st):
+                status(st) { };
+        Status() = default;
 
         bool addUnavailablePhysicalId(const std::string& physicalCameraId);
         bool removeUnavailablePhysicalId(const std::string& physicalCameraId);
@@ -308,7 +306,7 @@
         }
     };
 
-    std::map<DeviceStatusMapKey, StatusAndHAL3Support> mDeviceStatusMap;
+    std::map<DeviceStatusMapKey, Status> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 06e1d34..c2aae1c 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1242,8 +1242,7 @@
  *         </ul>
  */
 camera_status_t ACameraCaptureSessionShared_stopStreaming(
-    ACameraCaptureSession *sharedSession
-)  __INTRODUCED_IN(36);
+    ACameraCaptureSession* sharedSession)  __INTRODUCED_IN(36);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index d3a8e0d..b65aedf 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -562,6 +562,28 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::stopStreamingLocked() {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+        return ret;
+    }
+    ret = stopRepeatingLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: error when trying to stop streaming %d", __FUNCTION__, ret);
+        return ret;
+    }
+    for (auto& outputTarget : mPreviewRequestOutputs) {
+        ACameraOutputTarget_free(outputTarget);
+    }
+    mPreviewRequestOutputs.clear();
+    if (mPreviewRequest) {
+        ACaptureRequest_free(mPreviewRequest);
+        mPreviewRequest = nullptr;
+    }
+    return ACAMERA_OK;
+}
+
 camera_status_t CameraDevice::flushLocked(ACameraCaptureSession* session) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 6ba30bb..5d03e95 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -157,6 +157,7 @@
     void stopLooperAndDisconnect();
     void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
     bool isPrimaryClient() {return mIsPrimaryClient;};
+    bool isSharedMode() {return mSharedMode;};
 
   private:
     friend ACameraCaptureSession;
@@ -195,6 +196,13 @@
             /*out*/int* captureSequenceId,
             bool isRepeating);
 
+    camera_status_t stopStreamingLocked();
+
+    template<class T>
+    camera_status_t startStreamingLocked(ACameraCaptureSession* session,
+            /*optional*/T* callbacks,
+            int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
     void addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest, sp<CaptureRequest> &req);
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
@@ -237,6 +245,8 @@
     ACameraDevice* mWrapper;
     bool mSharedMode;
     bool mIsPrimaryClient;
+    ACaptureRequest* mPreviewRequest = nullptr;
+    std::vector<ACameraOutputTarget*> mPreviewRequestOutputs;
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
@@ -490,9 +500,12 @@
     inline void setPrimaryClient(bool isPrimary) {
         mDevice->setPrimaryClient(isPrimary);
     }
-    inline bool isPrimaryClient() {
+    inline bool isPrimaryClient() const {
         return mDevice->isPrimaryClient();
     }
+    inline bool isSharedMode() const {
+        return mDevice->isSharedMode();
+    }
 
   private:
     std::shared_ptr<android::acam::CameraDevice> mDevice;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
index 1e724eb..1f568d2 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -147,5 +147,131 @@
     return ACAMERA_OK;
 }
 
+template<class T>
+camera_status_t CameraDevice::startStreamingLocked(ACameraCaptureSession* session,
+        /*optional*/T* callbacks, int numOutputWindows,
+        ANativeWindow** windows, /*optional*/int* captureSequenceId) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+        return ret;
+    }
+    utils::AidlCameraMetadata aidlMetadata;
+    ndk::ScopedAStatus remoteRet = mRemote->createDefaultRequest(utils::TemplateId::PREVIEW,
+            &aidlMetadata);
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: submitRequestList call failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction error for submitRequestList call: %d", __FUNCTION__,
+                  remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
+    }
+    camera_metadata_t* rawPreviewRequest;
+    utils::cloneFromAidl(aidlMetadata, &rawPreviewRequest);
+    // ToDo: Check if the memory allocation can be freed automatically using either default_delete
+    // or ScopedAResource.
+    mPreviewRequest = new ACaptureRequest();
+    mPreviewRequest->settings = new ACameraMetadata(rawPreviewRequest,
+            ACameraMetadata::ACM_REQUEST);
+    mPreviewRequest->targets  = new ACameraOutputTargets();
+    for (int i = 0; i < numOutputWindows ; i++) {
+        ACameraOutputTarget* outputTarget = nullptr;
+        ret = ACameraOutputTarget_create(windows[i], &outputTarget);
+        if (ret != ACAMERA_OK) {
+            ALOGE("%s: error while ACameraOutputTarget_create %d", __FUNCTION__, ret);
+            return ret;
+        }
+        ret = ACaptureRequest_addTarget(mPreviewRequest, outputTarget);
+        if (ret != ACAMERA_OK) {
+            ALOGE("%s: error while ACaptureRequest_addTarget %d", __FUNCTION__, ret);
+            return ret;
+        }
+        mPreviewRequestOutputs.push_back(outputTarget);
+    }
+
+    std::vector<sp<CaptureRequest>> requestsV;
+    sp<CaptureRequest> req;
+    ret = allocateCaptureRequestLocked(mPreviewRequest, req);
+    // We need to call this method since after submitRequestList is called,
+    // the request metadata queue might have removed the capture request
+    // metadata. Therefore we simply add the metadata to its wrapper class,
+    // so that it can be retrieved later.
+    addRequestSettingsMetadata(mPreviewRequest, req);
+    if (ret != ACAMERA_OK) {
+        ALOGE("Convert capture request to internal format failure! ret %d", ret);
+        return ret;
+    }
+    if (req->mCaptureRequest.streamAndWindowIds.size() == 0) {
+        ALOGE("Capture request without output target cannot be submitted!");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    requestsV.push_back(req);
+    // In shared session mode, if there are other active clients streaming then
+    // stoprepeating does not actually send request to HAL to cancel the request.
+    // Cameraservice will use this call to remove this client surfaces provided in its
+    // previous streaming request. If this is the only client for the shared camera device
+    // then camerservice will ask HAL to cancel the previous repeating request.
+    ret = stopRepeatingLocked();
+    if (ret != ACAMERA_OK) {
+        ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+        return ret;
+    }
+    SubmitInfo info;
+    std::vector<int> streamIds;
+    std::vector<int> surfaceIds;
+    for (const auto& streamAndWindowId : req->mCaptureRequest.streamAndWindowIds) {
+        streamIds.push_back(streamAndWindowId.streamId);
+        surfaceIds.push_back(streamAndWindowId.windowId);
+    }
+    remoteRet = mRemote->startStreaming(streamIds, surfaceIds, &info);
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: startStreaming call failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction error for startStreaming call: %d", __FUNCTION__,
+                  remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
+    }
+
+    int32_t sequenceId = info.requestId;
+    int64_t lastFrameNumber = info.lastFrameNumber;
+    if (sequenceId < 0) {
+        ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
+        return ACAMERA_ERROR_UNKNOWN;
+    }
+    CallbackHolder cbHolder(session, requestsV, true, callbacks);
+    mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+    // stopRepeating above should have cleanup repeating sequence id
+    if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+        setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+        return ACAMERA_ERROR_CAMERA_DEVICE;
+    }
+    mRepeatingSequenceId = sequenceId;
+
+    if (mIdle) {
+        sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+        msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+        msg->setObject(kSessionSpKey, session);
+        msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+        postSessionMsgAndCleanup(msg);
+    }
+    mIdle = false;
+    mBusySession = session;
+
+    if (captureSequenceId) {
+        *captureSequenceId = sequenceId;
+    }
+    return ACAMERA_OK;
+}
+
 } // namespace acam
 } // namespace android
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 5f7f2f6..e74a48c 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -49,7 +49,6 @@
 
 #include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItemConsumer.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 
 #include <gtest/gtest.h>
@@ -387,15 +386,6 @@
 
     for (int32_t i = 0; i < numCameras; i++) {
         std::string cameraId = std::to_string(i);
-        bool isSupported = false;
-        res = service->supportsCameraApi(cameraId,
-                hardware::ICameraService::API_VERSION_2, &isSupported);
-        EXPECT_TRUE(res.isOk()) << res;
-
-        // We only care about binder calls for the Camera2 API.  Camera1 is deprecated.
-        if (!isSupported) {
-            continue;
-        }
 
         // Check metadata binder call
         CameraMetadata metadata;
@@ -536,8 +526,7 @@
 
         // Setup a buffer queue; I'm just using the vendor opaque format here as that is
         // guaranteed to be present
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+        auto [opaqueConsumer, surface] = BufferItemConsumer::create(
                 GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 2, /*controlledByApp*/ true);
         EXPECT_TRUE(opaqueConsumer.get() != nullptr);
         opaqueConsumer->setName(String8("nom nom nom"));
@@ -547,29 +536,9 @@
         EXPECT_EQ(OK,
                   opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
 
-        sp<Surface> surface = opaqueConsumer->getSurface();
-
-        sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
+        ParcelableSurfaceType pSurface = flagtools::surfaceToParcelableSurfaceType(surface);
         std::string noPhysicalId;
-        OutputConfiguration output(producer, /*rotation*/ 0, noPhysicalId);
-#else
-        sp<IGraphicBufferProducer> gbProducer;
-        sp<IGraphicBufferConsumer> gbConsumer;
-        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
-        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
-                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/2, /*controlledByApp*/true);
-        EXPECT_TRUE(opaqueConsumer.get() != nullptr);
-        opaqueConsumer->setName(String8("nom nom nom"));
-
-        // Set to VGA dimens for default, as that is guaranteed to be present
-        EXPECT_EQ(OK, gbConsumer->setDefaultBufferSize(640, 480));
-        EXPECT_EQ(OK, gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
-
-        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
-
-        std::string noPhysicalId;
-        OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        OutputConfiguration output(pSurface, /*rotation*/ 0, noPhysicalId);
 
         // Can we configure?
         res = device->beginConfigure();
@@ -705,10 +674,12 @@
 
 TEST_F(CameraClientBinderTest, CheckBinderCaptureRequest) {
     sp<CaptureRequest> requestOriginal, requestParceled;
-    sp<IGraphicBufferProducer> gbProducer;
-    sp<IGraphicBufferConsumer> gbConsumer;
-    BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
-    sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+
+    auto [opaqueConsumer, surface] = BufferItemConsumer::create(
+            GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 2, /*controlledByApp*/ true);
+    EXPECT_TRUE(opaqueConsumer.get() != nullptr);
+    opaqueConsumer->setName(String8("nom nom nom"));
+
     Vector<sp<Surface>> surfaceList;
     surfaceList.push_back(surface);
     std::string physicalDeviceId1 = "0";
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 9204eb1..5056c3e 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -66,19 +66,12 @@
 TEST_F(CameraCharacteristicsPermission, TestCameraPermission) {
     for (int32_t cameraId = 0; cameraId < numCameras; cameraId++) {
         std::string cameraIdStr = std::to_string(cameraId);
-        bool isSupported = false;
-        auto rc = mCameraService->supportsCameraApi(cameraIdStr,
-                hardware::ICameraService::API_VERSION_2, &isSupported);
-        EXPECT_TRUE(rc.isOk());
-        if (!isSupported) {
-            continue;
-        }
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
         AttributionSourceState clientAttribution;
         clientAttribution.deviceId = kDefaultDeviceId;
-        rc = mCameraService->getCameraCharacteristics(cameraIdStr,
+        auto rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index b06f9b4..b300643 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -93,7 +93,7 @@
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 
-    mComposerClient = new SurfaceComposerClient;
+    mComposerClient = sp<SurfaceComposerClient>::make();
     ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
 }
 
@@ -176,15 +176,6 @@
         sp<ICamera> cameraDevice;
 
         std::string cameraIdStr = std::to_string(cameraId);
-        bool isSupported = false;
-        rc = mCameraService->supportsCameraApi(cameraIdStr,
-                hardware::ICameraService::API_VERSION_1, &isSupported);
-        EXPECT_TRUE(rc.isOk());
-
-        // We only care about camera Camera1 ZSL support.
-        if (!isSupported) {
-            continue;
-        }
 
         CameraMetadata metadata;
         AttributionSourceState clientAttribution;
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
index 5ad9530..abe292f 100644
--- a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -21,6 +21,7 @@
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <gui/view/Surface.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include "camera2common.h"
 
 using namespace std;
@@ -90,9 +91,13 @@
                 sp<Surface> surface = surfaceControl->getSurface();
                 captureRequest->mSurfaceList.push_back(surface);
                 if (fdp.ConsumeBool()) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                    view::Surface surfaceShim = view::Surface::fromSurface(surface);
+#else
                     view::Surface surfaceShim;
-                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
                     surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+#endif
+                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
                     surfaceShim.writeToParcel(&parcelCamCaptureReq);
                 }
                 surface.clear();
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 22e04fc..a9498a2 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -17,7 +17,6 @@
 #include <camera2/OutputConfiguration.h>
 #include <camera2/SessionConfiguration.h>
 #include <fuzzer/FuzzedDataProvider.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include <gui/SurfaceComposerClient.h>
@@ -70,8 +69,10 @@
                         int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
                         bool isShared = mFDP->ConsumeBool();
                         sp<SurfaceType> surface = createSurface();
+                        ParcelableSurfaceType pSurface =
+                            flagtools::convertSurfaceTypeToParcelable(surface);
                         outputConfiguration = make_unique<OutputConfiguration>(
-                                surface, rotation, physicalCameraId, surfaceSetID, isShared);
+                                pSurface, rotation, physicalCameraId, surfaceSetID, isShared);
                     },
 
                     [&]() {
@@ -81,10 +82,12 @@
                         bool isShared = mFDP->ConsumeBool();
                         size_t surfaceSize =
                                 mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
-                        vector<sp<SurfaceType>> surfaces;
+                        vector<ParcelableSurfaceType> surfaces;
                         for (size_t idx = 0; idx < surfaceSize; ++idx) {
                             sp<SurfaceType> surface = createSurface();
-                            surfaces.push_back(surface);
+                            ParcelableSurfaceType pSurface =
+                                flagtools::convertSurfaceTypeToParcelable(surface);
+                            surfaces.push_back(pSurface);
                         }
                         outputConfiguration = make_unique<OutputConfiguration>(
                                 surfaces, rotation, physicalCameraId, surfaceSetID, isShared);
@@ -115,7 +118,9 @@
                 [&]() { outputConfiguration->getSurfaces(); },
                 [&]() {
                     sp<SurfaceType> surface = createSurface();
-                    outputConfiguration->addSurface(surface);
+                    ParcelableSurfaceType pSurface =
+                        flagtools::convertSurfaceTypeToParcelable(surface);
+                    outputConfiguration->addSurface(pSurface);
                 },
                 [&]() { outputConfiguration->isMultiResolution(); },
                 [&]() { outputConfiguration->getColorSpace(); },
@@ -123,7 +128,9 @@
                 [&]() { outputConfiguration->getTimestampBase(); },
                 [&]() {
                     sp<SurfaceType> surface = createSurface();
-                    outputConfiguration->getMirrorMode(surface);
+                    ParcelableSurfaceType pSurface =
+                        flagtools::convertSurfaceTypeToParcelable(surface);
+                    outputConfiguration->getMirrorMode(pSurface);
                 },
                 [&]() { outputConfiguration->useReadoutTimestamp(); },
         });
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
index 7cd0e59..b19ffc8 100644
--- a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -17,7 +17,12 @@
 #include <camera2/OutputConfiguration.h>
 #include <camera2/SessionConfiguration.h>
 #include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Flags.h> // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/Surface.h>
+#else
 #include <gui/IGraphicBufferProducer.h>
+#endif
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include "camera2common.h"
@@ -53,7 +58,7 @@
         outputConfiguration = new OutputConfiguration();
         sessionConfiguration->addOutputConfiguration(*outputConfiguration);
     } else {
-        sp<IGraphicBufferProducer> iGBP = nullptr;
+        ParcelableSurfaceType pSurface;
         sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
         sp<SurfaceControl> surfaceControl = composerClient->createSurface(
                 static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()),
@@ -61,7 +66,7 @@
                 fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
         if (surfaceControl) {
             sp<Surface> surface = surfaceControl->getSurface();
-            iGBP = surface->getIGraphicBufferProducer();
+            pSurface = flagtools::surfaceToParcelableSurfaceType(surface);
             surface.clear();
         }
         int32_t rotation = fdp.ConsumeIntegral<int32_t>();
@@ -69,7 +74,7 @@
         int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
         bool isShared = fdp.ConsumeBool();
         outputConfiguration =
-                new OutputConfiguration(iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+            new OutputConfiguration(pSurface, rotation, physicalCameraId, surfaceSetID, isShared);
         sessionConfiguration->addOutputConfiguration(*outputConfiguration);
     }
 
diff --git a/cmds/screenrecord/FrameOutput.cpp b/cmds/screenrecord/FrameOutput.cpp
index 6388518..7ffd66d 100644
--- a/cmds/screenrecord/FrameOutput.cpp
+++ b/cmds/screenrecord/FrameOutput.cpp
@@ -70,17 +70,11 @@
         return UNKNOWN_ERROR;
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
-                                 /*isControlledByApp=*/false);
-    auto producer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
-#else
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    mGlConsumer = new GLConsumer(consumer, mExtTextureName,
-                GL_TEXTURE_EXTERNAL_OES, true, false);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<Surface> surface;
+    std::tie(mGlConsumer, surface) =
+            GLConsumer::create(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                               /*isControlledByApp=*/false);
+    auto producer = surface->getIGraphicBufferProducer();
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     producer->setMaxDequeuedBufferCount(4);
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 727f16a..f0bd402 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -172,16 +172,11 @@
         return UNKNOWN_ERROR;
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
-                                 /*isControlledByApp=*/false);
-    mProducer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
-#else
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&mProducer, &consumer);
-    mGlConsumer = new GLConsumer(consumer, mExtTextureName,
-                GL_TEXTURE_EXTERNAL_OES, true, false);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<Surface> surface;
+    std::tie(mGlConsumer, surface) =
+            GLConsumer::create(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                               /*isControlledByApp=*/false);
+    mProducer = surface->getIGraphicBufferProducer();
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     mProducer->setMaxDequeuedBufferCount(4);
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index de925b8..e260165 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -1382,14 +1382,16 @@
             }
             break;
         case 'd':
-            if (const auto id = android::DisplayId::fromValue<PhysicalDisplayId>(atoll(optarg));
-                id && SurfaceComposerClient::getPhysicalDisplayToken(*id)) {
-                gPhysicalDisplayId = *id;
+        {
+            const PhysicalDisplayId id = android::PhysicalDisplayId::fromValue(atoll(optarg));
+            if (SurfaceComposerClient::getPhysicalDisplayToken(id)) {
+                gPhysicalDisplayId = id;
                 break;
             }
 
             fprintf(stderr, "Invalid physical display ID\n");
             return 2;
+        }
         case 'S':
             gSecureDisplay = true;
             break;
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 1a6e5e8..f2758f8 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -737,9 +737,10 @@
                 }
 
                 printf("    owner: \"%s\"\n", info->getOwnerName());
+                printf("    hal name: \"%s\"\n", info->getHalName());
                 printf("    rank: %u\n", info->getRank());
             } else {
-                printf("    aliases, attributes, owner, rank: see above\n");
+                printf("    aliases, attributes, owner, hal name, rank: see above\n");
             }
 
             {
@@ -757,7 +758,8 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCProfile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Profile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :
-                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION) ? asString_DolbyVisionProfile(pl.mProfile) :"??";
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION) ? asString_DolbyVisionProfile(pl.mProfile) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AC4)   ? asString_AC4Profile(pl.mProfile) : "??";
                     const char *niceLevel =
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Level(pl.mLevel) :
@@ -768,6 +770,7 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION) ? asString_DolbyVisionLevel(pl.mLevel) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AC4)   ? asString_AC4Level(pl.mLevel) :
                         "??";
 
                     list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
@@ -1134,20 +1137,10 @@
             CHECK(gSurface != NULL);
         } else {
             CHECK(useSurfaceTexAlloc);
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-            sp<GLConsumer> texture =
-                    new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
-                                   true /* useFenceSync */, false /* isControlledByApp */);
-            gSurface = texture->getSurface();
-#else
-            sp<IGraphicBufferProducer> producer;
-            sp<IGraphicBufferConsumer> consumer;
-            BufferQueue::createBufferQueue(&producer, &consumer);
-            sp<GLConsumer> texture = new GLConsumer(consumer, 0 /* tex */,
-                    GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
-                    false /* isControlledByApp */);
-            gSurface = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+            sp<GLConsumer> texture;
+            std::tie(texture, gSurface) =
+                    GLConsumer::create(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+                                       true /* useFenceSync */, false /* isControlledByApp */);
         }
     }
 
diff --git a/drm/OWNERS b/drm/OWNERS
index 090c021..ac63d7c 100644
--- a/drm/OWNERS
+++ b/drm/OWNERS
@@ -1,3 +1,2 @@
-jtinker@google.com
 kelzhan@google.com
 robertshih@google.com
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 98eba2a..d22ba78 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -70,6 +70,9 @@
     if (ssid == NULL) {
         android_errorWriteLog(0x534e4554, "121035042");
 
+        LOG_ALWAYS_FATAL_IF(nullptr != IPCThreadState::self()->getServingStackPointer(),
+            "Missing SID from other process");
+
         if (getpidcon(spid, &sctx) != 0) {
             ALOGE("SELinux: getpidcon(pid=%d) failed.\n", spid);
             return false;
diff --git a/include/OWNERS b/include/OWNERS
index e1d4db7..67de1f3 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,6 +1,5 @@
 elaurent@google.com
 hunga@google.com
-jtinker@google.com
 lajos@google.com
 essick@google.com
 philburk@google.com
diff --git a/include/mediadrm/OWNERS b/include/mediadrm/OWNERS
index e788754..e69de29 100644
--- a/include/mediadrm/OWNERS
+++ b/include/mediadrm/OWNERS
@@ -1 +0,0 @@
-jtinker@google.com
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 695cad6..54b256f 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -43,9 +43,7 @@
                 }
             ],
             "file_patterns": ["(?i)drm|crypto"]
-        }
-    ],
-    "postsubmit": [
+        },
         {
             "name": "MctsMediaCodecTestCases",
             "options": [
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index a1413b7..089ddf8 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -29,6 +29,13 @@
 }
 
 flag {
+  name: "codec_availability_metrics"
+  namespace: "codec_fwk"
+  description: "Feature flag for codec availability metrics collection"
+  bug: "402463766"
+}
+
+flag {
   name: "codec_availability_support"
   namespace: "codec_fwk"
   description: "Feature flag for codec availability HAL API implementation"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
index 9dd1fdd..cb8a963 100644
--- a/media/aconfig/swcodec_flags.aconfig
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -5,6 +5,16 @@
 container: "com.android.media.swcodec"
 
 flag {
+  name: "apexcodecs_base"
+  # ApexCodecs API is getting called early in the boot process, so we need to make
+  # sure that the flag value is stable from the early boot stage.
+  is_fixed_read_only: true
+  namespace: "codec_fwk"
+  description: "Feature flag for base implementation of apexcodecs"
+  bug: "401332082"
+}
+
+flag {
   name: "apv_software_codec"
   is_exported: true
   is_fixed_read_only: true
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index cab126f..6769fe7 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -77,7 +77,7 @@
     // TODO(b/316909431) native_bridge_supported: true,
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -149,6 +149,7 @@
 java_aconfig_library {
     name: "android.media.audio-aconfig-java",
     aconfig_declarations: "android.media.audio-aconfig",
+    host_supported: true,
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
@@ -156,13 +157,15 @@
     name: "android.media.audio-aconfig-exported-java",
     aconfig_declarations: "android.media.audio-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
-    min_sdk_version: "Tiramisu",
+    min_sdk_version: "30",
     mode: "exported",
     apex_available: [
-        "com.android.btservices",
+        "com.android.bt",
+        "com.android.permission",
     ],
     visibility: [
         "//packages/modules/Bluetooth:__subpackages__",
+        "//packages/modules/Permission:__subpackages__",
     ],
 }
 
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index fe53824..84646d7 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -41,6 +41,7 @@
     bug: "367667349"
 }
 
+# unused
 flag {
     name: "bluetooth_mac_address_anonymization"
     namespace: "media_audio"
@@ -51,6 +52,15 @@
 }
 
 flag {
+    name: "defer_wear_permission_updates"
+    namespace: "media_audio"
+    description:
+        "defer permission updates to reduce pressure"
+        "on app launch latency on lower core devices."
+    bug: "380347376"
+}
+
+flag {
     name: "disable_prescale_absolute_volume"
     namespace: "media_audio"
     description:
@@ -84,6 +94,21 @@
 }
 
 flag {
+    name: "hardening_partial"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Flag for partial enforcement of hardening"
+    bug: "376480814"
+}
+
+flag {
+    name: "hardening_partial_volume"
+    namespace: "media_audio"
+    description: "Flag for partial enforcement of volume hardening"
+    bug: "376480814"
+}
+
+flag {
     name: "hardening_strict"
     is_exported: true
     namespace: "media_audio"
@@ -99,6 +124,13 @@
 }
 
 flag {
+    name: "optimize_bt_device_switch"
+    namespace: "media_audio"
+    description: "Optimize the Bluetooth active device switch sequence"
+    bug: "373867402"
+}
+
+flag {
     name: "port_to_piid_simplification"
     namespace: "media_audio"
     description: "PAM only needs for each piid the last portId mapping"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index d157a97..a4956b8 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -24,6 +24,22 @@
 }
 
 flag {
+    name: "cache_get_stream_min_max_volume"
+    namespace: "media_audio"
+    description:
+       "Cache getStream[Min|Max]Volume calls between AudioManager and AudioService"
+    bug: "383667500"
+}
+
+flag {
+    name: "cache_get_stream_volume"
+    namespace: "media_audio"
+    description:
+       "Cache getStreamVolume calls between AudioManager and AudioService"
+    bug: "383667500"
+}
+
+flag {
     name: "concurrent_audio_record_bypass_permission"
     namespace: "media_audio"
     description:
@@ -86,6 +102,14 @@
     is_exported: true
 }
 
+flag {
+    name: "sony_360ra_mpegh_3d_format"
+    namespace: "media_audio"
+    description: "360 Reality Audio MPEG-H 3D Format"
+    is_fixed_read_only: true
+    bug: "238402306"
+}
+
 # TODO remove
 flag {
     name: "foreground_audio_control"
@@ -164,6 +188,14 @@
 }
 
 flag {
+    name: "ringtone_user_uri_check"
+    is_exported: true
+    namespace: "media_audio"
+    description: "check user for RingtonePlayer URIs"
+    bug: "400434060"
+}
+
+flag {
     name: "sco_managed_by_audio"
     is_exported: true
     namespace: "media_audio"
@@ -241,6 +273,14 @@
     is_exported: true
 }
 
+flag {
+    name: "unify_absolute_volume_management"
+    namespace: "media_audio"
+    description: "Unify absolute volume management in AudioService for A2DP, LEA, SCO, ASHA"
+    is_exported: true
+    bug: "393657380"
+}
+
 # TODO remove
 flag {
     name: "volume_ringer_api_hardening"
@@ -249,3 +289,13 @@
     bug: "296232417"
     is_fixed_read_only: true
 }
+
+flag {
+    name: "register_volume_callback_api_hardening"
+    namespace: "media_audio"
+    description:
+            "Add modify audio settings privilege permission to un/register volume group "
+            "callback APIs"
+    bug: "402502314"
+    is_fixed_read_only: true
+}
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 1ce4d00..3278a44 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -5,6 +5,16 @@
 package: "com.android.media.audioserver"
 container: "system"
 
+# shipped 24Q3
+flag {
+    name: "conditionally_ignore_preferred_input_device"
+    namespace: "media_audio"
+    description:
+        "Allows to ignore preferred device requests in getInputDeviceForAttributes() depending "
+        "on the calling context."
+    bug: "303079083"
+}
+
 flag {
     name: "direct_track_reprioritization"
     namespace: "media_audio"
@@ -29,6 +39,14 @@
 }
 
 flag {
+    name: "enable_gmap_mode"
+    namespace: "media_audio"
+    description: "enable track metadata tag generation for GMAP."
+    bug: "366456949"
+}
+
+# shipped 24Q3
+flag {
     name: "fdtostring_timeout_fix"
     namespace: "media_audio"
     description: "Improve fdtostring implementation to properly handle timing out."
@@ -36,6 +54,14 @@
 }
 
 flag {
+    name: "fix_aaudio_stream_reopen_in_libaudiohal_aidl"
+    namespace: "media_audio"
+    description:
+        "Support reopening of AAudio streams in the libaudiohal@aidl layer"
+    bug: "274456992"
+}
+
+flag {
     name: "fix_call_audio_patch"
     namespace: "media_audio"
     description:
diff --git a/media/audio/aconfig/soundtrigger.aconfig b/media/audio/aconfig/soundtrigger.aconfig
index 5233119..498e2bc 100644
--- a/media/audio/aconfig/soundtrigger.aconfig
+++ b/media/audio/aconfig/soundtrigger.aconfig
@@ -6,6 +6,14 @@
 container: "system"
 
 flag {
+    name: "detection_service_paused_resumed_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding SoundTriggerDetectionService onRecognitionPaused and onRecognitionResumed APIs to SystemApi"
+    bug: "392968319"
+}
+
+flag {
     name: "generic_model_api"
     is_exported: true
     namespace: "media_audio"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 99b5381..af39904 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -364,7 +364,7 @@
         DEFINE_OUTPUT_LAYOUT(7POINT1),
         DEFINE_OUTPUT_LAYOUT(7POINT1POINT2),
         DEFINE_OUTPUT_LAYOUT(7POINT1POINT4),
-        DEFINE_OUTPUT_LAYOUT(13POINT_360RA),
+        DEFINE_OUTPUT_LAYOUT(13POINT0),
         DEFINE_OUTPUT_LAYOUT(22POINT2),
         DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_A),
         DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_A),
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 2e1eb8c..249b18c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -26,7 +26,7 @@
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 47b48e3..47a7625 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -32,7 +32,6 @@
         "libaaudioservice",
         "libaudioflinger",
         "libaudiopolicyservice",
-        "libmedialogservice",
     ],
 
     shared_libs: [
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index 8bdb86e..d4680df 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -34,6 +34,12 @@
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
 
+on property:init.svc.audioserver=running && property:vts.native_server.on=1
+    # See b/378773354. To ensure the audioserver disable when
+    # running test suite, this would cover the double start
+    # request from init that caused test flaky.
+    stop audioserver
+
 on property:init.svc.audioserver=running
     start vendor.audio-hal
     start vendor.audio-hal-aidl
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 4f5b95d..12cb8cb 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -40,7 +40,6 @@
 #include "AudioPolicyService.h"
 #include "AAudioService.h"
 #include "utility/AAudioUtilities.h"
-#include "MediaLogService.h"
 
 using namespace android;
 
@@ -48,7 +47,7 @@
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
-int main(int argc __unused, char **argv)
+int main(int argc __unused, char **argv __unused)
 {
     ALOGD("%s: starting", __func__);
     const auto startTime = std::chrono::steady_clock::now();
@@ -60,156 +59,70 @@
 
     signal(SIGPIPE, SIG_IGN);
 
-#if 1
-    // FIXME See bug 165702394 and bug 168511485
-    const bool doLog = false;
-#else
-    bool doLog = (bool) property_get_bool("ro.test_harness", 0);
-#endif
+    android::hardware::configureRpcThreadpool(4, false /*callerWillJoin*/);
 
-    pid_t childPid;
-    // FIXME The advantage of making the process containing media.log service the parent process of
-    // the process that contains the other audio services, is that it allows us to collect more
-    // detailed information such as signal numbers, stop and continue, resource usage, etc.
-    // But it is also more complex.  Consider replacing this by independent processes, and using
-    // binder on death notification instead.
-    if (doLog && (childPid = fork()) != 0) {
-        // media.log service
-        //prctl(PR_SET_NAME, (unsigned long) "media.log", 0, 0, 0);
-        // unfortunately ps ignores PR_SET_NAME for the main thread, so use this ugly hack
-        strcpy(argv[0], "media.log");
-        sp<ProcessState> proc(ProcessState::self());
-        MediaLogService::instantiate();
-        ProcessState::self()->startThreadPool();
-        IPCThreadState::self()->joinThreadPool();
-        for (;;) {
-            siginfo_t info;
-            int ret = TEMP_FAILURE_RETRY(waitid(P_PID, childPid, &info,
-                                                WEXITED | WSTOPPED | WCONTINUED));
-            if (ret < 0) {
-                break;
-            }
-            char buffer[32];
-            const char *code;
-            switch (info.si_code) {
-            case CLD_EXITED:
-                code = "CLD_EXITED";
-                break;
-            case CLD_KILLED:
-                code = "CLD_KILLED";
-                break;
-            case CLD_DUMPED:
-                code = "CLD_DUMPED";
-                break;
-            case CLD_STOPPED:
-                code = "CLD_STOPPED";
-                break;
-            case CLD_TRAPPED:
-                code = "CLD_TRAPPED";
-                break;
-            case CLD_CONTINUED:
-                code = "CLD_CONTINUED";
-                break;
-            default:
-                snprintf(buffer, sizeof(buffer), "unknown (%d)", info.si_code);
-                code = buffer;
-                break;
-            }
-            struct rusage usage;
-            getrusage(RUSAGE_CHILDREN, &usage);
-            ALOG(LOG_ERROR, "media.log", "pid %d status %d code %s user %ld.%03lds sys %ld.%03lds",
-                    info.si_pid, info.si_status, code,
-                    usage.ru_utime.tv_sec, usage.ru_utime.tv_usec / 1000,
-                    usage.ru_stime.tv_sec, usage.ru_stime.tv_usec / 1000);
-            sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder = sm->getService(String16("media.log"));
-            if (binder != 0) {
-                Vector<String16> args;
-                binder->dump(-1, args);
-            }
-            switch (info.si_code) {
-            case CLD_EXITED:
-            case CLD_KILLED:
-            case CLD_DUMPED: {
-                ALOG(LOG_INFO, "media.log", "exiting");
-                _exit(0);
-                // not reached
-                }
-            default:
-                break;
-            }
-        }
+    // Ensure threads for possible callbacks.  Note that get_audio_flinger() does
+    // this automatically when called from AudioPolicy, but we do this anyways here.
+    ProcessState::self()->startThreadPool();
+
+    // Instantiating AudioFlinger (making it public, e.g. through ::initialize())
+    // and then instantiating AudioPolicy (and making it public)
+    // leads to situations where AudioFlinger is accessed remotely before
+    // AudioPolicy is initialized.  Not only might this
+    // cause inaccurate results, but if AudioPolicy has slow audio HAL
+    // initialization, it can cause a TimeCheck abort to occur on an AudioFlinger
+    // call which tries to access AudioPolicy.
+    //
+    // We create AudioFlinger and AudioPolicy locally then make it public to ServiceManager.
+    // This requires both AudioFlinger and AudioPolicy to be in-proc.
+    //
+    const auto af = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(af);
+    ALOGD("%s: AudioFlinger created", __func__);
+    ALOGW_IF(AudioSystem::setLocalAudioFlinger(af) != OK,
+            "%s: AudioSystem already has an AudioFlinger instance!", __func__);
+    const auto aps = sp<AudioPolicyService>::make();
+    af->initAudioPolicyLocal(aps);
+    ALOGD("%s: AudioPolicy created", __func__);
+    ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+             "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+    // Start initialization of internally managed audio objects such as Device Effects.
+    aps->onAudioSystemReady();
+
+    // Add AudioFlinger and AudioPolicy to ServiceManager.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME), afAdapter,
+            false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+    sm->addService(String16(AudioPolicyService::getServiceName()), aps,
+            false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    // AAudioService should only be used in OC-MR1 and later.
+    // And only enable the AAudioService if the system MMAP policy explicitly allows it.
+    // This prevents a client from misusing AAudioService when it is not supported.
+    // If we cannot get audio flinger here, there must be some serious problems. In that case,
+    // attempting to call audio flinger on a null pointer could make the process crash
+    // and attract attentions.
+    std::vector<AudioMMapPolicyInfo> policyInfos;
+    status_t status = AudioSystem::getMmapPolicyInfos(
+            AudioMMapPolicyType::DEFAULT, &policyInfos);
+    // Initialize aaudio service when querying mmap policy succeeds and
+    // any of the policy supports MMAP.
+    if (status == NO_ERROR &&
+        std::any_of(policyInfos.begin(), policyInfos.end(), [](const auto& info) {
+                return info.mmapPolicy == AudioMMapPolicy::AUTO ||
+                       info.mmapPolicy == AudioMMapPolicy::ALWAYS;
+        })) {
+        AAudioService::instantiate();
     } else {
-        // all other services
-        if (doLog) {
-            prctl(PR_SET_PDEATHSIG, SIGKILL);   // if parent media.log dies before me, kill me also
-            setpgid(0, 0);                      // but if I die first, don't kill my parent
-        }
-        android::hardware::configureRpcThreadpool(4, false /*callerWillJoin*/);
-
-        // Ensure threads for possible callbacks.  Note that get_audio_flinger() does
-        // this automatically when called from AudioPolicy, but we do this anyways here.
-        ProcessState::self()->startThreadPool();
-
-        // Instantiating AudioFlinger (making it public, e.g. through ::initialize())
-        // and then instantiating AudioPolicy (and making it public)
-        // leads to situations where AudioFlinger is accessed remotely before
-        // AudioPolicy is initialized.  Not only might this
-        // cause inaccurate results, but if AudioPolicy has slow audio HAL
-        // initialization, it can cause a TimeCheck abort to occur on an AudioFlinger
-        // call which tries to access AudioPolicy.
-        //
-        // We create AudioFlinger and AudioPolicy locally then make it public to ServiceManager.
-        // This requires both AudioFlinger and AudioPolicy to be in-proc.
-        //
-        const auto af = sp<AudioFlinger>::make();
-        const auto afAdapter = sp<AudioFlingerServerAdapter>::make(af);
-        ALOGD("%s: AudioFlinger created", __func__);
-        ALOGW_IF(AudioSystem::setLocalAudioFlinger(af) != OK,
-                "%s: AudioSystem already has an AudioFlinger instance!", __func__);
-        const auto aps = sp<AudioPolicyService>::make();
-        af->initAudioPolicyLocal(aps);
-        ALOGD("%s: AudioPolicy created", __func__);
-        ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
-                 "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
-
-        // Start initialization of internally managed audio objects such as Device Effects.
-        aps->onAudioSystemReady();
-
-        // Add AudioFlinger and AudioPolicy to ServiceManager.
-        sp<IServiceManager> sm = defaultServiceManager();
-        sm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME), afAdapter,
-                false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
-        sm->addService(String16(AudioPolicyService::getServiceName()), aps,
-                false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
-
-        // AAudioService should only be used in OC-MR1 and later.
-        // And only enable the AAudioService if the system MMAP policy explicitly allows it.
-        // This prevents a client from misusing AAudioService when it is not supported.
-        // If we cannot get audio flinger here, there must be some serious problems. In that case,
-        // attempting to call audio flinger on a null pointer could make the process crash
-        // and attract attentions.
-        std::vector<AudioMMapPolicyInfo> policyInfos;
-        status_t status = AudioSystem::getMmapPolicyInfos(
-                AudioMMapPolicyType::DEFAULT, &policyInfos);
-        // Initialize aaudio service when querying mmap policy succeeds and
-        // any of the policy supports MMAP.
-        if (status == NO_ERROR &&
-            std::any_of(policyInfos.begin(), policyInfos.end(), [](const auto& info) {
-                    return info.mmapPolicy == AudioMMapPolicy::AUTO ||
-                           info.mmapPolicy == AudioMMapPolicy::ALWAYS;
-            })) {
-            AAudioService::instantiate();
-        } else {
-            ALOGD("%s: Do not init aaudio service, status %d, policy info size %zu",
-                  __func__, status, policyInfos.size());
-        }
-        const auto endTime = std::chrono::steady_clock::now();
-        af->startupFinished();
-        using FloatMillis = std::chrono::duration<float, std::milli>;
-        const float timeTaken = std::chrono::duration_cast<FloatMillis>(
-                endTime - startTime).count();
-        ALOGI("%s: initialization done in %.3f ms, joining thread pool", __func__, timeTaken);
-        IPCThreadState::self()->joinThreadPool();
+        ALOGD("%s: Do not init aaudio service, status %d, policy info size %zu",
+              __func__, status, policyInfos.size());
     }
+    const auto endTime = std::chrono::steady_clock::now();
+    af->startupFinished();
+    using FloatMillis = std::chrono::duration<float, std::milli>;
+    const float timeTaken = std::chrono::duration_cast<FloatMillis>(
+            endTime - startTime).count();
+    ALOGI("%s: initialization done in %.3f ms, joining thread pool", __func__, timeTaken);
+    IPCThreadState::self()->joinThreadPool();
 }
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index c770d0c..921f60a 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -294,6 +294,13 @@
       mDeviceApiLevel(android_get_device_api_level()) {
 }
 
+C2SoftAacDec::C2SoftAacDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftAacDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAacDec::~C2SoftAacDec() {
     onRelease();
 }
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index f85d45f..452e61d 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -18,7 +18,7 @@
 #define ANDROID_C2_SOFT_AAC_DEC_H_
 
 #include <SimpleC2Component.h>
-
+#include <util/C2InterfaceHelper.h>
 
 #include "aacdecoder_lib.h"
 #include "DrcPresModeWrap.h"
@@ -29,6 +29,8 @@
     class IntfImpl;
 
     C2SoftAacDec(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftAacDec(const char *name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftAacDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 721a12a..14a30a0 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -155,12 +155,18 @@
       mNumBytesPerInputFrame(0u),
       mOutBufferSize(0u),
       mSentCodecSpecificData(false),
-      mInputSize(0),
       mSignalledError(false),
       mOutIndex(0u),
       mRemainderLen(0u) {
 }
 
+C2SoftAacEnc::C2SoftAacEnc(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftAacEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAacEnc::~C2SoftAacEnc() {
     onReset();
 }
@@ -180,7 +186,6 @@
 
 c2_status_t C2SoftAacEnc::onStop() {
     mSentCodecSpecificData = false;
-    mInputSize = 0u;
     mNextFrameTimestampUs.reset();
     mLastFrameEndTimestampUs.reset();
     mSignalledError = false;
@@ -205,7 +210,6 @@
         }
     }
     mSentCodecSpecificData = false;
-    mInputSize = 0u;
     mNextFrameTimestampUs.reset();
     mLastFrameEndTimestampUs.reset();
     mRemainderLen = 0;
@@ -401,7 +405,7 @@
         MaybeLogTimestampWarning(mLastFrameEndTimestampUs->peekll(), inputTimestampUs.peekll());
         inputTimestampUs = *mLastFrameEndTimestampUs;
     }
-    if (capacity > 0) {
+    if (capacity > 0 || eos) {
         if (!mNextFrameTimestampUs) {
             mNextFrameTimestampUs = work->input.ordinal.timestamp;
         }
@@ -409,14 +413,6 @@
                 + (capacity / sizeof(int16_t) * 1000000ll / channelCount / sampleRate);
     }
 
-    size_t numFrames =
-        (mRemainderLen + capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
-        / mNumBytesPerInputFrame;
-    ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu "
-          "mNumBytesPerInputFrame = %u inputTS = %lld remaining = %zu",
-          capacity, mInputSize, numFrames, mNumBytesPerInputFrame, inputTimestampUs.peekll(),
-          mRemainderLen);
-
     std::shared_ptr<C2LinearBlock> block;
     std::unique_ptr<C2WriteView> wView;
     uint8_t *outPtr = temp;
@@ -509,8 +505,9 @@
             inargs.numInSamples = 0;
         }
     }
-    while (encoderErr == AACENC_OK && inargs.numInSamples >= channelCount) {
-        if (numFrames && !block) {
+    int processedSampleCntInCurrBatch = 0;
+    while (encoderErr == AACENC_OK && (inargs.numInSamples >= channelCount || eos)) {
+        if (!block) {
             C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
             // TODO: error handling, proper usage, etc.
             c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
@@ -523,7 +520,6 @@
             wView.reset(new C2WriteView(block->map().get()));
             outPtr = wView->data();
             outAvailable = wView->size();
-            --numFrames;
         }
 
         memset(&outargs, 0, sizeof(outargs));
@@ -531,6 +527,10 @@
         outBuffer[0] = outPtr;
         outBufferSize[0] = outAvailable;
 
+        // flush
+        if (eos && inargs.numInSamples < channelCount) {
+            inargs.numInSamples = -1;
+        }
         encoderErr = aacEncEncode(mAACEncoder,
                                   &inBufDesc,
                                   &outBufDesc,
@@ -539,15 +539,16 @@
 
         if (encoderErr == AACENC_OK) {
             if (outargs.numOutBytes > 0) {
-                mInputSize = 0;
-                int consumed = (capacity / sizeof(int16_t)) - inargs.numInSamples
-                        + outargs.numInSamples;
-                ALOGV("consumed = %d, capacity = %zu, inSamples = %d, outSamples = %d",
-                      consumed, capacity, inargs.numInSamples, outargs.numInSamples);
+                processedSampleCntInCurrBatch += mNumBytesPerInputFrame / sizeof(int16_t);
+                ALOGV("processedSampleCntInCurrBatch = %d, capacity = %zu, inSamples = %d, "
+                      "outSamples = %d",
+                      processedSampleCntInCurrBatch, capacity, inargs.numInSamples,
+                      outargs.numInSamples);
                 c2_cntr64_t currentFrameTimestampUs = *mNextFrameTimestampUs;
-                mNextFrameTimestampUs = inputTimestampUs
-                        + (consumed * 1000000ll / channelCount / sampleRate);
-                std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
+                mNextFrameTimestampUs = inputTimestampUs + (processedSampleCntInCurrBatch *
+                                                            1000000ll / channelCount / sampleRate);
+                std::shared_ptr<C2Buffer> buffer =
+                        createLinearBuffer(block, 0, outargs.numOutBytes);
 #if 0
                 hexdump(outPtr, std::min(outargs.numOutBytes, 256));
 #endif
@@ -556,8 +557,6 @@
                 block.reset();
 
                 outputBuffers.push_back({buffer, currentFrameTimestampUs});
-            } else {
-                mInputSize += outargs.numInSamples * sizeof(int16_t);
             }
 
             if (inBuffer[0] == mRemainder) {
@@ -575,44 +574,8 @@
             inBufferSize[0] = 0;
             inargs.numInSamples = 0;
         }
-        ALOGV("encoderErr = %d mInputSize = %zu "
-              "inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
-              encoderErr, mInputSize, inargs.numInSamples, mNextFrameTimestampUs->peekll());
-    }
-    if (eos && inBufferSize[0] > 0) {
-        if (numFrames && !block) {
-            C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-            // TODO: error handling, proper usage, etc.
-            c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
-            if (err != C2_OK) {
-                ALOGE("fetchLinearBlock failed : err = %d", err);
-                work->result = C2_NO_MEMORY;
-                return;
-            }
-
-            wView.reset(new C2WriteView(block->map().get()));
-            outPtr = wView->data();
-            outAvailable = wView->size();
-            --numFrames;
-        }
-
-        memset(&outargs, 0, sizeof(outargs));
-
-        outBuffer[0] = outPtr;
-        outBufferSize[0] = outAvailable;
-
-        // Flush
-        inargs.numInSamples = -1;
-
-        (void)aacEncEncode(mAACEncoder,
-                           &inBufDesc,
-                           &outBufDesc,
-                           &inargs,
-                           &outargs);
-
-        // after flush, discard remaining input bytes.
-        inBuffer[0] = nullptr;
-        inBufferSize[0] = 0;
+        ALOGV("encoderErr = %d, inargs.numInSamples = %d, mNextFrameTimestampUs = %lld", encoderErr,
+              inargs.numInSamples, mNextFrameTimestampUs->peekll());
     }
 
     if (inBufferSize[0] > 0) {
@@ -670,7 +633,6 @@
 
     (void)pool;
     mSentCodecSpecificData = false;
-    mInputSize = 0u;
     mNextFrameTimestampUs.reset();
     mLastFrameEndTimestampUs.reset();
 
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index c79526c..328a5f6 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -21,6 +21,7 @@
 #include <optional>
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "aacenc_lib.h"
 
@@ -31,6 +32,8 @@
     class IntfImpl;
 
     C2SoftAacEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftAacEnc(const char *name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftAacEnc();
 
     // From SimpleC2Component
@@ -58,7 +61,6 @@
     UINT mOutBufferSize;
 
     bool mSentCodecSpecificData;
-    size_t mInputSize;
     std::optional<c2_cntr64_t> mNextFrameTimestampUs;
     std::optional<c2_cntr64_t> mLastFrameEndTimestampUs;
 
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index e92d38d..4310fc3 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -129,6 +129,12 @@
     mIsWide = true;
 #endif
 }
+C2SoftAmrDec::C2SoftAmrDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftAmrDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
 
 C2SoftAmrDec::~C2SoftAmrDec() {
     (void)onRelease();
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h
index afe1537..76d9ef3 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_AMR_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #include "gsmamr_dec.h"
 #include "pvamrwbdecoder.h"
 
@@ -29,6 +30,8 @@
 
     C2SoftAmrDec(const char *name, c2_node_id_t id,
               const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftAmrDec(const char *name, c2_node_id_t id,
+              const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftAmrDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index 7afea91..3fab7e1 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -104,6 +104,11 @@
       mSidState(nullptr) {
 }
 
+C2SoftAmrNbEnc::C2SoftAmrNbEnc(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftAmrNbEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAmrNbEnc::~C2SoftAmrNbEnc() {
     onRelease();
 }
@@ -229,7 +234,7 @@
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
-    while (inPos < inSize) {
+    while (inPos < inSize || eos) {
         const uint8_t *inPtr = rView.data() + inOffset;
         int validSamples = mFilledLen / sizeof(int16_t);
         if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) {
@@ -240,11 +245,10 @@
             memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos));
             mFilledLen += (inSize - inPos);
             inPos += (inSize - inPos);
-            if (eos) {
+            if (eos && (mFilledLen > 0)) {
                 validSamples = mFilledLen / sizeof(int16_t);
                 memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen));
             } else break;
-
         }
         Frame_Type_3GPP frameType;
         int numEncBytes = AMREncode(mEncState, mSidState, mMode, mInputFrame,
@@ -278,7 +282,6 @@
     if (eos) {
         mSignalledOutputEos = true;
         ALOGV("signalled EOS");
-        if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen);
     }
 }
 
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
index 4920b23..54cd1c9 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_AMR_NB_ENC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "gsmamr_enc.h"
 
@@ -28,6 +29,8 @@
     class IntfImpl;
     C2SoftAmrNbEnc(const char* name, c2_node_id_t id,
                    const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftAmrNbEnc(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftAmrNbEnc();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 29b1040..efbd22f 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -106,6 +106,11 @@
       mMemOperator(nullptr) {
 }
 
+C2SoftAmrWbEnc::C2SoftAmrWbEnc(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftAmrWbEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAmrWbEnc::~C2SoftAmrWbEnc() {
     onRelease();
 }
@@ -311,7 +316,7 @@
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
-    while (inPos < inSize) {
+    while (inPos < inSize || eos) {
         const uint8_t *inPtr = rView.data() + inOffset;
         int validSamples = mFilledLen / sizeof(int16_t);
         if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) {
@@ -322,7 +327,7 @@
             memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos));
             mFilledLen += (inSize - inPos);
             inPos += (inSize - inPos);
-            if (eos) {
+            if (eos && (mFilledLen > 0)) {
                 validSamples = mFilledLen / sizeof(int16_t);
                 memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen));
             } else break;
@@ -352,7 +357,6 @@
     if (eos) {
         mSignalledOutputEos = true;
         ALOGV("signalled EOS");
-        if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen);
     }
 }
 
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
index 72990c3..6f82eb7 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
@@ -19,6 +19,7 @@
 
 #include <SimpleC2Component.h>
 
+#include "util/C2InterfaceHelper.h"
 #include "voAMRWB.h"
 
 namespace android {
@@ -28,6 +29,8 @@
     class IntfImpl;
     C2SoftAmrWbEnc(const char* name, c2_node_id_t id,
                    const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftAmrWbEnc(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftAmrWbEnc();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index 257cf4e..83066c7 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -12,6 +12,7 @@
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
     // coordinated with frameworks/av/media/codec2/components/gav1/Android.bp
@@ -29,6 +30,7 @@
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
     static_libs: ["libaom"],
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 0eb47f4..f407cdd 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -264,6 +264,11 @@
     mTimeStart = mTimeEnd = systemTime();
 }
 
+C2SoftAomDec::C2SoftAomDec(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftAomDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAomDec::~C2SoftAomDec() {
     onRelease();
 }
diff --git a/media/codec2/components/aom/C2SoftAomDec.h b/media/codec2/components/aom/C2SoftAomDec.h
index 8b953fe..d7f10fc 100644
--- a/media/codec2/components/aom/C2SoftAomDec.h
+++ b/media/codec2/components/aom/C2SoftAomDec.h
@@ -20,6 +20,7 @@
 #include <inttypes.h>
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #include "aom/aom_decoder.h"
 #include "aom/aomdx.h"
 
@@ -30,6 +31,8 @@
 
     C2SoftAomDec(const char* name, c2_node_id_t id,
                  const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftAomDec(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& intfImpl);
     virtual ~C2SoftAomDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 93009c4..c348ac3 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -397,6 +397,11 @@
     ALOGV("Constructor");
 }
 
+C2SoftAomEnc::C2SoftAomEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftAomEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAomEnc::~C2SoftAomEnc() {
     ALOGV("Destructor");
     onRelease();
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 067b04f..1fbac8b 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -34,6 +34,8 @@
     class IntfImpl;
 
     C2SoftAomEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftAomEnc(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& helper);
 
     // From SimpleC2Component
     c2_status_t onInit() override final;
diff --git a/media/codec2/components/apv/Android.bp b/media/codec2/components/apv/Android.bp
index f565978..ce37376 100644
--- a/media/codec2/components/apv/Android.bp
+++ b/media/codec2/components/apv/Android.bp
@@ -12,23 +12,26 @@
     name: "libcodec2_soft_apvenc",
     defaults: [
         "libcodec2_soft-defaults",
-        "libcodec2_soft_sanitize_signed-defaults",
         "libcodec2_soft_sanitize_cfi-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
     ],
 
     static_libs: [
-        "libopenapv",
         "android.media.swcodec.flags-aconfig-cc",
+        "libopenapv",
     ],
 
-    srcs: ["C2SoftApvEnc.cpp"],
+    srcs: [
+        "C2SoftApvEnc.cpp",
+        "isAtLeastRelease.cpp",
+    ],
 
     cflags: [
         "-DOAPV_STATIC_DEFINE",
-        "-Wno-unused-variable",
-        "-Wno-unused-parameter",
-        "-Wno-unused-function",
         "-Wno-reorder-ctor",
+        "-Wno-unused-function",
+        "-Wno-unused-parameter",
+        "-Wno-unused-variable",
     ],
 }
 
@@ -37,22 +40,25 @@
     name: "libcodec2_soft_apvdec",
     defaults: [
         "libcodec2_soft-defaults",
-        "libcodec2_soft_sanitize_signed-defaults",
         "libcodec2_soft_sanitize_cfi-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
     ],
 
     static_libs: [
-        "libopenapv",
         "android.media.swcodec.flags-aconfig-cc",
+        "libopenapv",
     ],
 
-    srcs: ["C2SoftApvDec.cpp"],
+    srcs: [
+        "C2SoftApvDec.cpp",
+        "isAtLeastRelease.cpp",
+    ],
 
     cflags: [
         "-DOAPV_STATIC_DEFINE",
-        "-Wno-unused-variable",
-        "-Wno-unused-parameter",
-        "-Wno-unused-function",
         "-Wno-reorder-ctor",
+        "-Wno-unused-function",
+        "-Wno-unused-parameter",
+        "-Wno-unused-variable",
     ],
 }
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
index 77305ce..4c69b11 100644
--- a/media/codec2/components/apv/C2SoftApvDec.cpp
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -30,6 +30,7 @@
 #include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
 #include "C2SoftApvDec.h"
+#include "isAtLeastRelease.h"
 
 #include <cutils/properties.h>
 
@@ -37,6 +38,8 @@
 
 #define MAX_NUM_FRMS (1)  // supports only 1-frame output
 #define FRM_IDX (0)       // supports only 1-frame output
+#define MAX_SUPPORTED_WIDTH (4096)
+#define MAX_SUPPORTED_HEIGHT (4096)
 // check generic frame or not
 #define IS_NON_AUX_FRM(frm)                              \
     (((frm)->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) || \
@@ -45,6 +48,7 @@
 #define IS_AUX_FRM(frm) (!(IS_NON_AUX_FRM(frm)))
 #define OUTPUT_CSP_NATIVE (0)
 #define OUTPUT_CSP_P210 (1)
+#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
 
 namespace android {
 namespace {
@@ -52,6 +56,7 @@
 constexpr uint32_t kDefaultOutputDelay = 8;
 constexpr uint32_t kMaxOutputDelay = 16;
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+constexpr int32_t kDefaultSoftApvDecNumThreads = 1;
 }  // namespace
 
 class C2SoftApvDec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -73,8 +78,8 @@
         addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                              .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                              .withFields({
-                                     C2F(mSize, width).inRange(2, 4096),
-                                     C2F(mSize, height).inRange(2, 4096),
+                                     C2F(mSize, width).inRange(2, MAX_SUPPORTED_WIDTH),
+                                     C2F(mSize, height).inRange(2, MAX_SUPPORTED_HEIGHT),
                              })
                              .withSetter(SizeSetter)
                              .build());
@@ -282,6 +287,9 @@
         if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
             pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
         }
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_RGBA_1010102)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
+        }
 
         // If color format surface isn't added to supported formats, there is no way to know
         // when the color-format is configured to surface. This is necessary to be able to
@@ -495,20 +503,23 @@
                            const std::shared_ptr<IntfImpl>& intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mDecHandle(nullptr),
       mOutBufferFlush(nullptr),
-      mIvColorformat(IV_YUV_420P),
       mOutputDelay(kDefaultOutputDelay),
-      mHeaderDecoded(false),
       mOutIndex(0u),
       mHalPixelFormat(HAL_PIXEL_FORMAT_YV12),
       mWidth(320),
       mHeight(240),
       mSignalledOutputEos(false),
-      mSignalledError(false) {
-    oapvdHandle = NULL;
-    oapvmHandle = NULL;
-    outputCsp = OUTPUT_CSP_NATIVE;
+      mSignalledError(false),
+      mDecHandle(nullptr),
+      mMetadataHandle(nullptr),
+      mOutCsp(OUTPUT_CSP_P210) {
+    memset(&mOutFrames, 0, sizeof(oapv_frms_t));
+}
+
+C2SoftApvDec::C2SoftApvDec(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftApvDec(name, id, std::make_shared<IntfImpl>(helper)) {
 }
 
 C2SoftApvDec::~C2SoftApvDec() {
@@ -535,18 +546,18 @@
 
 status_t C2SoftApvDec::deleteDecoder() {
     ALOGV("%s", __FUNCTION__);
-    if (oapvdHandle) {
-        oapvd_delete(oapvdHandle);
-        oapvdHandle = NULL;
+    if (mDecHandle) {
+        oapvd_delete(mDecHandle);
+        mDecHandle = nullptr;
     }
-    if (oapvmHandle) {
-        oapvm_delete(oapvmHandle);
-        oapvmHandle = NULL;
+    if (mMetadataHandle) {
+        oapvm_delete(mMetadataHandle);
+        mMetadataHandle = nullptr;
     }
-    for (int i = 0; i < ofrms.num_frms; i++) {
-        if (ofrms.frm[i].imgb != NULL) {
-            ofrms.frm[i].imgb->release(ofrms.frm[i].imgb);
-            ofrms.frm[i].imgb = NULL;
+    for (int i = 0; i < mOutFrames.num_frms; i++) {
+        if (mOutFrames.frm[i].imgb != NULL) {
+            mOutFrames.frm[i].imgb->release(mOutFrames.frm[i].imgb);
+            mOutFrames.frm[i].imgb = NULL;
         }
     }
     return OK;
@@ -587,22 +598,23 @@
         mPixelFormatInfo = mIntf->getPixelFormat_l();
         ALOGW("Hal pixel format = %d", mPixelFormatInfo->value);
     }
-    memset(&cdesc, 0, sizeof(oapvd_cdesc_t));
 
-    cdesc.threads = 1;  // default
-    oapvdHandle = oapvd_create(&cdesc, &ret);
-    if (oapvdHandle == NULL) {
+    oapvd_cdesc_t cdesc;
+    memset(&cdesc, 0, sizeof(oapvd_cdesc_t));
+    cdesc.threads = kDefaultSoftApvDecNumThreads;
+    mDecHandle = oapvd_create(&cdesc, &ret);
+    if (mDecHandle == nullptr) {
         ALOGE("ERROR: cannot create APV decoder (err=%d)\n", ret);
         return C2_NO_INIT;
     }
 
-    memset(&ofrms, 0, sizeof(oapv_frms_t));
+    memset(&mOutFrames, 0, sizeof(oapv_frms_t));
 
-    oapvmHandle = oapvm_create(&ret);
+    mMetadataHandle = oapvm_create(&ret);
     if (OAPV_FAILED(ret)) {
         ALOGE("oapvm create failed");
-        oapvd_delete(oapvdHandle);
-        oapvdHandle = NULL;
+        oapvd_delete(mDecHandle);
+        mDecHandle = nullptr;
         return C2_NO_INIT;
     }
 
@@ -881,14 +893,17 @@
                                      size_t dstVStride, size_t width, size_t height) {
     for (size_t i = 0; i < height; ++i) {
         for (size_t j = 0; j < width; ++j) {
-            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 8) & 0xFF;
+            dstY[i * dstYStride + j] = (uint8_t)CLIP3(0, 255,
+                            ((((int)srcY[i * srcYStride + j] >> 6) * 255.0) / 1023.0 + 0.5));
         }
     }
 
     for (size_t i = 0; i < height / 2; ++i) {
         for (size_t j = 0; j < width / 2; ++j) {
-            dstV[i * dstVStride + j] = (srcUV[i * srcUVStride * 2 + j * 2] >> 8) & 0xFF;
-            dstU[i * dstUStride + j] = (srcUV[i * srcUVStride * 2 + j * 2 + 1] >> 8) & 0xFF;
+            dstU[i * dstVStride + j] = (uint8_t)CLIP3(0, 255,
+                ((((int)srcUV[i * srcUVStride * 2 + j * 2] >> 6) * 255.0) / 1023.0 + 0.5));
+            dstV[i * dstUStride + j] = (uint8_t)CLIP3(0, 255,
+                ((((int)srcUV[i * srcUVStride * 2 + j * 2 + 1] >> 6) * 255.0) / 1023.0 + 0.5));
         }
     }
 }
@@ -940,23 +955,32 @@
 
         if (OAPV_FAILED(oapvd_info(bitb.addr, bitb.ssize, &aui))) {
             ALOGE("cannot get information from bitstream");
+            work->result = C2_CORRUPTED;
             return;
         }
 
         /* create decoding frame buffers */
-        ofrms.num_frms = aui.num_frms;
-        if (ofrms.num_frms <= 0) {
-            ALOGE("Parse error - no output frame(%d)", ofrms.num_frms);
+        mOutFrames.num_frms = aui.num_frms;
+        if (mOutFrames.num_frms <= 0) {
+            ALOGE("Parse error - no output frame(%d)", mOutFrames.num_frms);
             fillEmptyWork(work);
             return;
         }
-        for (int i = 0; i < ofrms.num_frms; i++) {
-            oapv_frm_info_t* finfo = &aui.frm_info[FRM_IDX];
-            oapv_frm_t* frm = &ofrms.frm[i];
+        bool reportResolutionChange = false;
+        for (int i = 0; i < mOutFrames.num_frms; i++) {
+            oapv_frm_info_t* finfo = &aui.frm_info[i];
+            oapv_frm_t* frm = &mOutFrames.frm[i];
 
-            if (mWidth != finfo->w || mHeight != finfo->w) {
+            if (mWidth != finfo->w || mHeight != finfo->h) {
                 mWidth = finfo->w;
                 mHeight = finfo->h;
+                reportResolutionChange = true;
+            }
+
+            if (mWidth > MAX_SUPPORTED_WIDTH || mHeight > MAX_SUPPORTED_HEIGHT) {
+                ALOGE("Stream resolution of %dx%d is not supported.", mWidth, mHeight);
+                work->result = C2_CORRUPTED;
+                return;
             }
 
             if (frm->imgb != NULL && (frm->imgb->w[0] != finfo->w || frm->imgb->h[0] != finfo->h)) {
@@ -965,7 +989,7 @@
             }
 
             if (frm->imgb == NULL) {
-                if (outputCsp == OUTPUT_CSP_P210) {
+                if (mOutCsp == OUTPUT_CSP_P210) {
                     frm->imgb = imgb_create(finfo->w, finfo->h, OAPV_CS_P210);
                 } else {
                     frm->imgb = imgb_create(finfo->w, finfo->h, finfo->cs);
@@ -979,8 +1003,24 @@
             }
         }
 
+        if (reportResolutionChange) {
+            C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+            if (err == C2_OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(size));
+            } else {
+                ALOGE("Config update size failed");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+
         oapvd_stat_t stat;
-        ret = oapvd_decode(oapvdHandle, &bitb, &ofrms, oapvmHandle, &stat);
+        ret = oapvd_decode(mDecHandle, &bitb, &mOutFrames, mMetadataHandle, &stat);
         if (bitb.ssize != stat.read) {
             ALOGW("decode done, input size: %d, processed size: %d", bitb.ssize, stat.read);
         }
@@ -991,6 +1031,18 @@
             return;
         }
 
+        for(int i = 0; i < stat.aui.num_frms; i++) {
+            oapv_frm_info_t* finfo = &stat.aui.frm_info[i];
+            if(finfo->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) {
+                if(finfo->color_description_present_flag > 0) {
+                    vuiColorAspects.primaries = finfo->color_primaries;
+                    vuiColorAspects.transfer = finfo->transfer_characteristics;
+                    vuiColorAspects.coeffs = finfo->matrix_coefficients;
+                    vuiColorAspects.fullRange = finfo->full_range_flag;
+                }
+            }
+        }
+
         status_t err = outputBuffer(pool, work);
         if (err == NOT_ENOUGH_DATA) {
             if (inSize > 0) {
@@ -1014,13 +1066,7 @@
     }
 }
 
-void C2SoftApvDec::getVuiParams(VuiColorAspects* buffer) {
-    VuiColorAspects vuiColorAspects;
-    vuiColorAspects.primaries = buffer->primaries;
-    vuiColorAspects.transfer = buffer->transfer;
-    vuiColorAspects.coeffs = buffer->coeffs;
-    vuiColorAspects.fullRange = buffer->fullRange;
-
+void C2SoftApvDec::getVuiParams(const std::unique_ptr<C2Work> &work) {
     // convert vui aspects to C2 values if changed
     if (!(vuiColorAspects == mBitstreamColorAspects)) {
         mBitstreamColorAspects = vuiColorAspects;
@@ -1045,7 +1091,165 @@
                 codedAspects.primaries, codedAspects.transfer, codedAspects.matrix,
                 codedAspects.range);
         std::vector<std::unique_ptr<C2SettingResult>> failures;
-        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+        c2_status_t err = mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(
+              C2Param::Copy(codedAspects));
+        } else {
+            ALOGE("Config update colorAspect failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+}
+
+void C2SoftApvDec::getHDRStaticParams(const struct ApvHdrInfo *buffer,
+                                       const std::unique_ptr<C2Work> &work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if(buffer->has_hdr_mdcv) {
+        ALOGV("has hdr mdcv");
+        // hdr_mdcv.primary_chromaticity_* values are in increments of 0.00002 (SMPTE ST 2086).
+        hdrStaticMetadataInfo.mastering.red.x =
+            buffer->hdr_mdcv.primary_chromaticity_x[0] / 50000.0;
+        hdrStaticMetadataInfo.mastering.red.y =
+            buffer->hdr_mdcv.primary_chromaticity_y[0] / 50000.0;
+        hdrStaticMetadataInfo.mastering.green.x =
+            buffer->hdr_mdcv.primary_chromaticity_x[1] / 50000.0;
+        hdrStaticMetadataInfo.mastering.green.y =
+            buffer->hdr_mdcv.primary_chromaticity_y[1] / 50000.0;
+        hdrStaticMetadataInfo.mastering.blue.x =
+            buffer->hdr_mdcv.primary_chromaticity_x[2] / 50000.0;
+        hdrStaticMetadataInfo.mastering.blue.y =
+            buffer->hdr_mdcv.primary_chromaticity_y[2] / 50000.0;
+
+        // hdr_mdcv.white_point_chromaticity_* values are in increments of 0.00002 (SMPTE ST 2086).
+        hdrStaticMetadataInfo.mastering.white.x =
+            buffer->hdr_mdcv.white_point_chromaticity_x / 50000.0;
+        hdrStaticMetadataInfo.mastering.white.y =
+            buffer->hdr_mdcv.white_point_chromaticity_y / 50000.0;
+
+        // hdr_mdcv.max_mastering_luminance is in units of 0.0001 cd/m^2 (SMPTE ST 2086).
+        hdrStaticMetadataInfo.mastering.maxLuminance =
+            buffer->hdr_mdcv.max_mastering_luminance / 10000.0;
+        // hdr_mdcv.min_mastering_luminance is in units of 0.0001 cd/m^2 (SMPTE ST 2086).
+        hdrStaticMetadataInfo.mastering.minLuminance =
+            buffer->hdr_mdcv.min_mastering_luminance / 10000.0;
+        infoPresent = true;
+    }
+
+    if(buffer->has_hdr_cll) {
+        ALOGV("has hdr cll");
+        hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
+        hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
+        infoPresent = true;
+    }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftApvDec::getHDR10PlusInfoData(const struct ApvHdrInfo *buffer,
+                                         const std::unique_ptr<C2Work> &work) {
+    if(!buffer->has_itut_t35) {
+        ALOGV("no itu_t_t35 data");
+        return;
+    }
+
+    std::vector<uint8_t> payload;
+    size_t payloadSize = buffer->itut_t35.payload_size;
+    if (payloadSize > 0) {
+        payload.push_back(buffer->itut_t35.country_code);
+        if (buffer->itut_t35.country_code == 0xFF) {
+            payload.push_back(buffer->itut_t35.country_code_extension_byte);
+        }
+        payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
+                    buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
+    }
+
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+            C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+    if (!hdr10PlusInfo) {
+        ALOGE("Hdr10PlusInfo allocation failed");
+        mSignalledError = true;
+        work->result = C2_NO_MEMORY;
+        return;
+    }
+    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+    // config if hdr10Plus info has changed
+    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+        mHdr10PlusInfo = std::move(hdr10PlusInfo);
+        work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+    }
+}
+
+void C2SoftApvDec::getHdrInfo(struct ApvHdrInfo *hdrInfo, int groupId) {
+    void *pld;
+    int size;
+
+    int ret = oapvm_get(mMetadataHandle, groupId, OAPV_METADATA_MDCV, &pld, &size, nullptr);
+    if(ret == OAPV_OK) {
+        if(size < sizeof(struct ApvHdrInfo::HdrMdcv)) {
+            ALOGW("metadata_mdcv size is smaller than expected");
+            return;
+        }
+        unsigned char *data = (unsigned char *)pld;
+        hdrInfo->has_hdr_mdcv = true;
+        for(int i = 0; i < 3; i++) {
+            hdrInfo->hdr_mdcv.primary_chromaticity_x[i] = (*data++) << 8;
+            hdrInfo->hdr_mdcv.primary_chromaticity_x[i] |= (*data++);
+            hdrInfo->hdr_mdcv.primary_chromaticity_y[i] = (*data++) << 8;
+            hdrInfo->hdr_mdcv.primary_chromaticity_y[i] |= (*data++);
+        }
+        hdrInfo->hdr_mdcv.white_point_chromaticity_x = (*data++) << 8;
+        hdrInfo->hdr_mdcv.white_point_chromaticity_x |= (*data++);
+        hdrInfo->hdr_mdcv.white_point_chromaticity_y = (*data++) << 8;
+        hdrInfo->hdr_mdcv.white_point_chromaticity_y |= (*data++);
+        hdrInfo->hdr_mdcv.max_mastering_luminance =  (*data++) << 24;
+        hdrInfo->hdr_mdcv.max_mastering_luminance |= (*data++) << 16;
+        hdrInfo->hdr_mdcv.max_mastering_luminance |= (*data++) << 8;
+        hdrInfo->hdr_mdcv.max_mastering_luminance |= (*data++);
+        hdrInfo->hdr_mdcv.min_mastering_luminance =  (*data++) << 24;
+        hdrInfo->hdr_mdcv.min_mastering_luminance |= (*data++) << 16;
+        hdrInfo->hdr_mdcv.min_mastering_luminance |= (*data++) << 8;
+        hdrInfo->hdr_mdcv.min_mastering_luminance |= (*data);
+    }
+
+    ret = oapvm_get(mMetadataHandle, groupId, OAPV_METADATA_CLL, &pld, &size, nullptr);
+    if(ret == OAPV_OK) {
+        if(size < sizeof(struct ApvHdrInfo::HdrCll)) {
+            ALOGW("metadata_cll size is smaller than expected");
+            return;
+        }
+        unsigned char *data = (unsigned char *)pld;
+        hdrInfo->has_hdr_cll = true;
+        hdrInfo->hdr_cll.max_cll =  (*data++) << 8;
+        hdrInfo->hdr_cll.max_cll |= (*data++);
+        hdrInfo->hdr_cll.max_fall =  (*data++) << 8;
+        hdrInfo->hdr_cll.max_fall |= (*data);
+    }
+
+    ret = oapvm_get(mMetadataHandle, groupId, OAPV_METADATA_ITU_T_T35, &pld, &size, nullptr);
+    if(ret == OAPV_OK) {
+        char *data = (char *)pld;
+        hdrInfo->has_itut_t35 = true;
+        int readSize = size;
+        hdrInfo->itut_t35.country_code = *data++;
+        readSize--;
+        if(hdrInfo->itut_t35.country_code == 0xFF) {
+            hdrInfo->itut_t35.country_code_extension_byte = *data++;
+            readSize--;
+        }
+        hdrInfo->itut_t35.payload_bytes = data;
+        hdrInfo->itut_t35.payload_size = readSize;
     }
 }
 
@@ -1054,13 +1258,15 @@
     if (!(work && pool)) return BAD_VALUE;
 
     oapv_imgb_t* imgbOutput = nullptr;
+    int groupId = -1;
     std::shared_ptr<C2GraphicBlock> block;
 
-    if (ofrms.num_frms > 0) {
-        for(int i = 0; i < ofrms.num_frms; i++) {
-            oapv_frm_t* frm = &ofrms.frm[0];
+    if (mOutFrames.num_frms > 0) {
+        for(int i = 0; i < mOutFrames.num_frms; i++) {
+            oapv_frm_t* frm = &mOutFrames.frm[i];
             if(frm->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) {
                 imgbOutput = frm->imgb;
+                groupId = frm->group_id;
                 break;
             }
         }
@@ -1074,28 +1280,33 @@
     }
     bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
 
-    // TODO: use bitstream color aspect after vui parsing
-    VuiColorAspects colorAspect;
-    colorAspect.primaries = 2;
-    colorAspect.transfer = 2;
-    colorAspect.coeffs = 2;
-    colorAspect.fullRange = 1;
-    getVuiParams(&colorAspect);
+    getVuiParams(work);
+    struct ApvHdrInfo hdrInfo = {};
+    getHdrInfo(&hdrInfo, groupId);
+    getHDRStaticParams(&hdrInfo, work);
+    getHDR10PlusInfoData(&hdrInfo, work);
+
+    if (mSignalledError) {
+        // 'work' should already have signalled error at this point
+        return C2_CORRUPTED;
+    }
 
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
     std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-    if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
-        mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
-        IntfImpl::Lock lock = mIntf->lock();
-        codedColorAspects = mIntf->getColorAspects_l();
-
-        bool allowRGBA1010102 = false;
-        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
-            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
-            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
-            allowRGBA1010102 = true;
+    if (mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+            format = AHARDWAREBUFFER_FORMAT_YCbCr_P210;
+        } else if (isHalPixelFormatSupported(
+                        (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            format = HAL_PIXEL_FORMAT_YCBCR_P010;
+        } else if (isHalPixelFormatSupported(
+                        (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_RGBA_1010102)) {
+            IntfImpl::Lock lock = mIntf->lock();
+            codedColorAspects = mIntf->getColorAspects_l();
+            format = HAL_PIXEL_FORMAT_RGBA_1010102;
+        } else {
+            format = HAL_PIXEL_FORMAT_YV12;
         }
-        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
     }
 
     if (mHalPixelFormat != format) {
@@ -1145,7 +1356,28 @@
     size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
     size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-    if(format == AHARDWAREBUFFER_FORMAT_YCbCr_P210) {
+    if(format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
+            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcUStride = imgbOutput->s[1] / 2;
+            size_t srcVStride = imgbOutput->s[2] / 2;
+            dstYStride /= 4;
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                ALOGV("OAPV_CS_P210 to RGBA1010102");
+                convertP210ToRGBA1010102((uint32_t *)dstY, srcY, srcU, srcYStride, srcUStride,
+                                           dstYStride, mWidth, mHeight, codedColorAspects);
+            } else {
+                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else {
+            ALOGE("Not supported convert from bitdepth:%d, format: %d, to format: %d",
+                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                  format);
+        }
+    } else if(format == AHARDWAREBUFFER_FORMAT_YCbCr_P210) {
         if(OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
             const uint16_t *srcY = (const uint16_t *)imgbOutput->a[0];
             const uint16_t *srcU = (const uint16_t *)imgbOutput->a[1];
@@ -1215,63 +1447,79 @@
                                      : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
         }
     } else {  // HAL_PIXEL_FORMAT_YV12
-        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
-            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
-            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[1];
-            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[2];
-            size_t srcYStride = imgbOutput->s[0] / 2;
-            size_t srcVStride = imgbOutput->s[1] / 2;
-            size_t srcUStride = imgbOutput->s[2] / 2;
-            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
-                ALOGV("OAPV_CS_YUV420 10bit to YV12");
-                copyBufferFromYUV42010bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
-                                                srcUStride, srcVStride, dstYStride, dstUStride,
-                                                dstVStride, mWidth, mHeight);
-            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
-                ALOGV("OAPV_CS_YUV422 10bit to YV12");
-                copyBufferFromYUV42210bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
-                                                srcUStride, srcVStride, dstYStride, dstUStride,
-                                                dstVStride, mWidth, mHeight);
-            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
-                ALOGV("OAPV_CS_P210 to YV12");
-                copyBufferFromP210ToYV12(dstY, dstU, dstV, srcY, srcV, srcYStride, srcVStride,
-                                         dstYStride, dstUStride, dstVStride, mWidth, mHeight);
-            } else {
-                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
-            }
-        } else if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 8) {
-            const uint8_t* srcY = (const uint8_t*)imgbOutput->a[0];
-            const uint8_t* srcV = (const uint8_t*)imgbOutput->a[1];
-            const uint8_t* srcU = (const uint8_t*)imgbOutput->a[2];
-            size_t srcYStride = imgbOutput->s[0];
-            size_t srcVStride = imgbOutput->s[1];
-            size_t srcUStride = imgbOutput->s[2];
-            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
-                ALOGV("OAPV_CS_YUV420 to YV12");
-                copyBufferFromYUV420ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
-                                           srcUStride, srcVStride, dstYStride, dstUStride,
-                                           dstVStride, mWidth, mHeight);
-            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
-                ALOGV("OAPV_CS_YUV422 to YV12");
-                copyBufferFromYUV422ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
-                                           srcUStride, srcVStride, dstYStride, dstUStride,
-                                           dstVStride, mWidth, mHeight);
-            } else {
-                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
-            }
+        if (!IsI420(wView)) {
+            ALOGE("Only P210 to I420 conversion is supported.");
         } else {
-            ALOGE("Not supported convert from bd:%d, format: %d(%s), to format: %d(%s)",
-                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
-                  OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
-                          ? "YUV420"
-                          : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
-                                                                                    : "UNKNOWN"),
-                  format,
-                  format == HAL_PIXEL_FORMAT_YCBCR_P010
-                          ? "P010"
-                          : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
-                                     ? "YUV420"
-                                     : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+            if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+                const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+                const uint16_t* srcV = (const uint16_t*)imgbOutput->a[1];
+                const uint16_t* srcU = (const uint16_t*)imgbOutput->a[2];
+                size_t srcYStride = imgbOutput->s[0] / 2;
+                size_t srcVStride = imgbOutput->s[1] / 2;
+                size_t srcUStride = imgbOutput->s[2] / 2;
+                if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                    ALOGV("OAPV_CS_YUV420 10bit to YV12");
+                    copyBufferFromYUV42010bitToYV12(
+                        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                        srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
+                        mHeight);
+                } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                    ALOGV("OAPV_CS_YUV422 10bit to YV12");
+                    copyBufferFromYUV42210bitToYV12(
+                        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                        srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
+                        mHeight);
+                } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                    ALOGV("OAPV_CS_P210 to YV12");
+                    copyBufferFromP210ToYV12(dstY, dstU, dstV, srcY, srcV, srcYStride,
+                                       srcVStride, dstYStride, dstUStride,
+                                       dstVStride, mWidth, mHeight);
+                } else {
+                    ALOGE("Not supported convert format : %d",
+                            OAPV_CS_GET_FORMAT(imgbOutput->cs));
+                }
+            } else if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 8) {
+                const uint8_t* srcY = (const uint8_t*)imgbOutput->a[0];
+                const uint8_t* srcV = (const uint8_t*)imgbOutput->a[1];
+                const uint8_t* srcU = (const uint8_t*)imgbOutput->a[2];
+                size_t srcYStride = imgbOutput->s[0];
+                size_t srcVStride = imgbOutput->s[1];
+                size_t srcUStride = imgbOutput->s[2];
+                if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                    ALOGV("OAPV_CS_YUV420 to YV12");
+                    copyBufferFromYUV420ToYV12(dstY, dstU, dstV, srcY, srcU, srcV,
+                                         srcYStride, srcUStride, srcVStride,
+                                         dstYStride, dstUStride, dstVStride,
+                                         mWidth, mHeight);
+                } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                    ALOGV("OAPV_CS_YUV422 to YV12");
+                    copyBufferFromYUV422ToYV12(dstY, dstU, dstV, srcY, srcU, srcV,
+                                         srcYStride, srcUStride, srcVStride,
+                                         dstYStride, dstUStride, dstVStride,
+                                         mWidth, mHeight);
+                } else {
+                    ALOGE("Not supported convert format : %d",
+                        OAPV_CS_GET_FORMAT(imgbOutput->cs));
+                }
+            } else {
+                ALOGE(
+                "Not supported convert from bd:%d, format: %d(%s), to format: "
+                    "%d(%s)",
+                    OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs),
+                    OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                    OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+                        ? "YUV420"
+                        : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422
+                           ? "YUV422"
+                           : "UNKNOWN"),
+                    format,
+                    format == HAL_PIXEL_FORMAT_YCBCR_P010
+                        ? "P010"
+                        : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+                           ? "YUV420"
+                           : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12"
+                                                              : "UNKNOWN")));
+            }
         }
     }
 
@@ -1340,6 +1588,13 @@
         ALOGV("APV SW Codec is not enabled");
         return nullptr;
     }
+
+    bool enabled = isAtLeastRelease(36, "Baklava");
+    ALOGD("isAtLeastRelease(36, Baklava) says enable: %s", enabled ? "yes" : "no");
+    if (!enabled) {
+        return nullptr;
+    }
+
     return new ::android::C2SoftApvDecFactory();
 }
 
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
index 05afdb2..128e9eb 100644
--- a/media/codec2/components/apv/C2SoftApvDec.h
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -20,56 +20,21 @@
 #include <media/stagefright/foundation/ColorUtils.h>
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #include <inttypes.h>
 #include <atomic>
 
 #include "oapv.h"
 #include <C2SoftApvCommon.h>
 
-typedef unsigned int UWORD32;
-
-typedef enum {
-    IV_CHROMA_NA = 0xFFFFFFFF,
-    IV_YUV_420P = 0x1,
-    IV_YUV_422P = 0x2,
-    IV_420_UV_INTL = 0x3,
-    IV_YUV_422IBE = 0x4,
-    IV_YUV_422ILE = 0x5,
-    IV_YUV_444P = 0x6,
-    IV_YUV_411P = 0x7,
-    IV_GRAY = 0x8,
-    IV_RGB_565 = 0x9,
-    IV_RGB_24 = 0xa,
-    IV_YUV_420SP_UV = 0xb,
-    IV_YUV_420SP_VU = 0xc,
-    IV_YUV_422SP_UV = 0xd,
-    IV_YUV_422SP_VU = 0xe
-
-} IV_COLOR_FORMAT_T;
-
-typedef struct {
-    /**
-     * u4_size of the structure
-     */
-    UWORD32 u4_size;
-
-    /**
-     * Pointer to the API function pointer table of the codec
-     */
-    void* pv_fxns;
-
-    /**
-     * Pointer to the handle of the codec
-     */
-    void* pv_codec_handle;
-} iv_obj_t;
-
 namespace android {
 
 struct C2SoftApvDec final : public SimpleC2Component {
     class IntfImpl;
 
     C2SoftApvDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftApvDec(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftApvDec();
 
     // From SimpleC2Component
@@ -102,11 +67,8 @@
                           const std::unique_ptr<C2Work>& work);
 
     std::shared_ptr<IntfImpl> mIntf;
-    iv_obj_t* mDecHandle;
-    uint8_t* mOutBufferFlush;
-    IV_COLOR_FORMAT_T mIvColorformat;
+    uint8_t *mOutBufferFlush;
     uint32_t mOutputDelay;
-    bool mHeaderDecoded;
     std::atomic_uint64_t mOutIndex;
     std::shared_ptr<C2GraphicBlock> mOutBlock;
 
@@ -118,6 +80,10 @@
     uint32_t mHeight;
     bool mSignalledOutputEos;
     bool mSignalledError;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
     // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
     // converting them to C2 values for each frame
     struct VuiColorAspects {
@@ -133,20 +99,60 @@
             coeffs(C2Color::MATRIX_UNSPECIFIED),
             fullRange(C2Color::RANGE_UNSPECIFIED) { }
 
-        bool operator==(const VuiColorAspects &o) {
+        bool operator==(const VuiColorAspects &o) const {
             return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
                 && fullRange == o.fullRange;
         }
     } mBitstreamColorAspects;
+    struct VuiColorAspects vuiColorAspects;
 
-    oapvd_t oapvdHandle;
-    oapvm_t oapvmHandle;
-    oapvd_cdesc_t cdesc;
-    oapv_frms_t ofrms;
+    // HDR info that can be carried in APV bitstream
+    // Section 10.3.1 of APV syntax https://www.ietf.org/archive/id/draft-lim-apv-02.html
+    struct ApvHdrInfo {
+        bool has_hdr_mdcv;
+        bool has_itut_t35;
+        bool has_hdr_cll;
 
-    int outputCsp;
+        ApvHdrInfo()
+            : has_hdr_mdcv(false),
+            has_itut_t35(false),
+            has_hdr_cll(false) { }
 
-    void getVuiParams(VuiColorAspects* buffer);
+        // Master Display Color Volume
+        struct HdrMdcv {
+            uint16_t primary_chromaticity_x[3];
+            uint16_t primary_chromaticity_y[3];
+            uint16_t white_point_chromaticity_x;
+            uint16_t white_point_chromaticity_y;
+            uint32_t max_mastering_luminance;
+            uint32_t min_mastering_luminance;
+        } hdr_mdcv;
+
+        // Content Light Level info
+        struct HdrCll {
+            uint16_t max_cll;
+            uint16_t max_fall;
+        } hdr_cll;
+
+        // ITU-T35 info
+        struct ItutT35 {
+            char country_code;
+            char country_code_extension_byte;
+            char *payload_bytes;
+            int payload_size;
+        } itut_t35;
+    };
+
+    oapvd_t mDecHandle;
+    oapvm_t mMetadataHandle;
+    oapv_frms_t mOutFrames;
+
+    int mOutCsp;
+
+    void getVuiParams(const std::unique_ptr<C2Work> &work);
+    void getHdrInfo(struct ApvHdrInfo *buffer, int id);
+    void getHDRStaticParams(const struct ApvHdrInfo *buffer, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(const struct ApvHdrInfo *buffer, const std::unique_ptr<C2Work>& work);
 
     C2_DO_NOT_COPY(C2SoftApvDec);
 };
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index 9036df1..9a258dc 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -36,6 +36,7 @@
 #include <util/C2InterfaceHelper.h>
 #include <cmath>
 #include "C2SoftApvEnc.h"
+#include "isAtLeastRelease.h"
 
 namespace android {
 
@@ -85,17 +86,6 @@
                              .withSetter(SizeSetter)
                              .build());
 
-        // matches limits in codec library
-        addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
-                             .withDefault(new C2StreamBitrateModeTuning::output(
-                                     0u, C2Config::BITRATE_VARIABLE))
-                             .withFields({C2F(mBitrateMode, value)
-                                                  .oneOf({C2Config::BITRATE_CONST,
-                                                          C2Config::BITRATE_VARIABLE,
-                                                          C2Config::BITRATE_IGNORE})})
-                             .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
-                             .build());
-
         addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                              .withDefault(new C2StreamBitrateInfo::output(0u, 512000))
                              .withFields({C2F(mBitrate, value).inRange(512000, 240000000)})
@@ -718,25 +708,6 @@
         return bandIdc;
     }
 
-    int32_t getBitrateMode_l() const {
-        int32_t bitrateMode = C2Config::BITRATE_CONST;
-
-        switch (mBitrateMode->value) {
-            case C2Config::BITRATE_CONST:
-                bitrateMode = OAPV_RC_CQP;
-                break;
-            case C2Config::BITRATE_VARIABLE:
-                bitrateMode = OAPV_RC_ABR;
-                break;
-            case C2Config::BITRATE_IGNORE:
-                bitrateMode = 0;
-                break;
-            default:
-                ALOGE("Unrecognized bitrate mode: %x", mBitrateMode->value);
-        }
-        return bitrateMode;
-    }
-
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
@@ -763,7 +734,6 @@
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
-    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamQualityTuning::output> mQuality;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
@@ -781,7 +751,12 @@
       mSignalledEos(false),
       mSignalledError(false),
       mOutBlock(nullptr) {
-    reset();
+    resetEncoder();
+}
+
+C2SoftApvEnc::C2SoftApvEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftApvEnc(name, id, std::make_shared<IntfImpl>(helper)) {
 }
 
 C2SoftApvEnc::~C2SoftApvEnc() {
@@ -798,7 +773,7 @@
 
 void C2SoftApvEnc::onReset() {
     releaseEncoder();
-    reset();
+    resetEncoder();
 }
 
 void C2SoftApvEnc::onRelease() {
@@ -828,7 +803,7 @@
     return qp;
 }
 
-c2_status_t C2SoftApvEnc::reset() {
+c2_status_t C2SoftApvEnc::resetEncoder() {
     ALOGV("reset");
     mInitEncoder = false;
     mStarted = false;
@@ -968,18 +943,42 @@
     param.fps_num = (int)(mFrameRate->value * 100);
     param.fps_den = 100;
     param.bitrate = (int)(mBitrate->value / 1000);
-    param.rc_type = mIntf->getBitrateMode_l();
+    param.rc_type = OAPV_RC_ABR;
 
-    int ApvQP = kApvDefaultQP;
-    if (param.rc_type == OAPV_RC_CQP) {
-        ApvQP = getQpFromQuality(mQuality->value);
-        ALOGI("Bitrate mode is CQ, so QP value is derived from Quality. Quality is %d, QP is %d",
-              mQuality->value, ApvQP);
-    }
-    param.qp = ApvQP;
+    param.qp = kApvDefaultQP;
     param.band_idc = mIntf->getBandIdc_l();
     param.profile_idc = mIntf->getProfile_l();
     param.level_idc = mIntf->getLevel_l();
+    mColorAspects = mIntf->getColorAspects_l();
+    ColorAspects sfAspects;
+    if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+        sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+        sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+        sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+        sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+    }
+
+    int32_t isoPrimaries, isoTransfer, isoMatrix;
+    bool isoFullRange;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+        &isoPrimaries, &isoTransfer, &isoMatrix, &isoFullRange);
+    param.color_primaries = isoPrimaries;
+    param.transfer_characteristics = isoTransfer;
+    param.matrix_coefficients = isoMatrix;
+    param.full_range_flag = isoFullRange;
+
+    if (mColorAspects->primaries != C2Color::PRIMARIES_UNSPECIFIED ||
+            mColorAspects->transfer != C2Color::TRANSFER_UNSPECIFIED ||
+            mColorAspects->matrix != C2Color::MATRIX_UNSPECIFIED ||
+            mColorAspects->range != C2Color::RANGE_UNSPECIFIED) {
+        param.color_description_present_flag = 1;
+    }
 }
 
 c2_status_t C2SoftApvEnc::setEncodeArgs(oapv_frms_t* inputFrames, const C2GraphicView* const input,
@@ -994,9 +993,6 @@
     uint8_t* yPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_Y]);
     uint8_t* uPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_U]);
     uint8_t* vPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_V]);
-    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
-    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
     uint32_t width = mSize->width;
     uint32_t height = mSize->height;
@@ -1013,61 +1009,111 @@
     inputFrames->frm[mReceivedFrames].pbu_type = OAPV_PBU_TYPE_PRIMARY_FRAME;
 
     switch (layout.type) {
-        case C2PlanarLayout::TYPE_RGB:
-            ALOGE("Not supported RGB color format");
-            return C2_BAD_VALUE;
+        case C2PlanarLayout::TYPE_RGB: {
+            uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+            uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+            size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+            size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
+            convertRGBToP210(dstY, dstUV, (uint32_t*)(input->data()[0]),
+                                        layout.planes[layout.PLANE_Y].rowInc / 4,
+                                        dstYStride, dstUVStride, width, height,
+                                        mColorAspects->matrix, mColorAspects->range);
+            break;
+        }
         case C2PlanarLayout::TYPE_RGBA: {
             [[fallthrough]];
         }
         case C2PlanarLayout::TYPE_YUVA: {
             ALOGV("Convert from ABGR2101010 to P210");
-            uint16_t *dstY, *dstU, *dstV;
-            dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
-            dstU = (uint16_t*)inputFrames->frm[0].imgb->a[1];
-            dstV = (uint16_t*)inputFrames->frm[0].imgb->a[2];
-            convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(input->data()[0]),
-                                                layout.planes[layout.PLANE_Y].rowInc / 4, width,
-                                                height, mColorAspects->matrix,
-                                                mColorAspects->range);
-            break;
+            if (mColorFormat == OAPV_CF_PLANAR2) {
+                uint16_t *dstY, *dstUV;
+                dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+                size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
+                convertRGBA1010102ToP210(dstY, dstUV, (uint32_t*)(input->data()[0]),
+                                                layout.planes[layout.PLANE_Y].rowInc / 4,
+                                                dstYStride, dstUVStride, width, height,
+                                                mColorAspects->matrix, mColorAspects->range);
+                break;
+            } else {
+                ALOGE("Not supported color format. %d", mColorFormat);
+                return C2_BAD_VALUE;
+            }
         }
         case C2PlanarLayout::TYPE_YUV: {
             if (IsP010(*input)) {
-                if (mColorFormat == OAPV_CF_YCBCR422) {
-                    ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
-                } else if (mColorFormat == OAPV_CF_PLANAR2) {
+                ALOGV("Convert from P010 to P210");
+                if (mColorFormat == OAPV_CF_PLANAR2) {
                     uint16_t *srcY  = (uint16_t*)(input->data()[0]);
                     uint16_t *srcUV = (uint16_t*)(input->data()[1]);
                     uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
                     uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                    size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+                    size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
                     convertP010ToP210(dstY, dstUV, srcY, srcUV,
-                                      input->width(), input->width(), input->width(),
-                                      input->height());
+                                      layout.planes[layout.PLANE_Y].rowInc / 2,
+                                      layout.planes[layout.PLANE_U].rowInc / 2,
+                                      dstYStride, dstUVStride, width, height);
                 } else {
                     ALOGE("Not supported color format. %d", mColorFormat);
                     return C2_BAD_VALUE;
                 }
-            } else if (IsNV12(*input)) {
+            } else if (IsP210(*input)) {
+                ALOGV("Convert from P210 to P210");
+                if (mColorFormat == OAPV_CF_PLANAR2) {
+                    uint16_t *srcY  = (uint16_t*)(input->data()[0]);
+                    uint16_t *srcUV = (uint16_t*)(input->data()[1]);
+                    uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                    uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                    size_t srcYStride = layout.planes[layout.PLANE_Y].rowInc / 2;
+                    size_t srcUVStride = layout.planes[layout.PLANE_U].rowInc / 2;
+                    size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+                    size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
+
+                    for (size_t y = 0; y < height; ++y) {
+                        std::memcpy(dstY, srcY, width * sizeof(uint16_t));
+                        dstY += dstYStride;
+                        srcY += srcYStride;
+                    }
+
+                    for (size_t y = 0; y < height; ++y) {
+                        std::memcpy(dstUV, srcUV, width * sizeof(uint16_t));
+                        srcUV += srcUVStride;
+                        dstUV += dstUVStride;
+                    }
+                } else {
+                    ALOGE("Not supported color format. %d", mColorFormat);
+                    return C2_BAD_VALUE;
+                }
+            } else if (IsNV12(*input) || IsNV21(*input)) {
+                ALOGV("Convert from NV12 to P210");
                 uint8_t  *srcY  = (uint8_t*)input->data()[0];
                 uint8_t  *srcUV = (uint8_t*)input->data()[1];
                 uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
                 uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+                size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
                 convertSemiPlanar8ToP210(dstY, dstUV, srcY, srcUV,
-                                         input->width(), input->width(), input->width(),
-                                         input->width(), input->width(), input->height(),
-                                         CONV_FORMAT_I420);
+                                         layout.planes[layout.PLANE_Y].rowInc,
+                                         layout.planes[layout.PLANE_U].rowInc,
+                                         dstYStride, dstUVStride,
+                                         width, height, CONV_FORMAT_I420, IsNV12(*input));
             } else if (IsI420(*input)) {
+                ALOGV("Convert from I420 to P210");
                 uint8_t  *srcY  = (uint8_t*)input->data()[0];
                 uint8_t  *srcU  = (uint8_t*)input->data()[1];
                 uint8_t  *srcV  = (uint8_t*)input->data()[2];
                 uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
                 uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                size_t dstYStride = inputFrames->frm[0].imgb->s[0] / 2;
+                size_t dstUVStride = inputFrames->frm[0].imgb->s[1] / 2;
                 convertPlanar8ToP210(dstY, dstUV, srcY, srcU, srcV,
                                         layout.planes[C2PlanarLayout::PLANE_Y].rowInc,
                                         layout.planes[C2PlanarLayout::PLANE_U].rowInc,
                                         layout.planes[C2PlanarLayout::PLANE_V].rowInc,
-                                        input->width(), input->width(),
-                                        input->width(), input->height(),
+                                        dstYStride, dstUVStride,
+                                        width, height,
                                         CONV_FORMAT_I420);
 
             } else {
@@ -1085,50 +1131,6 @@
     return C2_OK;
 }
 
-void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
-                                                 oapv_imgb_t* imgb) {
-    uint32_t width = input->width();
-    uint32_t height = input->height();
-
-    uint8_t* yPlane = (uint8_t*)input->data()[0];
-    auto* uvPlane = (uint8_t*)input->data()[1];
-    uint32_t stride[3];
-    stride[0] = width * 2;
-    stride[1] = stride[2] = width;
-
-    uint8_t *dst, *src;
-    uint16_t tmp;
-    for (int32_t y = 0; y < height; ++y) {
-        src = yPlane + y * stride[0];
-        dst = (uint8_t*)imgb->a[0] + y * stride[0];
-        for (int32_t x = 0; x < stride[0]; x += 2) {
-            tmp = (src[x + 1] << 2) | (src[x] >> 6);
-            dst[x] = tmp & 0xFF;
-            dst[x + 1] = tmp >> 8;
-        }
-    }
-
-    uint8_t *dst_u, *dst_v;
-    for (int32_t y = 0; y < height / 2; ++y) {
-        src = uvPlane + y * stride[1] * 2;
-        dst_u = (uint8_t*)imgb->a[1] + (y * 2) * stride[1];
-        dst_v = (uint8_t*)imgb->a[2] + (y * 2) * stride[2];
-        for (int32_t x = 0; x < stride[1] * 2; x += 4) {
-            tmp = (src[x + 1] << 2) | (src[x] >> 6);  // cb
-            dst_u[x / 2] = tmp & 0xFF;
-            dst_u[x / 2 + 1] = tmp >> 8;
-            dst_u[x / 2 + stride[1]] = dst_u[x / 2];
-            dst_u[x / 2 + stride[1] + 1] = dst_u[x / 2 + 1];
-
-            tmp = (src[x + 3] << 2) | (src[x + 2] >> 6);  // cr
-            dst_v[x / 2] = tmp & 0xFF;
-            dst_v[x / 2 + 1] = tmp >> 8;
-            dst_v[x / 2 + stride[2]] = dst_v[x / 2];
-            dst_v[x / 2 + stride[2] + 1] = dst_v[x / 2 + 1];
-        }
-    }
-}
-
 void C2SoftApvEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
                               const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
                               oapve_stat_t* stat) {
@@ -1183,7 +1185,7 @@
 void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work,
                                  oapv_bitb_t* bitb,
                                  uint32_t encodedSize) {
-    if (encodedSize < 31) {
+    if (encodedSize < 35) {
         ALOGE("the first frame size is too small, so no csd data will be created.");
         return;
     }
@@ -1197,17 +1199,19 @@
     uint8_t profile_idc = 0;
     uint8_t level_idc = 0;
     uint8_t band_idc = 0;
-    uint32_t frame_width_minus1 = 0;
-    uint32_t frame_height_minus1 = 0;
+    uint32_t frame_width = 0;
+    uint32_t frame_height = 0;
     uint8_t chroma_format_idc = 0;
     uint8_t bit_depth_minus8 = 0;
     uint8_t capture_time_distance = 0;
     uint8_t color_primaries = 0;
     uint8_t transfer_characteristics = 0;
+    uint8_t full_range_flag = 0;
     uint8_t matrix_coefficients = 0;
 
     /* pbu_header() */
     reader.skipBits(32);           // pbu_size
+    reader.skipBits(32);           // signature
     reader.skipBits(32);           // currReadSize
     pbu_type = reader.getBits(8);  // pbu_type
     reader.skipBits(16);           // group_id
@@ -1218,8 +1222,8 @@
     level_idc = reader.getBits(8);              // level_idc
     band_idc = reader.getBits(3);               // band_idc
     reader.skipBits(5);                         // reserved_zero_5bits
-    frame_width_minus1 = reader.getBits(32);    // width
-    frame_height_minus1 = reader.getBits(32);   // height
+    frame_width = reader.getBits(24);           // width
+    frame_height = reader.getBits(24);          // height
     chroma_format_idc = reader.getBits(4);      // chroma_format_idc
     bit_depth_minus8 = reader.getBits(4);       // bit_depth
     capture_time_distance = reader.getBits(8);  // capture_time_distance
@@ -1232,12 +1236,15 @@
         color_primaries = reader.getBits(8);           // color_primaries
         transfer_characteristics = reader.getBits(8);  // transfer_characteristics
         matrix_coefficients = reader.getBits(8);       // matrix_coefficients
+        full_range_flag = reader.getBits(1);           // full_range_flag
+        reader.skipBits(7);                            // reserved_zero_7bits
     }
 
     number_of_configuration_entry = 1;  // The real-time encoding on the device is assumed to be 1.
     number_of_frame_info = 1;  // The real-time encoding on the device is assumed to be 1.
 
     std::vector<uint8_t> csdData;
+
     csdData.push_back((uint8_t)0x1);
     csdData.push_back(number_of_configuration_entry);
 
@@ -1250,14 +1257,14 @@
             csdData.push_back(profile_idc);
             csdData.push_back(level_idc);
             csdData.push_back(band_idc);
-            csdData.push_back((uint8_t)((frame_width_minus1 >> 24) & 0xff));
-            csdData.push_back((uint8_t)((frame_width_minus1 >> 16) & 0xff));
-            csdData.push_back((uint8_t)((frame_width_minus1 >> 8) & 0xff));
-            csdData.push_back((uint8_t)(frame_width_minus1 & 0xff));
-            csdData.push_back((uint8_t)((frame_height_minus1 >> 24) & 0xff));
-            csdData.push_back((uint8_t)((frame_height_minus1 >> 16) & 0xff));
-            csdData.push_back((uint8_t)((frame_height_minus1 >> 8) & 0xff));
-            csdData.push_back((uint8_t)(frame_height_minus1 & 0xff));
+            csdData.push_back((uint8_t)((frame_width >> 24) & 0xff));
+            csdData.push_back((uint8_t)((frame_width >> 16) & 0xff));
+            csdData.push_back((uint8_t)((frame_width >> 8) & 0xff));
+            csdData.push_back((uint8_t)(frame_width & 0xff));
+            csdData.push_back((uint8_t)((frame_height >> 24) & 0xff));
+            csdData.push_back((uint8_t)((frame_height >> 16) & 0xff));
+            csdData.push_back((uint8_t)((frame_height >> 8) & 0xff));
+            csdData.push_back((uint8_t)(frame_height & 0xff));
             csdData.push_back((uint8_t)(((chroma_format_idc << 4) & 0xf0) |
                                       (bit_depth_minus8 & 0xf)));
             csdData.push_back((uint8_t)(capture_time_distance));
@@ -1265,6 +1272,7 @@
                 csdData.push_back(color_primaries);
                 csdData.push_back(transfer_characteristics);
                 csdData.push_back(matrix_coefficients);
+                csdData.push_back(full_range_flag << 7);
             }
         }
     }
@@ -1377,6 +1385,8 @@
         return;
     }
 
+    view->setCrop_be(C2Rect(mSize->width, mSize->height));
+
     error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
     if (error != C2_OK) {
         ALOGE("setEncodeArgs has failed. err = %d", error);
@@ -1452,6 +1462,13 @@
         ALOGV("APV SW Codec is not enabled");
         return nullptr;
     }
+
+    bool enabled = isAtLeastRelease(36, "Baklava");
+    ALOGD("isAtLeastRelease(36, Baklava) says enable: %s", enabled ? "yes" : "no");
+    if (!enabled) {
+        return nullptr;
+    }
+
     return new ::android::C2SoftApvEncFactory();
 }
 
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
index f281052..2b873e8 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.h
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_APV_ENC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #include <utils/Vector.h>
 #include <map>
 #include "oapv.h"
@@ -35,6 +36,8 @@
     class IntfImpl;
 
     C2SoftApvEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftApvEnc(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftApvEnc();
 
     // From SimpleC2Component
@@ -48,7 +51,7 @@
     c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
 
   private:
-    c2_status_t reset();
+    c2_status_t resetEncoder();
     c2_status_t initEncoder();
     c2_status_t releaseEncoder();
     c2_status_t setEncodeArgs(oapv_frms_t* imgb_inp, const C2GraphicView* const input,
@@ -84,7 +87,6 @@
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
     std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
     std::shared_ptr<C2StreamQualityTuning::output> mQuality;
-    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2LinearBlock> mOutBlock;
     std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
     std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
diff --git a/media/codec2/components/apv/isAtLeastRelease.cpp b/media/codec2/components/apv/isAtLeastRelease.cpp
new file mode 100644
index 0000000..75b21e3
--- /dev/null
+++ b/media/codec2/components/apv/isAtLeastRelease.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "isAtLeastRelease"
+
+#include <android/api-level.h>
+#include <android-base/properties.h>
+#include <utils/Log.h>
+
+#include <mutex>
+#include <string>
+
+// current SDK for this device; filled in lazily, on the first call to isAtLeastRelease().
+static int mySdk = 0;
+static std::string myCodeName;
+
+// to help address b/388925029
+
+/**
+ * support code so a plugin (currently the APV codecs) can self-manage
+ * whether it is running on a sufficiently new code base.
+ *
+ * this is here because the XML parser for media codec definitions has
+ * an off-by-one error in how it handles <MediaCodec name=".." ... minsdk="" >
+ *
+ * we will want to fix that starting in Android B/16, but devices in Android V/15
+ * still have issues [and we build the codecs into module code so that it goes back
+ * to older releases].
+ *
+ */
+
+bool isAtLeastRelease(int minsdk, const char *codename) {
+
+    static std::once_flag sCheckOnce;
+    std::call_once(sCheckOnce, [&](){
+        mySdk = android_get_device_api_level();
+        myCodeName  = android::base::GetProperty("ro.build.version.codename", "<none>");
+    });
+
+    bool satisfied = false;
+    ALOGI("device sdk %d, minsdk %d", mySdk, minsdk);
+    if (mySdk >= minsdk) {
+        satisfied = true;
+    }
+
+    // allow the caller to skip the codename check.
+    if (codename != nullptr) {
+        ALOGI("active codename %s, to match %s", myCodeName.c_str(), codename);
+        if (myCodeName == codename) {
+            satisfied = true;
+        }
+    }
+
+    return satisfied;
+}
diff --git a/media/codec2/components/apv/isAtLeastRelease.h b/media/codec2/components/apv/isAtLeastRelease.h
new file mode 100644
index 0000000..20c5208
--- /dev/null
+++ b/media/codec2/components/apv/isAtLeastRelease.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+bool isAtLeastRelease(int minsdk, const char *codename);
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 77fdeb9..f58a131 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -351,6 +351,12 @@
     GENERATE_FILE_NAMES();
     CREATE_DUMP_FILE(mInFile);
 }
+C2SoftAvcDec::C2SoftAvcDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftAvcDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
 
 C2SoftAvcDec::~C2SoftAvcDec() {
     onRelease();
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 36a463e..0104bb0 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -24,6 +24,7 @@
 
 #include <atomic>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "ih264_typedefs.h"
 #include "ih264d.h"
@@ -96,6 +97,8 @@
 public:
     class IntfImpl;
     C2SoftAvcDec(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftAvcDec(const char *name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftAvcDec();
 
     // From SimpleC2Component
@@ -173,7 +176,7 @@
         VuiColorAspects()
             : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
 
-        bool operator==(const VuiColorAspects &o) {
+        bool operator==(const VuiColorAspects &o) const {
             return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
                     && fullRange == o.fullRange;
         }
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 80a5e67..728a871 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -619,6 +619,11 @@
     initEncParams();
 }
 
+C2SoftAvcEnc::C2SoftAvcEnc(
+        const char *name, c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftAvcEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftAvcEnc::~C2SoftAvcEnc() {
     onRelease();
 }
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 33d166f..2aaefbc 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -23,6 +23,7 @@
 #include <utils/Vector.h>
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "ih264_typedefs.h"
 #include "ih264e.h"
@@ -123,6 +124,8 @@
     class IntfImpl;
 
     C2SoftAvcEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftAvcEnc(const char *name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper> &helper);
 
     // From SimpleC2Component
     c2_status_t onInit() override;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index ea67bf4..7b51afe 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -354,6 +354,67 @@
     }
 }
 
+void convertP210ToRGBA1010102(uint32_t* dst, const uint16_t* srcY, const uint16_t* srcUV,
+                                size_t srcYStride, size_t srcUVStride, size_t dstStride,
+                                size_t width, size_t height,
+                                std::shared_ptr<const C2ColorAspectsStruct> aspects) {
+    C2ColorAspectsStruct _aspects = FillMissingColorAspects(aspects, width, height);
+    struct Coeffs coeffs = GetCoeffsForAspects(_aspects);
+
+    int32_t _y = coeffs._y;
+    int32_t _b_u = coeffs._b_u;
+    int32_t _neg_g_u = -coeffs._g_u;
+    int32_t _neg_g_v = -coeffs._g_v;
+    int32_t _r_v = coeffs._r_v;
+    int32_t _c16 = coeffs._c16;
+
+    for (size_t y = 0; y < height; y++) {
+        uint32_t *dstTop = (uint32_t *)dst;
+        uint16_t *ySrcTop = (uint16_t *)srcY;
+        uint16_t *uSrc = (uint16_t *)srcUV;
+        uint16_t *vSrc = (uint16_t *)(srcUV + 1);
+        for (size_t x = 0; x < width; x += 2) {
+            int32_t u, v, y00, y01;
+            u = ((*uSrc) >> 6) - 512;
+            uSrc += 2;
+            v = ((*vSrc) >> 6) - 512;
+            vSrc += 2;
+
+            y00 = ((*ySrcTop) >> 6) - _c16;
+            ySrcTop += 1;
+            y01 = ((*ySrcTop) >> 6) - _c16;
+            ySrcTop += 1;
+
+            int32_t u_b = u * _b_u;
+            int32_t u_g = u * _neg_g_u;
+            int32_t v_g = v * _neg_g_v;
+            int32_t v_r = v * _r_v;
+
+            int32_t yMult, b, g, r;
+            yMult = y00 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y01 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+        }
+        srcY += srcYStride;
+        srcUV += srcUVStride;
+        dst += dstStride;
+    }
+}
+
 void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
                                  const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
                                  size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -464,15 +525,18 @@
 }
 
 void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY, const uint16_t *srcUV,
-                       size_t srcUVStride, size_t dstUVStride, size_t width, size_t height) {
-    std::memcpy(dstY, srcY, width * height * sizeof(uint16_t));
+                       size_t srcYStride, size_t srcUVStride, size_t dstYStride, size_t dstUVStride,
+                       size_t width, size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        std::memcpy(dstY + (y * dstYStride), srcY + (y * srcYStride), width * sizeof(uint16_t));
+    }
 
     int32_t offsetTop, offsetBot;
     for (size_t y = 0; y < (height + 1) / 2; ++y) {
-        offsetTop = (y * 2) * dstUVStride;
-        offsetBot = (y * 2 + 1) * dstUVStride;
-        std::memcpy(dstUV + offsetTop, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
-        std::memcpy(dstUV + offsetBot, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+        std::memcpy(dstUV, srcUV, width * sizeof(uint16_t));
+        std::memcpy(dstUV + dstUVStride, srcUV, width * sizeof(uint16_t));
+        srcUV += srcUVStride;
+        dstUV += dstUVStride << 1;
     }
 }
 
@@ -531,8 +595,9 @@
 }
 
 void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
-                              size_t srcRGBStride, size_t width, size_t height,
-                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
+                              size_t srcRGBStride, size_t dstYStride, size_t dstUVStride,
+                              size_t width, size_t height, C2Color::matrix_t colorMatrix,
+                              C2Color::range_t colorRange) {
     uint16_t r, g, b;
     int32_t i32Y, i32U, i32V;
     uint16_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 64;
@@ -565,7 +630,63 @@
             }
         }
         srcRGBA += srcRGBStride;
-        dstY += width;
+        dstY += dstYStride;
+        dstUV += dstUVStride;
+    }
+}
+
+// Matrix coefficient to convert RGB to Planar YUV data.
+// Each sub-array represents the 3X3 coeff used with R, G and B
+static const int16_t bt601Matrix[2][3][3] = {
+    { { 77, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+    { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } },  /* RANGE_LIMITED */
+};
+
+static const int16_t bt709Matrix[2][3][3] = {
+    // TRICKY: 18 is adjusted to 19 so that sum of row 1 is 256
+    { { 54, 183, 19 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    // TRICKY: -87 is adjusted to -86 so that sum of row 2 is 0
+    { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
+};
+
+void convertRGBToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t dstYStride, size_t dstUVStride,
+                              size_t width, size_t height,
+                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
+    uint8_t r, g, b;
+    uint8_t i8Y, i8U, i8V;
+    int32_t i32Y, i32U, i32V;
+    uint8_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 16;
+    uint8_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 255 : 235;
+    uint8_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 255 : 240;
+    // set default range as limited
+    if (colorRange != C2Color::RANGE_FULL && colorRange != C2Color::RANGE_LIMITED) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t (*weights)[3] =
+        (colorMatrix == C2Color::MATRIX_BT709) ?
+            bt709Matrix[colorRange - 1] : bt601Matrix[colorRange - 1];
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            b = (srcRGBA[x] >> 16) & 0xFF;
+            g = (srcRGBA[x] >> 8) & 0xFF;
+            r = srcRGBA[x] & 0xFF;
+
+            i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2]) >> 8) + zeroLvl;
+            i8Y = CLIP3(zeroLvl, i32Y, maxLvlLuma);
+            dstY[x] = ((uint16_t)((double)i8Y * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+            if (x % 2 == 0) {
+                i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2]) >> 8) + 128;
+                i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2]) >> 8) + 128;
+                i8U = CLIP3(zeroLvl, i32U, maxLvlChroma);
+                i8V = CLIP3(zeroLvl, i32V, maxLvlChroma);
+                dstUV[x] = ((uint16_t)((double)i8U * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+                dstUV[x + 1] = ((uint16_t)((double)i8V * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+            }
+        }
+        srcRGBA += srcRGBStride;
+        dstY += dstYStride;
+        dstUV += dstUVStride;
     }
 }
 
@@ -689,7 +810,9 @@
                               size_t srcYStride, size_t srcUVStride,
                               size_t dstYStride, size_t dstUVStride,
                               uint32_t width, uint32_t height,
-                              CONV_FORMAT_T format) {
+                              CONV_FORMAT_T format, bool isNV12) {
+  // This function assumes that dstStride/width are even.
+  // The check for this is performed by the caller
   if (format != CONV_FORMAT_I420) {
     ALOGE("No support for semi-planar8 to P210. format is %d", format);
     return;
@@ -703,13 +826,26 @@
     srcY += srcYStride;
   }
 
-  for (int32_t y = 0; y < height / 2; ++y) {
-    for (int32_t x = 0; x < width; ++x) {
-      dstUV[x] = dstUV[dstUVStride + x] =
-          ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+  if (isNV12) {
+    for (int32_t y = 0; y < (height + 1) / 2; ++y) {
+        for (int32_t x = 0; x < width; x++) {
+            dstUV[x] = dstUV[dstUVStride + x] =
+                ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+        }
+        srcUV += srcUVStride;
+        dstUV += dstUVStride << 1;
     }
-    srcUV += srcUVStride;
-    dstUV += dstUVStride << 1;
+  } else { //NV21
+    for (int32_t y = 0; y < (height + 1) / 2; ++y) {
+        for (int32_t x = 0; x < width; x+=2) {
+            dstUV[x+1] = dstUV[dstUVStride + x + 1] =
+                ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+            dstUV[x] = dstUV[dstUVStride + x] =
+                ((uint16_t)((double)srcUV[x + 1] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+        }
+        srcUV += srcUVStride;
+        dstUV += dstUVStride << 1;
+    }
   }
 }
 
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 5d2e8cd..960bd08 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -68,8 +68,8 @@
                                  size_t dstUStride, size_t dstVStride, size_t width,
                                  size_t height, bool isMonochrome = false);
 
-void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
-                       const uint16_t *srcUV, size_t srcUVStride, size_t dstUVStride,
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY, const uint16_t *srcUV,
+                       size_t srcYStride, size_t srcUVStride, size_t dstYStride, size_t dstUVStride,
                        size_t width, size_t height);
 
 void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
@@ -78,7 +78,13 @@
                                         C2Color::range_t colorRange);
 
 void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
-                              size_t srcRGBStride, size_t width, size_t height,
+                              size_t srcRGBStride, size_t dstYStride, size_t dstUVStride,
+                              size_t width, size_t height, C2Color::matrix_t colorMatrix,
+                              C2Color::range_t colorRange);
+
+void convertRGBToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t dstYStride, size_t dstUVStride,
+                              size_t width, size_t height,
                               C2Color::matrix_t colorMatrix, C2Color::range_t colorRange);
 
 void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
@@ -88,6 +94,11 @@
                                         std::shared_ptr<const C2ColorAspectsStruct> aspects,
                                         CONV_FORMAT_T format);
 
+void convertP210ToRGBA1010102(uint32_t* dst, const uint16_t* srcY, const uint16_t* srcUV,
+                                size_t srcYStride, size_t srcUVStride, size_t dstStride,
+                                size_t width, size_t height,
+                                std::shared_ptr<const C2ColorAspectsStruct> aspects);
+
 void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
                            const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
                            size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -110,7 +121,7 @@
                               size_t srcYStride, size_t srcUVStride,
                               size_t dstYStride, size_t dstUVStride,
                               uint32_t width, uint32_t height,
-                              CONV_FORMAT_T format);
+                              CONV_FORMAT_T format, bool isNV12);
 void convertPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
                               const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
                               size_t srcYStride, size_t srcUStride, size_t srcVStride,
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 44a8dd1..622823c 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -447,6 +447,11 @@
     mTimeStart = mTimeEnd = systemTime();
 }
 
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftDav1dDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftDav1dDec::~C2SoftDav1dDec() {
     onRelease();
 }
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index 6008325..1666a42 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -25,6 +25,7 @@
 
 #include <C2Config.h>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include <dav1d/dav1d.h>
 #include <deque>
@@ -38,6 +39,8 @@
     class IntfImpl;
 
     C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
     ~C2SoftDav1dDec();
 
     // Begin SimpleC2Component overrides.
@@ -91,7 +94,7 @@
               coeffs(C2Color::MATRIX_UNSPECIFIED),
               fullRange(C2Color::RANGE_UNSPECIFIED) {}
 
-        bool operator==(const VuiColorAspects& o) {
+        bool operator==(const VuiColorAspects& o) const {
             return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
                    fullRange == o.fullRange;
         }
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
index 49892a4..9e4db7b 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.cpp
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -112,6 +112,13 @@
       mFLACDecoder(nullptr) {
 }
 
+C2SoftFlacDec::C2SoftFlacDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftFlacDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftFlacDec::~C2SoftFlacDec() {
     onRelease();
 }
diff --git a/media/codec2/components/flac/C2SoftFlacDec.h b/media/codec2/components/flac/C2SoftFlacDec.h
index b491bfd..1dd1670 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.h
+++ b/media/codec2/components/flac/C2SoftFlacDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_FLAC_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "FLACDecoder.h"
 
@@ -28,6 +29,8 @@
 
     C2SoftFlacDec(const char *name, c2_node_id_t id,
                       const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftFlacDec(const char *name, c2_node_id_t id,
+                      const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftFlacDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 780660e..ad9d3e1 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -140,6 +140,13 @@
       mInputBufferPcm32(nullptr) {
 }
 
+C2SoftFlacEnc::C2SoftFlacEnc(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftFlacEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftFlacEnc::~C2SoftFlacEnc() {
     onRelease();
 }
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index ed9c298..129f1bd 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_FLAC_ENC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "FLAC/stream_encoder.h"
 
@@ -36,6 +37,8 @@
     class IntfImpl;
 
     C2SoftFlacEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftFlacEnc(const char *name, c2_node_id_t id,
+                  const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftFlacEnc();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index f952f22..1ba2e59 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -106,6 +106,13 @@
       mIntf(intfImpl) {
 }
 
+C2SoftG711Dec::C2SoftG711Dec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftG711Dec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftG711Dec::~C2SoftG711Dec() {
     onRelease();
 }
diff --git a/media/codec2/components/g711/C2SoftG711Dec.h b/media/codec2/components/g711/C2SoftG711Dec.h
index f93840b..af07b20 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.h
+++ b/media/codec2/components/g711/C2SoftG711Dec.h
@@ -18,7 +18,7 @@
 #define ANDROID_C2_SOFT_G711_DEC_H_
 
 #include <SimpleC2Component.h>
-
+#include <util/C2InterfaceHelper.h>
 
 namespace android {
 
@@ -27,6 +27,8 @@
 
     C2SoftG711Dec(const char *name, c2_node_id_t id,
                const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftG711Dec(const char *name, c2_node_id_t id,
+               const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftG711Dec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 5141d65..d796660 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -432,6 +432,11 @@
   mTimeStart = mTimeEnd = systemTime();
 }
 
+C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
+                             const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftGav1Dec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }
 
 c2_status_t C2SoftGav1Dec::onInit() {
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 0e09fcc..68c3f61 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -23,8 +23,10 @@
 
 #include <media/stagefright/foundation/ColorUtils.h>
 
-#include <SimpleC2Component.h>
 #include <C2Config.h>
+#include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
+
 #include <gav1/decoder.h>
 #include <gav1/decoder_settings.h>
 
@@ -35,6 +37,8 @@
 
   C2SoftGav1Dec(const char* name, c2_node_id_t id,
                 const std::shared_ptr<IntfImpl>& intfImpl);
+  C2SoftGav1Dec(const char* name, c2_node_id_t id,
+                const std::shared_ptr<C2ReflectorHelper>& helper);
   ~C2SoftGav1Dec();
 
   // Begin SimpleC2Component overrides.
@@ -84,7 +88,7 @@
             coeffs(C2Color::MATRIX_UNSPECIFIED),
             fullRange(C2Color::RANGE_UNSPECIFIED) { }
 
-      bool operator==(const VuiColorAspects &o) {
+      bool operator==(const VuiColorAspects &o) const {
           return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
                   && fullRange == o.fullRange;
       }
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp
index 977677d..8f85295 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.cpp
+++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp
@@ -97,6 +97,11 @@
       mGsm(nullptr) {
 }
 
+C2SoftGsmDec::C2SoftGsmDec(const char *name, c2_node_id_t id,
+                     const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftGsmDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftGsmDec::~C2SoftGsmDec() {
     onRelease();
 }
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.h b/media/codec2/components/gsm/C2SoftGsmDec.h
index edd273b..817fad6 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.h
+++ b/media/codec2/components/gsm/C2SoftGsmDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_GSM_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "gsm.h"
 
@@ -34,6 +35,8 @@
 
     C2SoftGsmDec(const char *name, c2_node_id_t id,
               const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftGsmDec(const char *name, c2_node_id_t id,
+              const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftGsmDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 64aa7a4..4409379 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -341,6 +341,13 @@
         mOutIndex(0u) {
 }
 
+C2SoftHevcDec::C2SoftHevcDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftHevcDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftHevcDec::~C2SoftHevcDec() {
     onRelease();
 }
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 6abf69e..843341a 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -22,6 +22,7 @@
 #include <atomic>
 #include <inttypes.h>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "ihevc_typedefs.h"
 #include "ihevcd_cxa.h"
@@ -49,6 +50,8 @@
 
     C2SoftHevcDec(const char* name, c2_node_id_t id,
                   const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftHevcDec(const char* name, c2_node_id_t id,
+                  const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftHevcDec();
 
     // From SimpleC2Component
@@ -132,7 +135,7 @@
         VuiColorAspects()
             : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
 
-        bool operator==(const VuiColorAspects &o) {
+        bool operator==(const VuiColorAspects &o) const {
             return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
                     && fullRange == o.fullRange;
         }
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index ec1dd14..7e5f7b6 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -597,6 +597,11 @@
     mTimeStart = mTimeEnd = systemTime();
 }
 
+C2SoftHevcEnc::C2SoftHevcEnc(const char* name, c2_node_id_t id,
+                             const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftHevcEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftHevcEnc::~C2SoftHevcEnc() {
     onRelease();
 }
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index ce9cec8..dfc2000 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_HEVC_ENC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #include <algorithm>
 #include <inttypes.h>
 #include <map>
@@ -45,6 +46,8 @@
 
     C2SoftHevcEnc(const char* name, c2_node_id_t id,
                   const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftHevcEnc(const char* name, c2_node_id_t id,
+                  const std::shared_ptr<C2ReflectorHelper>& helper);
 
     // From SimpleC2Component
     c2_status_t onInit() override;
diff --git a/media/codec2/components/iamf/Android.bp b/media/codec2/components/iamf/Android.bp
new file mode 100644
index 0000000..8b6c8fa
--- /dev/null
+++ b/media/codec2/components/iamf/Android.bp
@@ -0,0 +1,13 @@
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_iamfdec",
+
+    srcs: [],
+
+    shared_libs: [
+        // iamf_tools library will need to go here.
+    ],
+}
diff --git a/media/codec2/components/iamf/C2SoftIamfDec.cpp b/media/codec2/components/iamf/C2SoftIamfDec.cpp
new file mode 100644
index 0000000..cfd5369
--- /dev/null
+++ b/media/codec2/components/iamf/C2SoftIamfDec.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "C2SoftIamfDec"
+#include <log/log.h>
+
+#include "C2SoftIamfDec.h"
+#include <util/C2InterfaceHelper.h>
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.iamf.decoder";
+
+}  // namespace
+
+class C2SoftIamfDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_AUDIO,
+                                            // Replace with IAMF mimetype when available
+                                            "audio/iamf") {
+        // Configure (e.g. noPrivateBuffers(), etc.)
+        // Add parameters.
+    }
+};
+
+C2SoftIamfDec::C2SoftIamfDec(const char* name, c2_node_id_t id,
+                             const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+}
+
+C2SoftIamfDec::~C2SoftIamfDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftIamfDec::onInit() {
+    return C2_BAD_STATE;
+}
+
+c2_status_t C2SoftIamfDec::onStop() {
+    return C2_NO_INIT;
+}
+
+void C2SoftIamfDec::onReset() {
+    return;
+}
+
+void C2SoftIamfDec::onRelease() {
+    return;
+}
+
+c2_status_t C2SoftIamfDec::onFlush_sm() {
+    return C2_NO_INIT;
+}
+
+void C2SoftIamfDec::process(const std::unique_ptr<C2Work>& /* work */,
+                            const std::shared_ptr<C2BlockPool>& /* pool */) {
+    return;
+}
+
+c2_status_t C2SoftIamfDec::drain(uint32_t /* drainMode */, const std::shared_ptr<C2BlockPool>& /* pool */) {
+    return C2_NO_INIT;
+}
+
+}  // namespace android
diff --git a/media/codec2/components/iamf/C2SoftIamfDec.h b/media/codec2/components/iamf/C2SoftIamfDec.h
new file mode 100644
index 0000000..547b3ba
--- /dev/null
+++ b/media/codec2/components/iamf/C2SoftIamfDec.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_IAMF_DEC_H_
+#define ANDROID_C2_SOFT_IAMF_DEC_H_
+
+#include <SimpleC2Component.h>
+
+namespace android {
+
+class C2SoftIamfDec : public SimpleC2Component {
+    // Forward declaration of the C2 interface implementation.
+    class IntfImpl;
+
+  public:
+    C2SoftIamfDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    virtual ~C2SoftIamfDec();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_IAMF_DEC_H_
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index aed5e68..ea0c0d3 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -102,6 +102,11 @@
       mDecoderBuf(nullptr) {
 }
 
+C2SoftMP3::C2SoftMP3(const char *name, c2_node_id_t id,
+                     const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftMP3(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftMP3::~C2SoftMP3() {
     onRelease();
 }
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h
index e2dfcf3..8612bb8 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.h
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_MP3_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 
 struct tPVMP3DecoderExternal;
@@ -35,6 +36,8 @@
 
     C2SoftMP3(const char *name, c2_node_id_t id,
               const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftMP3(const char *name, c2_node_id_t id,
+              const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftMP3();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 64e4bf0..8437726 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -325,6 +325,13 @@
     CREATE_DUMP_FILE(mInFile);
 }
 
+C2SoftMpeg2Dec::C2SoftMpeg2Dec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftMpeg2Dec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftMpeg2Dec::~C2SoftMpeg2Dec() {
     onRelease();
 }
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 6d09694..72bcaf4 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -20,6 +20,7 @@
 #include <atomic>
 #include <inttypes.h>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include <media/stagefright/foundation/ColorUtils.h>
 
@@ -92,6 +93,8 @@
 
     C2SoftMpeg2Dec(const char* name, c2_node_id_t id,
                    const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftMpeg2Dec(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftMpeg2Dec();
 
     // From SimpleC2Component
@@ -173,7 +176,7 @@
         VuiColorAspects()
             : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
 
-        bool operator==(const VuiColorAspects &o) {
+        bool operator==(const VuiColorAspects &o) const {
             return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
                     && fullRange == o.fullRange;
         }
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index fd9488b..b695f4d 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -232,6 +232,13 @@
       mInitialized(false) {
 }
 
+C2SoftMpeg4Dec::C2SoftMpeg4Dec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftMpeg4Dec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftMpeg4Dec::~C2SoftMpeg4Dec() {
     onRelease();
 }
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
index fed04c9..8540b57 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -18,6 +18,7 @@
 #define C2_SOFT_MPEG4_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include <mp4dec_api.h>
 
@@ -29,6 +30,8 @@
 
     C2SoftMpeg4Dec(const char* name, c2_node_id_t id,
                    const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftMpeg4Dec(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftMpeg4Dec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index acc42e9..5074cd8 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -313,6 +313,11 @@
       mOutBufferSize(524288) {
 }
 
+C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftMpeg4Enc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftMpeg4Enc::~C2SoftMpeg4Enc() {
     onRelease();
 }
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
index 43461fc..a54ad1e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -21,6 +21,7 @@
 
 #include <Codec2BufferUtils.h>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "mp4enc_api.h"
 
@@ -31,6 +32,8 @@
 
     C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
                    const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
+                   const std::shared_ptr<C2ReflectorHelper>& helper);
 
     // From SimpleC2Component
     c2_status_t onInit() override;
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
index d4987c0..136e700 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.cpp
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -101,6 +101,11 @@
       mDecoder(nullptr) {
 }
 
+C2SoftOpusDec::C2SoftOpusDec(const char *name, c2_node_id_t id,
+                       const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftOpusDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftOpusDec::~C2SoftOpusDec() {
     onRelease();
 }
diff --git a/media/codec2/components/opus/C2SoftOpusDec.h b/media/codec2/components/opus/C2SoftOpusDec.h
index b0715ac..98e4520 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.h
+++ b/media/codec2/components/opus/C2SoftOpusDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_OPUS_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 
 struct OpusMSDecoder;
@@ -29,6 +30,8 @@
 
     C2SoftOpusDec(const char *name, c2_node_id_t id,
                const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftOpusDec(const char *name, c2_node_id_t id,
+               const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftOpusDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 40bb26e..e2b5cd2 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -134,6 +134,11 @@
       mOutIndex(0u) {
 }
 
+C2SoftOpusEnc::C2SoftOpusEnc(const char* name, c2_node_id_t id,
+                             const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftOpusEnc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftOpusEnc::~C2SoftOpusEnc() {
     onRelease();
 }
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 2c9f5e5..65290ee 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -19,6 +19,7 @@
 
 #include <atomic>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 #define MIN(a, b) (((a) < (b)) ? (a) : (b))
 
 struct OpusMSEncoder;
@@ -30,6 +31,8 @@
 
     C2SoftOpusEnc(const char *name, c2_node_id_t id,
                const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftOpusEnc(const char *name, c2_node_id_t id,
+               const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftOpusEnc();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index ea13071..3a61efe 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -112,6 +112,13 @@
       mIntf(intfImpl) {
 }
 
+C2SoftRawDec::C2SoftRawDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftRawDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftRawDec::~C2SoftRawDec() {
     onRelease();
 }
diff --git a/media/codec2/components/raw/C2SoftRawDec.h b/media/codec2/components/raw/C2SoftRawDec.h
index 7dfdec5..340253c 100644
--- a/media/codec2/components/raw/C2SoftRawDec.h
+++ b/media/codec2/components/raw/C2SoftRawDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_RAW_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 
 namespace android {
@@ -27,6 +28,8 @@
 
     C2SoftRawDec(const char* name, c2_node_id_t id,
               const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftRawDec(const char* name, c2_node_id_t id,
+              const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftRawDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index 899fe9b..2621dc0 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -107,6 +107,13 @@
       mVi(nullptr) {
 }
 
+C2SoftVorbisDec::C2SoftVorbisDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftVorbisDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftVorbisDec::~C2SoftVorbisDec() {
     onRelease();
 }
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.h b/media/codec2/components/vorbis/C2SoftVorbisDec.h
index 3bf7326..93a5f7c 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.h
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_VORBIS_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 
 struct vorbis_dsp_state;
@@ -30,6 +31,8 @@
 
     C2SoftVorbisDec(const char *name, c2_node_id_t id,
                  const std::shared_ptr<IntfImpl> &intfImpl);
+    C2SoftVorbisDec(const char *name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper> &helper);
     virtual ~C2SoftVorbisDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.cpp b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
index 049ec38..b5d1260 100644
--- a/media/codec2/components/vpx/C2SoftVp8Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
@@ -27,6 +27,10 @@
                            const std::shared_ptr<IntfImpl>& intfImpl)
     : C2SoftVpxEnc(name, id, intfImpl), mDCTPartitions(0), mProfile(1) {}
 
+C2SoftVp8Enc::C2SoftVp8Enc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftVp8Enc(name, id, std::make_shared<IntfImpl>(helper)) {}
+
 void C2SoftVp8Enc::setCodecSpecificInterface() {
     mCodecInterface = vpx_codec_vp8_cx();
 }
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.h b/media/codec2/components/vpx/C2SoftVp8Enc.h
index ed6f356..c7e567f 100644
--- a/media/codec2/components/vpx/C2SoftVp8Enc.h
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.h
@@ -29,6 +29,8 @@
 struct C2SoftVp8Enc : public C2SoftVpxEnc {
     C2SoftVp8Enc(const char* name, c2_node_id_t id,
                  const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftVp8Enc(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& helper);
 
  protected:
     // Populates |mCodecInterface| with codec specific settings.
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.cpp b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
index 6401521..0a60d76 100644
--- a/media/codec2/components/vpx/C2SoftVp9Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
@@ -32,6 +32,11 @@
       mFrameParallelDecoding(false) {
 }
 
+C2SoftVp9Enc::C2SoftVp9Enc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<C2ReflectorHelper>& helper)
+    : C2SoftVp9Enc(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 void C2SoftVp9Enc::setCodecSpecificInterface() {
     mCodecInterface = vpx_codec_vp9_cx();
 }
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.h b/media/codec2/components/vpx/C2SoftVp9Enc.h
index 77ef8fd..34ea950 100644
--- a/media/codec2/components/vpx/C2SoftVp9Enc.h
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.h
@@ -31,6 +31,8 @@
 struct C2SoftVp9Enc : public C2SoftVpxEnc {
     C2SoftVp9Enc(const char* name, c2_node_id_t id,
                  const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftVp9Enc(const char* name, c2_node_id_t id,
+                 const std::shared_ptr<C2ReflectorHelper>& helper);
 
  protected:
     // Populates |mCodecInterface| with codec specific settings.
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 83cbe47..b550473 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -372,6 +372,13 @@
       mQueue(new Mutexed<ConversionQueue>) {
 }
 
+C2SoftVpxDec::C2SoftVpxDec(
+        const char *name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftVpxDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftVpxDec::~C2SoftVpxDec() {
     onRelease();
 }
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 93cc213..61e4352 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_VPX_DEC_H_
 
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 
 #include "vpx/vpx_decoder.h"
@@ -30,6 +31,8 @@
 
     C2SoftVpxDec(const char* name, c2_node_id_t id,
               const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftVpxDec(const char* name, c2_node_id_t id,
+              const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftVpxDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 8bf4b72..d0c8aea 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -241,6 +241,13 @@
         mOutputDrainBuffer(nullptr) {
 }
 
+C2SoftXaacDec::C2SoftXaacDec(
+        const char* name,
+        c2_node_id_t id,
+        const std::shared_ptr<C2ReflectorHelper> &helper)
+    : C2SoftXaacDec(name, id, std::make_shared<IntfImpl>(helper)) {
+}
+
 C2SoftXaacDec::~C2SoftXaacDec() {
     onRelease();
 }
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.h b/media/codec2/components/xaac/C2SoftXaacDec.h
index 5c8567f..0f53567 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.h
+++ b/media/codec2/components/xaac/C2SoftXaacDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_XAAC_DEC_H_
 #include <utils/Vector.h>
 #include <SimpleC2Component.h>
+#include <util/C2InterfaceHelper.h>
 
 #include "ixheaacd_type_def.h"
 #include "ixheaacd_error_standards.h"
@@ -49,6 +50,8 @@
 
     C2SoftXaacDec(const char* name, c2_node_id_t id,
                const std::shared_ptr<IntfImpl>& intfImpl);
+    C2SoftXaacDec(const char* name, c2_node_id_t id,
+               const std::shared_ptr<C2ReflectorHelper>& helper);
     virtual ~C2SoftXaacDec();
 
     // From SimpleC2Component
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index fbd1b36..6dfe909 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -80,6 +80,7 @@
 
 struct C2PlatformConfig {
     enum encoding_quality_level_t : uint32_t; ///< encoding quality level
+    enum resource_id_t : uint32_t;          ///< resource IDs defined by the platform
     enum tunnel_peek_mode_t: uint32_t;      ///< tunnel peek mode
 };
 
@@ -303,6 +304,9 @@
     // input surface
     kParamIndexCaptureFrameRate, // input-surface, float
     kParamIndexStopTimeOffset, // input-surface, int64_t
+
+    // display processing token
+    kParamIndexDisplayProcessingToken, // int64_t
 };
 
 }
@@ -341,6 +345,8 @@
     API_SAME_INPUT_BUFFER = (1U << 16),   ///< supporting multiple input buffers
                                           ///< backed by the same allocation
 
+    API_BLOCK_FENCES     = (1U << 17),    ///< supporting block fences
+
     API_STREAMS          = (1ULL << 32),  ///< supporting variable number of streams
 
     API_TUNNELING        = (1ULL << 48)   ///< tunneling API
@@ -441,6 +447,7 @@
     _C2_PL_VP8_BASE  = 0xA000,
     _C2_PL_MPEGH_BASE = 0xB000,     // MPEG-H 3D Audio
     _C2_PL_APV_BASE = 0xC000,     // APV
+    _C2_PL_AC4_BASE  = 0xD000,
 
     C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
 };
@@ -617,6 +624,16 @@
     PROFILE_APV_4444_10,                        ///< APV 4444-10 Profile
     PROFILE_APV_4444_12,                        ///< APV 4444-12 Profile
     PROFILE_APV_400_10,                         ///< APV 400-10 Profile
+
+    // AC-4 profiles
+    // Below profiles are labelled “AC-4 Profile xx.yy” where xx is the bitstream_version
+    // and yy is the presentation_version as described in "The MIME codecs parameter", Annex E.13
+    // found at https://www.etsi.org/deliver/etsi_ts/103100_103199/10319002/01.02.01_60/ts_10319002v010201p.pdf
+    PROFILE_AC4_0_0 = _C2_PL_AC4_BASE,          ///< AC-4 Profile 00.00
+    PROFILE_AC4_1_0,                            ///< AC-4 Profile 01.00
+    PROFILE_AC4_1_1,                            ///< AC-4 Profile 01.01
+    PROFILE_AC4_2_1,                            ///< AC-4 Profile 02.01
+    PROFILE_AC4_2_2,                            ///< AC-4 Profile 02.02
 };
 
 enum C2Config::level_t : uint32_t {
@@ -834,6 +851,15 @@
     LEVEL_APV_7_BAND_3,                              ///< APV L 7, BAND 3
     LEVEL_APV_7_1_BAND_3,                            ///< APV L 7.1, BAND 3
 
+    // AC-4 levels
+    // Below levels are labelled “AC-4 Level zz” where zz is the mdcompat as described in
+    // "The MIME codecs parameter", Annex E.13
+    // found at https://www.etsi.org/deliver/etsi_ts/103100_103199/10319002/01.02.01_60/ts_10319002v010201p.pdf
+    LEVEL_AC4_0 = _C2_PL_AC4_BASE,              ///< AC-4 Level 00
+    LEVEL_AC4_1,                                ///< AC-4 Level 01
+    LEVEL_AC4_2,                                ///< AC-4 Level 02
+    LEVEL_AC4_3,                                ///< AC-4 Level 03
+    LEVEL_AC4_4,                                ///< AC-4 Level 04
 };
 
 struct C2ProfileLevelStruct {
@@ -1285,7 +1311,10 @@
  * They represent any physical or abstract entities of limited availability
  * that is required for a component instance to execute and process work.
  *
- * Each defined resource has an id.
+ * Each defined resource has an id. In general, the id is defined by the vendor,
+ * but the platform also defines a limited set of IDs. Vendor IDs SHALL start
+ * from C2PlatformConfig::resource_id_t::VENDOR_START.
+ *
  * The use of a resource is specified by the amount and the kind (e.g. whether the amount
  * of resources is required for each frame processed, or whether they are required
  * regardless of the processing rate (const amount)).
@@ -1300,7 +1329,8 @@
                            C2Config::resource_kind_t kind_,
                            uint64_t amount_)
         : id(id_), kind(kind_), amount(amount_) { }
-    uint32_t id;
+    C2SystemResourceStruct() : C2SystemResourceStruct(0, CONST, 0) {}
+    uint32_t id;            ///< resource ID (see C2PlatformConfig::resource_id_t)
     C2Config::resource_kind_t kind;
     uint64_t amount;
 
@@ -2485,6 +2515,16 @@
 constexpr char C2_PARAMKEY_PLATFORM_FEATURES[] = "api.platform-features";
 
 /**
+ * Resource IDs
+ */
+enum C2PlatformConfig::resource_id_t : uint32_t {
+    DMABUF_MEMORY = 16,  ///< memory allocated from a platform allocator (dmabuf or gralloc)
+
+    /// vendor defined resource IDs start from here
+    VENDOR_START = 0x1000,
+};
+
+/**
  * This structure describes the preferred ion allocation parameters for a given memory usage.
  */
 struct C2StoreIonUsageStruct {
@@ -2853,6 +2893,16 @@
 );
 
 /**
+ * Display processing token.
+ *
+ * An int64 token specifying the display processing configuration for the frame.
+ * This value is passed to IGraphicBufferProducer via QueueBufferInput::setPictureProfileHandle().
+ */
+typedef C2StreamParam<C2Info, C2Int64Value, kParamIndexDisplayProcessingToken>
+        C2StreamDisplayProcessingToken;
+constexpr char C2_PARAMKEY_DISPLAY_PROCESSING_TOKEN[] = "display-processing-token";
+
+/**
  * Video Encoding Statistics Export
  */
 
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 1805464..2115bbb 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -155,6 +155,12 @@
 };
 
 /**
+ * Simple wrapper class for size_t, so that we can define a T::operator new(size_t, _C2FlexCount)
+ * because T::operator new(size_t, size_t) is disallowed by the C++ standard.
+ */
+enum class _C2FlexCount : size_t {};
+
+/**
  * \brief Helper class to check flexible struct requirements and add common operations.
  *
  * Features:
@@ -178,6 +184,11 @@
     // default constructor needed because of the disabled copy constructor
     inline _C2FlexStructCheck() = default;
 
+    /// usual delete operator, needed because the class also has a placement delete operator
+    inline void operator delete(void* ptr) noexcept {
+        ::operator delete(ptr);
+    }
+
 protected:
     // cannot copy flexible params
     _C2FlexStructCheck(const _C2FlexStructCheck<S, ParamIndex, TypeFlags> &) = delete;
@@ -202,14 +213,19 @@
     }
 
     /// dynamic new operator usable for params of type S
-    inline void* operator new(size_t size, size_t flexCount) noexcept {
+    inline void* operator new(size_t size, _C2FlexCount flexCount) noexcept {
         // TODO: assert(size == BASE_SIZE);
-        size = CalcSize(flexCount, size);
+        size = CalcSize(static_cast<size_t>(flexCount), size);
         if (size > 0) {
             return ::operator new(size);
         }
         return nullptr;
     }
+
+    /// placement delete, called during placement new if constructor throws
+    inline void operator delete(void* ptr, _C2FlexCount) noexcept {
+        ::operator delete(ptr);
+    }
 };
 
 /// Define equality (and inequality) operators for params.
@@ -262,21 +278,24 @@
 #define DEFINE_FLEXIBLE_ALLOC(_Type, S, ptr, Ptr) \
     template<typename ...Args> \
     inline static std::ptr##_ptr<_Type> Alloc##Ptr(size_t flexCount, const Args(&... args)) { \
-        return std::ptr##_ptr<_Type>(new(flexCount) _Type(flexCount, args...)); \
+        return std::ptr##_ptr<_Type>(new(static_cast<_C2FlexCount>(flexCount)) \
+            _Type(flexCount, args...)); \
     } \
     template<typename ...Args, typename U=typename S::FlexType> \
     inline static std::ptr##_ptr<_Type> Alloc##Ptr( \
             const std::initializer_list<U> &init, const Args(&... args)) { \
-        return std::ptr##_ptr<_Type>(new(init.size()) _Type(init.size(), args..., init)); \
+        return std::ptr##_ptr<_Type>(new(static_cast<_C2FlexCount>(init.size())) \
+            _Type(init.size(), args..., init)); \
     } \
     template<typename ...Args, typename U=typename S::FlexType> \
     inline static std::ptr##_ptr<_Type> Alloc##Ptr( \
             const std::vector<U> &init, const Args(&... args)) { \
-        return std::ptr##_ptr<_Type>(new(init.size()) _Type(init.size(), args..., init)); \
+        return std::ptr##_ptr<_Type>(new(static_cast<_C2FlexCount>(init.size())) \
+            _Type(init.size(), args..., init)); \
     } \
     template<typename ...Args, typename U=typename S::FlexType, unsigned N> \
     inline static std::ptr##_ptr<_Type> Alloc##Ptr(const U(&init)[N], const Args(&... args)) { \
-        return std::ptr##_ptr<_Type>(new(N) _Type(N, args..., init)); \
+        return std::ptr##_ptr<_Type>(new(static_cast<_C2FlexCount>(N)) _Type(N, args..., init)); \
     } \
 
 /**
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index ec77427..6604315 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -28,7 +28,7 @@
 cc_defaults {
     name: "C2Fuzzer-defaults",
 
-    defaults: [ "libcodec2-static-defaults" ],
+    defaults: ["libcodec2-static-defaults"],
 
     srcs: [
         "C2Fuzzer.cpp",
@@ -38,7 +38,11 @@
         "-Wall",
         "-Werror",
     ],
+}
 
+cc_defaults {
+    name: "C2Fuzzer-defaults-shipped",
+    defaults: ["C2Fuzzer-defaults"],
     fuzz_config: {
         cc: [
             "android-fwk-video@google.com",
@@ -55,9 +59,28 @@
     },
 }
 
+cc_defaults {
+    name: "C2Fuzzer-defaults-experimental",
+    defaults: ["C2Fuzzer-defaults"],
+    fuzz_config: {
+        cc: [
+            "android-fwk-video@google.com",
+        ],
+        componentid: 1344,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libcodec2",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "experimental",
+    },
+}
+
 cc_fuzz {
     name: "C2FuzzerAvcDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.avc.decoder\"",
@@ -71,7 +94,7 @@
 
 cc_fuzz {
     name: "C2FuzzerHevcDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.hevc.decoder\"",
@@ -85,7 +108,7 @@
 
 cc_fuzz {
     name: "C2FuzzerMpeg2Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.mpeg2.decoder\"",
@@ -99,7 +122,7 @@
 
 cc_fuzz {
     name: "C2FuzzerMpeg4Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.mpeg4.decoder\"",
@@ -113,7 +136,7 @@
 
 cc_fuzz {
     name: "C2FuzzerH263Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.h263.decoder\"",
@@ -127,7 +150,7 @@
 
 cc_fuzz {
     name: "C2FuzzerVp8Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.vp8.decoder\"",
@@ -141,7 +164,7 @@
 
 cc_fuzz {
     name: "C2FuzzerVp9Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.vp9.decoder\"",
@@ -155,7 +178,7 @@
 
 cc_fuzz {
     name: "C2FuzzerAV1Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.av1.decoder\"",
@@ -169,10 +192,24 @@
 }
 
 cc_fuzz {
-    name: "C2FuzzerAacDec",
+    name: "C2FuzzerAPVDec",
     defaults: ["C2Fuzzer-defaults"],
 
     cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.apv.decoder\"",
+    ],
+
+    static_libs: [
+        "libopenapv",
+        "libcodec2_soft_apvdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerAacDec",
+    defaults: ["C2Fuzzer-defaults-shipped"],
+
+    cflags: [
         "-DC2COMPONENTNAME=\"c2.android.aac.decoder\"",
     ],
 
@@ -184,7 +221,7 @@
 
 cc_fuzz {
     name: "C2FuzzerAmrnbDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.amrnb.decoder\"",
@@ -200,7 +237,7 @@
 
 cc_fuzz {
     name: "C2FuzzerAmrwbDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.amrwb.decoder\"",
@@ -216,7 +253,7 @@
 
 cc_fuzz {
     name: "C2FuzzerFlacDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.flac.decoder\"",
@@ -231,7 +268,7 @@
 
 cc_fuzz {
     name: "C2FuzzerG711AlawDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.g711.alaw.decoder\"",
@@ -245,7 +282,7 @@
 
 cc_fuzz {
     name: "C2FuzzerG711MlawDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.g711.mlaw.decoder\"",
@@ -259,7 +296,7 @@
 
 cc_fuzz {
     name: "C2FuzzerGsmDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.gsm.decoder\"",
@@ -273,7 +310,7 @@
 
 cc_fuzz {
     name: "C2FuzzerMp3Dec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.mp3.decoder\"",
@@ -287,7 +324,7 @@
 
 cc_fuzz {
     name: "C2FuzzerOpusDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.opus.decoder\"",
@@ -301,7 +338,7 @@
 
 cc_fuzz {
     name: "C2FuzzerRawDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.raw.decoder\"",
@@ -314,7 +351,7 @@
 
 cc_fuzz {
     name: "C2FuzzerVorbisDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-shipped"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.vorbis.decoder\"",
@@ -328,7 +365,7 @@
 
 cc_fuzz {
     name: "C2FuzzerXaacDec",
-    defaults: ["C2Fuzzer-defaults"],
+    defaults: ["C2Fuzzer-defaults-experimental"],
 
     cflags: [
         "-DC2COMPONENTNAME=\"c2.android.xaac.decoder\"",
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index eaabc33..c85df825 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -55,9 +55,9 @@
 }
 
 // DO NOT DEPEND ON THIS DIRECTLY
-// use libcodec2-aidl-defaults instead
+// use libcodec2-aidl-noisurface-defaults instead
 cc_library {
-    name: "libcodec2_aidl",
+    name: "libcodec2_aidl_noisurface",
     min_sdk_version: "30",
     vendor_available: true,
     apex_available: [
@@ -78,8 +78,6 @@
         "Configurable.cpp",
         "InputBufferManager.cpp",
         "ParamTypes.cpp",
-        "inputsurface/InputSurface.cpp",
-        "inputsurface/InputSurfaceConnection.cpp",
     ],
 
     header_libs: [
@@ -100,7 +98,6 @@
         "libhidlbase",
         "liblog",
         "libnativewindow",
-        "libmediandk",
         "libstagefright_aidl_bufferpool2",
         "libstagefright_bufferpool@2.0.1",
         "libui",
@@ -140,6 +137,108 @@
     ],
 }
 
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-aidl-defaults instead
+cc_library {
+    name: "libcodec2_aidl",
+    min_sdk_version: "30",
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+        "libcodec2_hal_selection",
+    ],
+
+    srcs: [
+        "BufferTypes.cpp",
+        "Component.cpp",
+        "ComponentInterface.cpp",
+        "ComponentStore.cpp",
+        "Configurable.cpp",
+        "InputBufferManager.cpp",
+        "ParamTypes.cpp",
+        "inputsurface/FrameDropper.cpp",
+        "inputsurface/InputSurface.cpp",
+        "inputsurface/InputSurfaceConnection.cpp",
+        "inputsurface/InputSurfaceSource.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.common-V2-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
+        "android.hardware.media.c2-V1-ndk",
+        "libbase",
+        "libbinder_ndk",
+        "libcodec2",
+        "libcodec2_hal_common",
+        "libcodec2_hidl_plugin_stub",
+        "libcodec2_vndk",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libnativewindow",
+        "libmediandk",
+        "libstagefright_aidl_bufferpool2",
+        "libstagefright_bufferpool@2.0.1",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libaidlcommonsupport",
+    ],
+
+    target: {
+        vendor: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+            ],
+            shared_libs: [
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+    },
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2-V1-ndk",
+        "libcodec2",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-aidl-noisurface-defaults",
+    min_sdk_version: "30",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2-V1-ndk",
+        "libbinder_ndk",
+        "libcodec2_aidl_noisurface",
+    ],
+}
+
 // public dependency for Codec 2.0 HAL service implementations
 cc_defaults {
     name: "libcodec2-aidl-defaults",
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 87c9d87..a2c45cb 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -209,6 +209,22 @@
     mInterface = SharedRefBase::make<ComponentInterface>(
             component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
+    mBlockFenceSupport = false;
+    if (mInit != C2_OK) {
+        return;
+    }
+    std::shared_ptr<C2ComponentInterface> intf = component->intf();
+    if (!intf) {
+        return;
+    }
+    c2_status_t err = C2_OK;
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+    C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+    err = intf->query_vb({&features}, {}, C2_MAY_BLOCK, &heapParams);
+    if (err == C2_OK &&
+            ((features.value & C2Config::API_BLOCK_FENCES) != 0)) {
+        mBlockFenceSupport = true;
+    }
 }
 
 c2_status_t Component::status() const {
@@ -363,6 +379,7 @@
             allocatorParam.igba = allocator.gbAllocator->igba;
             allocatorParam.waitableFd.reset(
                     allocator.gbAllocator->waitableFd.dup().release());
+            allocatorParam.blockFenceSupport = mBlockFenceSupport;
         }
         break;
         default: {
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index 9725bcf..712a3e9 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -99,6 +99,7 @@
     // alive. These C2BlockPool objects can be deleted by calling
     // destroyBlockPool(), reset() or release(), or by destroying the component.
     std::map<uint64_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+    bool mBlockFenceSupport;
 
     void initListener(const std::shared_ptr<Component>& self);
 
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/FrameDropper.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/FrameDropper.h
new file mode 100644
index 0000000..0a6394c
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/FrameDropper.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/Errors.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include <C2.h>
+
+namespace aidl::android::hardware::media::c2::implementation {
+
+/**
+ * The class decides whether to drop a frame or not for InputSurface and
+ * InputSurfaceConnection.
+ */
+struct FrameDropper {
+    FrameDropper();
+
+    ~FrameDropper();
+
+    /**
+     * Sets max frame rate, which is based on for deciding frame drop.
+     *
+     * @param[in] maxFrameRate  negative value means there is no drop
+     *                          zero value is ignored
+     */
+    void setMaxFrameRate(float maxFrameRate);
+
+    /** Returns true if the frame at |timeUs| should be dropped to honor the
+        max frame rate; always returns false if no max frame rate has been
+        set via setMaxFrameRate. */
+    bool shouldDrop(int64_t timeUs);
+
+    /** Returns true if all frame drop logic should be disabled. */
+    bool disabled() { return (mMinIntervalUs == -1ll); }
+
+private:
+    int64_t mDesiredMinTimeUs;
+    int64_t mMinIntervalUs;
+
+    DISALLOW_EVIL_CONSTRUCTORS(FrameDropper);
+};
+
+}  // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
index 5c2cc2e..8e15778 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
@@ -17,15 +17,21 @@
 #pragma once
 
 #include <aidl/android/hardware/media/c2/BnInputSurface.h>
+#include <utils/RefBase.h>
 
+#include <C2.h>
+#include <C2Config.h>
 #include <codec2/aidl/Configurable.h>
 #include <util/C2InterfaceHelper.h>
 
-#include <C2.h>
-
 #include <memory>
 
+namespace aidl::android::hardware::media::c2::implementation {
+class InputSurfaceSource;
+}
+
 namespace aidl::android::hardware::media::c2::utils {
+struct InputSurfaceConnection;
 
 struct InputSurface : public BnInputSurface {
     InputSurface();
@@ -40,6 +46,61 @@
             const std::shared_ptr<IInputSink>& sink,
             std::shared_ptr<IInputSurfaceConnection>* connection) override;
 
+    // Constant definitions.
+    // Default image size for AImageReader
+    constexpr static uint32_t kDefaultImageWidth = 1280;
+    constexpr static uint32_t kDefaultImageHeight = 720;
+    // Default # of buffers for AImageReader
+    constexpr static uint32_t kDefaultImageBufferCount = 16;
+    constexpr static uint32_t kDefaultImageDataspace = HAL_DATASPACE_BT709;
+
+    // Configs
+    // Config for AImageReader creation
+    struct ImageConfig {
+        int32_t mWidth;         // image width
+        int32_t mHeight;        // image height
+        int32_t mFormat;        // image pixel format
+        int32_t mNumBuffers;    // number of max images for AImageReader(consumer)
+        uint64_t mUsage;        // image usage
+        uint32_t mDataspace;    // image dataspace
+    };
+
+    // Config for InputSurface active buffer stream control
+    struct StreamConfig {
+        // IN PARAMS
+        float mMinFps = 0.0;        // minimum fps (repeat frame to achieve this)
+        float mMaxFps = 0.0;        // max fps (via frame drop)
+        float mCaptureFps = 0.0;    // capture fps
+        float mCodedFps = 0.0;      // coded fps
+        bool mSuspended = false;    // suspended
+        int64_t mSuspendAtUs = 0;   // suspend time
+        int64_t mResumeAtUs = 0;   // resume time
+        bool mStopped = false;      // stopped
+        int64_t mStopAtUs = 0;      // stop time
+        int64_t mStartAtUs = 0;     // start time
+        int64_t mTimeOffsetUs = 0;  // time offset (input => codec)
+
+        // IN PARAMS (CODEC WRAPPER)
+        C2TimestampGapAdjustmentStruct::mode_t
+                mAdjustedFpsMode = C2TimestampGapAdjustmentStruct::NONE;
+        int64_t mAdjustedGapUs = 0;
+        int mPriority = INT_MAX;        // priority of queue thread (if any);
+                                        // INT_MAX for no-op
+    };
+
+    // TODO: optimize this
+    // The client requests the change of these configurations now.
+    // We can request the change of these configurations from HAL directly
+    // where onWorkDone() callback is called.
+    //
+    // Config for current work status w.r.t input buffers
+    struct WorkStatusConfig {
+        int32_t mLastDoneIndex = -1;      // Last work done input buffer index
+        uint32_t mLastDoneCount = 0;      // # of work done count
+        uint64_t mEmptyCount = 0;         // # of input buffers being emptied
+    };
+
+
 protected:
     class Interface;
     class ConfigurableIntf;
@@ -50,12 +111,29 @@
 
     virtual ~InputSurface() override;
 
+private:
+    ::android::sp<implementation::InputSurfaceSource> mSource;
+    std::shared_ptr<InputSurfaceConnection> mConnection;
 
-    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
-    static void OnBinderDied(void *cookie);
-    static void OnBinderUnlinked(void *cookie);
-    struct DeathContext;
-    DeathContext *mDeathContext;
+    ImageConfig mImageConfig;
+    StreamConfig mStreamConfig;
+    WorkStatusConfig mWorkStatusConfig;
+
+    std::mutex mLock;
+
+    friend class ConfigurableIntf;
+
+    bool updateConfig(
+            ImageConfig &imageConfig,
+            StreamConfig &streamConfig,
+            WorkStatusConfig &workStatusConfig,
+            int64_t *inputDelayUs);
+
+    void updateImageConfig(ImageConfig &config);
+    bool updateStreamConfig(StreamConfig &config, int64_t *inputDelayUs);
+    void updateWorkStatusConfig(WorkStatusConfig &config);
+
+    void release();
 };
 
 }  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
index 59361e1..7a57f18 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
@@ -16,17 +16,25 @@
 
 #pragma once
 
+#include <aidl/android/hardware/media/c2/BnInputSink.h>
 #include <aidl/android/hardware/media/c2/BnInputSurfaceConnection.h>
 #include <media/NdkImage.h>
+#include <utils/RefBase.h>
 
 #include <C2.h>
 
 #include <memory>
 
+namespace aidl::android::hardware::media::c2::implementation {
+class InputSurfaceSource;
+}
+
 namespace aidl::android::hardware::media::c2::utils {
 
 struct InputSurfaceConnection : public BnInputSurfaceConnection {
-    InputSurfaceConnection();
+    InputSurfaceConnection(
+            const std::shared_ptr<IInputSink>& sink,
+            ::android::sp<c2::implementation::InputSurfaceSource> const &source);
     c2_status_t status() const;
 
     // Methods from IInputSurfaceConnection follow.
@@ -51,6 +59,10 @@
 
 protected:
     virtual ~InputSurfaceConnection() override;
+
+private:
+    std::weak_ptr<IInputSink> mSink;
+    ::android::sp<c2::implementation::InputSurfaceSource> mSource;
 };
 
 }  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceSource.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceSource.h
new file mode 100644
index 0000000..25a55e7
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceSource.h
@@ -0,0 +1,529 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <list>
+#include <map>
+#include <mutex>
+
+#include <utils/RefBase.h>
+
+#include <media-vndk/VndkImageReader.h>
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+#include <codec2/aidl/inputsurface/InputSurfaceConnection.h>
+
+namespace aidl::android::hardware::media::c2::implementation {
+
+struct FrameDropper;
+/**
+ * This class is used to feed codecs from ANativeWindow via AImageReader
+ * for InputSurface and InputSurfaceConnection.
+ *
+ * Instances of the class don't run on a dedicated thread.  Instead,
+ * various events trigger data movement:
+ *
+ *  - Availability of a new frame of data from the AImageReader (notified
+ *    via the onFrameAvailable callback).
+ *  - The return of a codec buffer.
+ *  - Application signaling end-of-stream.
+ *  - Transition to or from "executing" state.
+ *
+ * Frames of data (and, perhaps, the end-of-stream indication) can arrive
+ * before the codec is in the "executing" state, so we need to queue
+ * things up until we're ready to go.
+ *
+ * The InputSurfaceSource can be configure dynamically to discard frames
+ * from the source:
+ *
+ * - if their timestamp is less than a start time
+ * - if the source is suspended or stopped and the suspend/stop-time is reached
+ * - if EOS was signaled
+ * - if there is no encoder connected to it
+ *
+ * The source, furthermore, may choose to not encode (drop) frames if:
+ *
+ * - to throttle the frame rate (keep it under a certain limit)
+ *
+ * Finally the source may optionally hold onto the last non-discarded frame
+ * (even if it was dropped) to reencode it after an interval if no further
+ * frames are sent by the producer.
+ */
+class InputSurfaceSource : public ::android::RefBase {
+// TODO: remove RefBase dependency and AHanderReflector.
+public:
+    // creates an InputSurfaceSource.
+    // init() have to be called prior to use the class.
+    InputSurfaceSource();
+
+    virtual ~InputSurfaceSource();
+
+    // Initialize with the default parameter. (persistent surface or init params
+    // are not decided yet.)
+    void init();
+
+    // Initialize with the specified parameters. (non-persistent surface)
+    void initWithParams(int32_t width, int32_t height, int32_t format,
+                       int32_t maxImages, uint64_t usage);
+
+    // We can't throw an exception if the constructor fails, so we just set
+    // this and require that the caller test the value.
+    c2_status_t initCheck() const {
+        return mInitCheck;
+    }
+
+    /**
+     * Returns the handle of ANativeWindow of the AImageReader.
+     */
+    ANativeWindow *getNativeWindow();
+
+    // This is called when component transitions to running state, which means
+    // we can start handing it buffers.  If we already have buffers of data
+    // sitting in the AImageReader, this will send them to the codec.
+    c2_status_t start();
+
+    // This is called when component transitions to stopped, indicating that
+    // the codec is meant to return all buffers back to the client for them
+    // to be freed. Do NOT submit any more buffers to the component.
+    c2_status_t stop();
+
+    // This is called when component transitions to released, indicating that
+    // we are shutting down.
+    c2_status_t release();
+
+    // A "codec buffer", i.e. a buffer that can be used to pass data into
+    // the encoder, has been allocated.  (This call does not call back into
+    // component.)
+    c2_status_t onInputBufferAdded(int32_t bufferId);
+
+    // Called when encoder is no longer using the buffer.  If we have an
+    // AImageReader buffer available, fill it with a new frame of data;
+    // otherwise, just mark it as available.
+    c2_status_t onInputBufferEmptied(int32_t bufferId, int fenceFd);
+
+    // Configure the buffer source to be used with a component with the default
+    // data space.
+    c2_status_t configure(
+        const std::shared_ptr<c2::utils::InputSurfaceConnection> &component,
+        int32_t dataSpace,
+        int32_t bufferCount,
+        uint32_t frameWidth,
+        uint32_t frameHeight,
+        uint64_t consumerUsage);
+
+    // This is called after the last input frame has been submitted or buffer
+    // timestamp is greater or equal than stopTimeUs. We need to submit an empty
+    // buffer with the EOS flag set.  If we don't have a codec buffer ready,
+    // we just set the mEndOfStream flag.
+    c2_status_t signalEndOfInputStream();
+
+    // If suspend is true, all incoming buffers (including those currently
+    // in the BufferQueue) with timestamp larger than timeUs will be discarded
+    // until the suspension is lifted. If suspend is false, all incoming buffers
+    // including those currently in the BufferQueue) with timestamp larger than
+    // timeUs will be processed. timeUs uses SYSTEM_TIME_MONOTONIC time base.
+    c2_status_t setSuspend(bool suspend, int64_t timeUs);
+
+    // Specifies the interval after which we requeue the buffer previously
+    // queued to the encoder. This is useful in the case of surface flinger
+    // providing the input surface if the resulting encoded stream is to
+    // be displayed "live". If we were not to push through the extra frame
+    // the decoder on the remote end would be unable to decode the latest frame.
+    // This API must be called before transitioning the encoder to "executing"
+    // state and once this behaviour is specified it cannot be reset.
+    c2_status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
+
+    // Sets the input buffer timestamp offset.
+    // When set, the sample's timestamp will be adjusted with the timeOffsetUs.
+    c2_status_t setTimeOffsetUs(int64_t timeOffsetUs);
+
+    /*
+     * Set the maximum frame rate on the source.
+     *
+     * When maxFps is a positive number, it indicates the maximum rate at which
+     * the buffers from this source will be sent to the encoder. Excessive
+     * frames will be dropped to meet the frame rate requirement.
+     *
+     * When maxFps is a negative number, any frame drop logic will be disabled
+     * and all frames from this source will be sent to the encoder, even when
+     * the timestamp goes backwards. Note that some components may still drop
+     * out-of-order frames silently, so this usually has to be used in
+     * conjunction with OMXNodeInstance::setMaxPtsGapUs() workaround.
+     *
+     * When maxFps is 0, this call will fail with BAD_VALUE.
+     */
+    c2_status_t setMaxFps(float maxFps);
+
+    // Sets the time lapse (or slow motion) parameters.
+    // When set, the sample's timestamp will be modified to playback framerate,
+    // and capture timestamp will be modified to capture rate.
+    c2_status_t setTimeLapseConfig(double fps, double captureFps);
+
+    // Sets the start time us (in system time), samples before which should
+    // be dropped and not submitted to encoder
+    c2_status_t setStartTimeUs(int64_t startTimeUs);
+
+    // Sets the stop time us (in system time), samples after which should be dropped
+    // and not submitted to encoder. timeUs uses SYSTEM_TIME_MONOTONIC time base.
+    c2_status_t setStopTimeUs(int64_t stopTimeUs);
+
+    // Gets the stop time offset in us. This is the time offset between latest buffer
+    // time and the stopTimeUs. If stop time is not set, INVALID_OPERATION will be returned.
+    // If return is OK, *stopTimeOffsetUs will contain the valid offset. Otherwise,
+    // *stopTimeOffsetUs will not be modified. Positive stopTimeOffsetUs means buffer time
+    // larger than stopTimeUs.
+    c2_status_t getStopTimeOffsetUs(int64_t *stopTimeOffsetUs);
+
+    // Sets the desired color aspects, e.g. to be used when producer does not specify a dataspace.
+    c2_status_t setColorAspects(int32_t aspectsPacked);
+
+protected:
+
+    // Called from AImageReader_ImageListener::onImageAvailable when a new frame
+    // of  data is available. If we're executing and a codec buffer is
+    // available, we acquire the buffer as an AImage, copy the AImage into the codec
+    // buffer, and call Empty[This]Buffer.  If we're not yet executing or
+    // there's no codec buffer available, we just increment mNumFramesAvailable
+    // and return.
+    void onFrameAvailable() ;
+
+    // Called from AImageReader_BufferRemovedListener::onBufferRemoved when a
+    // buffer is removed. We clear an appropriate cached buffer.
+    void onBufferReleased(uint64_t bid) ;
+
+private:
+
+    // AImageReader listener interface
+    struct ImageReaderListener;
+    AImageReader_ImageListener mImageListener;
+    AImageReader_BufferRemovedListener mBufferRemovedListener;
+
+    // Lock, covers all member variables.
+    mutable std::mutex mMutex;
+
+    // Used to report constructor failure regarding AImageReader creation.
+    c2_status_t mInitCheck;
+
+    // Graphic buffer reference objects
+    // --------------------------------
+
+    // These are used to keep a reference to AImage and gralloc handles owned by the
+    // InputSurfaceSource as well as to manage the cache slots. Separate references are owned by
+    // the buffer cache (controlled by the buffer queue/buffer producer) and the codec.
+
+    // When we get a buffer from the producer (BQ) it designates them to be cached into specific
+    // slots. Each slot owns a shared reference to the graphic buffer (we track these using
+    // CachedBuffer) that is in that slot, but the producer controls the slots.
+    struct CachedBuffer;
+
+    // When we acquire a buffer, we must release it back to the producer once we (or the codec)
+    // no longer uses it (as long as the buffer is still in the cache slot). We use shared
+    // AcquiredBuffer instances for this purpose - and we call release buffer when the last
+    // reference is relinquished.
+    struct AcquiredBuffer;
+
+    // We also need to keep some extra metadata (other than the buffer reference) for acquired
+    // buffers. These are tracked in VideoBuffer struct.
+    struct VideoBuffer {
+        std::shared_ptr<AcquiredBuffer> mBuffer;
+        nsecs_t mTimestampNs;
+        android_dataspace_t mDataspace;
+    };
+
+    // Cached and acquired buffers
+    // --------------------------------
+
+    typedef uint64_t ahwb_id;
+    typedef std::map<ahwb_id, std::shared_ptr<CachedBuffer>> BufferIdMap;
+
+    // TODO: refactor(or remove) this not to have the buffer,
+    // since it is no longer slot based
+    // Maps a AHardwareBuffer id to the cached buffer
+    BufferIdMap mBufferIds;
+
+    // Queue of buffers acquired in chronological order that are not yet submitted to the codec
+    ::std::list<VideoBuffer> mAvailableBuffers;
+
+    // Number of buffers that have been signaled by the producer that they are available, but
+    // we've been unable to acquire them due to our max acquire count
+    int32_t mNumAvailableUnacquiredBuffers;
+
+    // Number of frames acquired from consumer (debug only)
+    // (as in acquireBuffer called, and release needs to be called)
+    int32_t mNumOutstandingAcquires;
+
+    // Acquire a buffer from the BQ and store it in |item| if successful
+    // \return OK on success, or error on failure.
+    c2_status_t acquireBuffer_l(VideoBuffer *item);
+
+    // Called when a buffer was acquired from the producer
+    void onBufferAcquired_l(const VideoBuffer &buffer);
+
+    // marks the buffer of the id no longer cached, and accounts for the outstanding
+    // acquire count. Returns true if the slot was populated; otherwise, false.
+    bool discardBufferInId_l(ahwb_id id);
+
+    // marks the buffer at the id index no longer cached, and accounts for the outstanding
+    // acquire count
+    void discardBufferAtIter_l(BufferIdMap::iterator &bit);
+
+    // release all acquired and unacquired available buffers
+    // This method will return if it fails to acquire an unacquired available buffer, which will
+    // leave mNumAvailableUnacquiredBuffers positive on return.
+    void releaseAllAvailableBuffers_l();
+
+    // returns whether we have any available buffers (acquired or not-yet-acquired)
+    bool haveAvailableBuffers_l() const {
+        return !mAvailableBuffers.empty() || mNumAvailableUnacquiredBuffers > 0;
+    }
+
+    // Codec buffers
+    // -------------
+
+    // When we queue buffers to the encoder, we must hold the references to the graphic buffers
+    // in those buffers - as the producer may free the slots.
+
+    typedef int32_t codec_buffer_id;
+
+    // set of codec buffer ID-s of buffers available to fill
+    std::list<codec_buffer_id> mFreeCodecBuffers;
+
+    // maps codec buffer ID-s to buffer info submitted to the codec. Used to keep a reference for
+    // the graphics buffer.
+    std::map<codec_buffer_id, std::shared_ptr<AcquiredBuffer>> mSubmittedCodecBuffers;
+
+    // Processes the next acquired frame. If there is no available codec buffer, it returns false
+    // without any further action.
+    //
+    // Otherwise, it consumes the next acquired frame and determines if it needs to be discarded or
+    // dropped. If neither are needed, it submits it to the codec. It also saves the latest
+    // non-dropped frame and submits it for repeat encoding (if this is enabled).
+    //
+    // \require there must be an acquired frame (i.e. we're in the onFrameAvailable callback,
+    // or if we're in codecBufferEmptied and mNumFramesAvailable is nonzero).
+    // \require codec must be executing
+    // \returns true if acquired (and handled) the next frame. Otherwise, false.
+    bool fillCodecBuffer_l();
+
+    // Calculates the media timestamp for |item| and on success it submits the buffer to the codec,
+    // while also keeping a reference for it in mSubmittedCodecBuffers.
+    // Returns UNKNOWN_ERROR if the buffer was not submitted due to buffer timestamp. Otherwise,
+    // it returns any submit success or error value returned by the codec.
+    c2_status_t submitBuffer_l(const VideoBuffer &item);
+
+    // Submits an empty buffer, with the EOS flag set if there is an available codec buffer and
+    // sets mEndOfStreamSent flag. Does nothing if there is no codec buffer available.
+    void submitEndOfInputStream_l();
+
+    // Set to true if we want to send end-of-stream after we run out of available frames from the
+    // producer
+    bool mEndOfStream;
+
+    // Flag that the EOS was submitted to the encoder
+    bool mEndOfStreamSent;
+
+    // Dataspace for the last frame submitted to the codec
+    android_dataspace mLastDataspace;
+
+    // Default color aspects for this source
+    int32_t mDefaultColorAspectsPacked;
+
+    // called when the data space of the input buffer changes
+    void onDataspaceChanged_l(android_dataspace dataspace, android_pixel_format pixelFormat);
+
+    // Pointer back to the component that created us.  We send buffers here.
+    std::shared_ptr<c2::utils::InputSurfaceConnection> mComponent;
+
+    // Set by start() / stop().
+    bool mExecuting;
+
+    bool mSuspended;
+
+    // returns true if this source is unconditionally discarding acquired buffers at the moment
+    // regardless of the metadata of those buffers
+    bool areWeDiscardingAvailableBuffers_l();
+
+    int64_t mLastFrameTimestampUs;
+
+    // AImageReader creates ANativeWindow. The created ANativeWindow is passed
+    // to the producer, and mImageReader is used internally to retrieve the
+    // buffers queued by the producer.
+    AImageReader *mImageReader;
+    ANativeWindow *mImageWindow;
+
+    // AImageReader creation parameters
+    // maxImages cannot be changed after AImageReader is created.
+    struct ImageReaderConfig {
+        int32_t width;
+        int32_t height;
+        int32_t format;
+        int32_t maxImages;
+        uint64_t usage;
+    } mImageReaderConfig;
+
+    // The time to stop sending buffers.
+    int64_t mStopTimeUs;
+
+    struct ActionItem {
+        typedef enum {
+            PAUSE,
+            RESUME,
+            STOP
+        } ActionType;
+        ActionType mAction;
+        int64_t mActionTimeUs;
+    };
+
+    // Maintain last action timestamp to ensure all the action timestamps are
+    // monotonically increasing.
+    int64_t mLastActionTimeUs;
+
+    // An action queue that queue up all the actions sent to InputSurfaceSource.
+    // STOP action should only show up at the end of the list as all the actions
+    // after a STOP action will be discarded. mActionQueue is protected by mMutex.
+    std::list<ActionItem> mActionQueue;
+
+    ////
+    friend struct ::android::AHandlerReflector<InputSurfaceSource>;
+
+    enum {
+        kWhatRepeatLastFrame,   ///< queue last frame for reencoding
+    };
+    enum {
+        kRepeatLastFrameCount = 10,
+    };
+
+    int64_t mSkipFramesBeforeNs;
+
+    std::shared_ptr<FrameDropper> mFrameDropper;
+
+    ::android::sp<::android::ALooper> mLooper;
+    ::android::sp<::android::AHandlerReflector<InputSurfaceSource> > mReflector;
+
+    // Repeat last frame feature
+    // -------------------------
+    // configuration parameter: repeat interval for frame repeating (<0 if repeating is disabled)
+    int64_t mFrameRepeatIntervalUs;
+
+    // current frame repeat generation - used to cancel a pending frame repeat
+    int32_t mRepeatLastFrameGeneration;
+
+    // number of times to repeat latest frame (0 = none)
+    int32_t mOutstandingFrameRepeatCount;
+
+    // The previous buffer should've been repeated but
+    // no codec buffer was available at the time.
+    bool mFrameRepeatBlockedOnCodecBuffer;
+
+    // hold a reference to the last acquired (and not discarded) frame for frame repeating
+    VideoBuffer mLatestBuffer;
+
+    // queue last frame for reencode after the repeat interval.
+    void queueFrameRepeat_l();
+
+    // save |item| as the latest buffer and queue it for reencode (repeat)
+    void setLatestBuffer_l(const VideoBuffer &item);
+
+    // submit last frame to encoder and queue it for reencode
+    // \return true if buffer was submitted, false if it wasn't (e.g. source is suspended, there
+    // is no available codec buffer)
+    bool repeatLatestBuffer_l();
+
+    // Time lapse / slow motion configuration
+    // --------------------------------------
+
+    // desired frame rate for encoding - value <= 0 if undefined
+    double mFps;
+
+    // desired frame rate for capture - value <= 0 if undefined
+    double mCaptureFps;
+
+    // Time lapse mode is enabled if the capture frame rate is defined and it is
+    // smaller than half the encoding frame rate (if defined). In this mode,
+    // frames that come in between the capture interval (the reciprocal of the
+    // capture frame rate) are dropped and the encoding timestamp is adjusted to
+    // match the desired encoding frame rate.
+    //
+    // Slow motion mode is enabled if both encoding and capture frame rates are
+    // defined and the encoding frame rate is less than half the capture frame
+    // rate. In this mode, the source is expected to produce frames with an even
+    // timestamp interval (after rounding) with the configured capture fps.
+    //
+    // These modes must be configured by calling setTimeLapseConfig() before
+    // using this source.
+    //
+    // Timestamp snapping for slow motion recording
+    // ============================================
+    //
+    // When the slow motion mode is configured with setTimeLapseConfig(), the
+    // property "debug.stagefright.snap_timestamps" will be checked. If the
+    // value of the property is set to any value other than 1, mSnapTimestamps
+    // will be set to false. Otherwise, mSnapTimestamps will be set to true.
+    // (mSnapTimestamps will be false for time lapse recording regardless of the
+    // value of the property.)
+    //
+    // If mSnapTimestamps is true, i.e., timestamp snapping is enabled, the
+    // first source timestamp will be used as the source base time; afterwards,
+    // the timestamp of each source frame will be snapped to the nearest
+    // expected capture timestamp and scaled to match the configured encoding
+    // frame rate.
+    //
+    // If timestamp snapping is disabled, the timestamp of source frames will
+    // be scaled to match the ratio between the configured encoding frame rate
+    // and the configured capture frame rate.
+
+    // whether timestamps will be snapped
+    bool mSnapTimestamps{true};
+
+    // adjusted capture timestamp of the base frame
+    int64_t mBaseCaptureUs;
+
+    // adjusted encoding timestamp of the base frame
+    int64_t mBaseFrameUs;
+
+    // number of frames from the base time
+    int64_t mFrameCount;
+
+    // adjusted capture timestamp for previous frame (negative if there were
+    // none)
+    int64_t mPrevCaptureUs;
+
+    // adjusted media timestamp for previous frame (negative if there were none)
+    int64_t mPrevFrameUs;
+
+    // desired offset between media time and capture time
+    int64_t mInputBufferTimeOffsetUs;
+
+    // Calculates and outputs the timestamp to use for a buffer with a specific buffer timestamp
+    // |bufferTimestampNs|. Returns false on failure (buffer too close or timestamp is moving
+    // backwards). Otherwise, stores the media timestamp in |*codecTimeUs| and returns true.
+    //
+    // This method takes into account the start time offset and any time lapse or slow motion time
+    // adjustment requests.
+    bool calculateCodecTimestamp_l(nsecs_t bufferTimeNs, int64_t *codecTimeUs);
+
+    void onMessageReceived(const ::android::sp<::android::AMessage> &msg);
+
+    void createImageListeners();
+
+    DISALLOW_EVIL_CONSTRUCTORS(InputSurfaceSource);
+};
+
+}  // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/aidl/inputsurface/FrameDropper.cpp b/media/codec2/hal/aidl/inputsurface/FrameDropper.cpp
new file mode 100644
index 0000000..f5fcf05
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/FrameDropper.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AIDL-FrameDropper"
+#include <utils/Log.h>
+
+#include <codec2/aidl/inputsurface/FrameDropper.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace aidl::android::hardware::media::c2::implementation {
+
+static const int64_t kMaxJitterUs = 2000;
+
+FrameDropper::FrameDropper()
+    : mDesiredMinTimeUs(-1),
+      mMinIntervalUs(0) {
+}
+
+FrameDropper::~FrameDropper() {
+}
+
+void FrameDropper::setMaxFrameRate(float maxFrameRate) {
+    if (maxFrameRate < 0) {
+        mMinIntervalUs = -1LL;
+        return;
+    }
+
+    if (maxFrameRate == 0) {
+        ALOGW("framerate should be positive but got %f.", maxFrameRate);
+        return;
+    }
+    mMinIntervalUs = (int64_t) (1000000.0f / maxFrameRate);
+}
+
+bool FrameDropper::shouldDrop(int64_t timeUs) {
+    if (mMinIntervalUs <= 0) {
+        return false;
+    }
+
+    if (mDesiredMinTimeUs < 0) {
+        mDesiredMinTimeUs = timeUs + mMinIntervalUs;
+        ALOGV("first frame %lld, next desired frame %lld",
+                (long long)timeUs, (long long)mDesiredMinTimeUs);
+        return false;
+    }
+
+    if (timeUs < (mDesiredMinTimeUs - kMaxJitterUs)) {
+        ALOGV("drop frame %lld, desired frame %lld, diff %lld",
+                (long long)timeUs, (long long)mDesiredMinTimeUs,
+                (long long)(mDesiredMinTimeUs - timeUs));
+        return true;
+    }
+
+    int64_t n = (timeUs - mDesiredMinTimeUs + kMaxJitterUs) / mMinIntervalUs;
+    mDesiredMinTimeUs += (n + 1) * mMinIntervalUs;
+    ALOGV("keep frame %lld, next desired frame %lld, diff %lld",
+            (long long)timeUs, (long long)mDesiredMinTimeUs,
+            (long long)(mDesiredMinTimeUs - timeUs));
+    return false;
+}
+
+}  // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurface.cpp b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
index 5f6d176..ce694ee 100644
--- a/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
+++ b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
@@ -17,11 +17,29 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2-InputSurface"
 #include <android-base/logging.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_interface_utils.h>
+
+#include <mutex>
+
+#include <C2Config.h>
 
 #include <codec2/aidl/inputsurface/InputSurface.h>
+#include <codec2/aidl/inputsurface/InputSurfaceConnection.h>
+#include <codec2/aidl/inputsurface/InputSurfaceSource.h>
+
 
 namespace aidl::android::hardware::media::c2::utils {
 
+using ImageConfig = InputSurface::ImageConfig;
+using StreamConfig = InputSurface::StreamConfig;
+using WorkStatusConfig = InputSurface::WorkStatusConfig;
+
+template <typename T>
+static C2R BasicSetter(bool, C2InterfaceHelper::C2P<T> &) {
+    return C2R::Ok();
+}
+
 // Derived class of C2InterfaceHelper
 class InputSurface::Interface : public C2InterfaceHelper {
 public:
@@ -31,51 +49,524 @@
 
         setDerivedInstance(this);
 
+        addParameter(
+                DefineParam(mBlockSize, C2_PARAMKEY_BLOCK_SIZE)
+                .withDefault(new C2StreamBlockSizeInfo::output(
+                        0u, kDefaultImageWidth, kDefaultImageHeight))
+                .withFields({
+                        C2F(mBlockSize, width).inRange(2, 8192, 2),
+                        C2F(mBlockSize, height).inRange(2, 8192, 2),})
+                .withSetter(BlockSizeSetter)
+                .build());
+        addParameter(
+                DefineParam(mBlockCount, C2_PARAMKEY_BLOCK_COUNT)
+                .withDefault(new C2StreamBlockCountInfo::output(
+                    0u, kDefaultImageBufferCount))
+                .withFields({C2F(mBlockCount, value).any()})
+                .withSetter(BlockCountSetter)
+                .build());
+        addParameter(
+                DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                .withDefault(new C2StreamPixelFormatInfo::output(
+                        0u, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED))
+                .withFields({C2F(mPixelFormat, value).any()})
+                .withSetter(BasicSetter<decltype(mPixelFormat)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mUsage, C2_PARAMKEY_OUTPUT_STREAM_USAGE)
+                .withDefault(new C2StreamUsageTuning::output(0u, 0ULL))
+                .withFields({C2F(mUsage, value).any()})
+                .withSetter(BasicSetter<decltype(mUsage)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mDataspace, C2_PARAMKEY_DATA_SPACE)
+                .withDefault(new C2StreamDataSpaceInfo::output(
+                        0u, kDefaultImageDataspace))
+                .withFields({C2F(mDataspace, value).any()})
+                .withSetter(BasicSetter<decltype(mDataspace)::element_type>)
+                .build());
+
+        addParameter(
+                DefineParam(mMinFps, C2_PARAMKEY_INPUT_SURFACE_MIN_FRAME_RATE)
+                .withDefault(new C2PortMinFrameRateTuning::output(0.0))
+                .withFields({C2F(mMinFps, value).any()})
+                .withSetter(BasicSetter<decltype(mMinFps)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mMaxFps, C2_PARAMKEY_INPUT_SURFACE_MAX_FRAME_RATE)
+                .withDefault(new C2PortMaxFrameRateTuning::output(0.0))
+                .withFields({C2F(mMaxFps, value).any()})
+                .withSetter(BasicSetter<decltype(mMaxFps)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mCaptureFps, C2_PARAMKEY_INPUT_SURFACE_CAPTURE_FRAME_RATE)
+                .withDefault(new C2PortCaptureFrameRateTuning::output(0.0))
+                .withFields({C2F(mCaptureFps, value).any()})
+                .withSetter(BasicSetter<decltype(mCaptureFps)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mCodedFps, C2_PARAMKEY_FRAME_RATE)
+                .withDefault(new C2StreamFrameRateInfo::output(0u, 0.0))
+                .withFields({C2F(mCodedFps, value).any()})
+                .withSetter(BasicSetter<decltype(mCodedFps)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mTimeOffset, C2_PARAMKEY_FRAME_RATE)
+                .withDefault(new C2ComponentTimeOffsetTuning(0ULL))
+                .withFields({C2F(mTimeOffset, value).any()})
+                .withSetter(BasicSetter<decltype(mTimeOffset)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mStarted, C2_PARAMKEY_INPUT_SURFACE_START_AT)
+                .withDefault(new C2PortStartTimestampTuning::output(0ULL))
+                .withFields({
+                        C2F(mStarted, enabled).any(),
+                        C2F(mStarted, timestamp).any()})
+                .withSetter(BasicSetter<decltype(mStarted)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mStopped, C2_PARAMKEY_INPUT_SURFACE_STOP_AT)
+                .withDefault(new C2PortStopTimestampTuning::output())
+                .withFields({
+                        C2F(mStopped, enabled).any(),
+                        C2F(mStopped, timestamp).any()})
+                .withSetter(BasicSetter<decltype(mStopped)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mSuspended, C2_PARAMKEY_INPUT_SURFACE_SUSPEND_AT)
+                .withDefault(new C2PortSuspendTimestampTuning::output())
+                .withFields({
+                        C2F(mSuspended, enabled).any(),
+                        C2F(mSuspended, timestamp).any()})
+                .withSetter(BasicSetter<decltype(mSuspended)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mResumed, C2_PARAMKEY_INPUT_SURFACE_RESUME_AT)
+                .withDefault(new C2PortResumeTimestampTuning::output(0ULL))
+                .withFields({
+                        C2F(mResumed, enabled).any(),
+                        C2F(mResumed, timestamp).any()})
+                .withSetter(BasicSetter<decltype(mResumed)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mGap, C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT)
+                .withDefault(new C2PortTimestampGapTuning::output(
+                        C2TimestampGapAdjustmentStruct::NONE, 0ULL))
+                .withFields({
+                        C2F(mGap, mode)
+                                .oneOf({
+                                        C2TimestampGapAdjustmentStruct::NONE,
+                                        C2TimestampGapAdjustmentStruct::MIN_GAP,
+                                        C2TimestampGapAdjustmentStruct::FIXED_GAP}),
+                        C2F(mGap, value).any()})
+                .withSetter(BasicSetter<decltype(mGap)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mStopTimeOffset, C2_PARAMKEY_INPUT_SURFACE_STOP_TIME_OFFSET)
+                .withDefault(new C2PortStopTimeOffset::output(0ULL))
+                .withFields({C2F(mStopTimeOffset, value).any()})
+                .withSetter(BasicSetter<decltype(mStopTimeOffset)::element_type>)
+                .build());
+
+        addParameter(
+                DefineParam(mInputDone, C2_PARAMKEY_LAYER_INDEX)
+                .withDefault(new C2StreamLayerIndexInfo::output(0u, UINT32_MAX))
+                .withFields({C2F(mInputDone, value).any()})
+                .withSetter(BasicSetter<decltype(mInputDone)::element_type>)
+                .build());
+        addParameter(
+                DefineParam(mInputDoneCount, C2_PARAMKEY_LAYER_INDEX)
+                .withDefault(new C2StreamLayerCountInfo::input(0u, 0))
+                .withFields({C2F(mInputDoneCount, value).any()})
+                .withSetter(InputDoneCountSetter)
+                .build());
+        addParameter(
+                DefineParam(mEmptyCount, C2_PARAMKEY_LAYER_COUNT)
+                .withDefault(new C2StreamLayerCountInfo::output(0u, 0))
+                .withFields({C2F(mEmptyCount, value).any()})
+                .withSetter(EmptyCountSetter)
+                .build());
+    }
+
+    void getImageConfig(ImageConfig* _Nonnull config) {
+        config->mWidth = mBlockSize->width;
+        config->mHeight = mBlockSize->height;
+        config->mFormat = mPixelFormat->value;
+        config->mNumBuffers = mBlockCount->value;
+        config->mUsage = mUsage->value;
+        config->mDataspace = mDataspace->value;
+    }
+
+    void getStreamConfig(StreamConfig* _Nonnull config) {
+        config->mMinFps = mMinFps->value;
+        config->mMaxFps = mMaxFps->value;
+        config->mCaptureFps = mCaptureFps->value;
+        config->mCodedFps = mCodedFps->value;
+        config->mTimeOffsetUs = mTimeOffset->value;
+
+        bool suspended = mSuspended->enabled;
+        bool resumed = mResumed->enabled;
+        CHECK(resumed != suspended);
+        config->mSuspended = suspended;
+        config->mSuspendAtUs = mSuspended->timestamp;
+        config->mResumeAtUs = mResumed->timestamp;
+        bool stopped = mStopped->enabled;
+        bool started = mStarted->enabled;
+        CHECK(stopped != started);
+        config->mStopped = stopped;
+        config->mStopAtUs = mStopped->timestamp;
+        config->mStartAtUs = mStarted->timestamp;
+
+        config->mAdjustedFpsMode = mGap->mode;
+        config->mAdjustedGapUs = mGap->value;
+    }
+
+    void getWorkStatusConfig(WorkStatusConfig* _Nonnull config) {
+        if (mInputDone->value == UINT32_MAX) {
+            config->mLastDoneIndex = -1;
+        } else {
+            config->mLastDoneIndex = mInputDone->value;
+        }
+        config->mLastDoneCount = mInputDoneCount->value;
+        config->mEmptyCount = mEmptyCount->value;
     }
 
 private:
+    // setters
+    static C2R BlockSizeSetter(bool mayBlock,
+            C2InterfaceHelper::C2P<C2StreamBlockSizeInfo::output> &me) {
+        (void)mayBlock;
+        uint32_t width_ = c2_min(me.v.width, 8192u);
+        uint32_t height_ = c2_min(me.v.height, 8192u);
+        if (width_ % 2 != 0) width_++;
+        if (height_ % 2 != 0) height_++;
+        me.set().width = width_;
+        me.set().height = height_;
+        return C2R::Ok();
+    }
+    static C2R BlockCountSetter(bool mayBlock,
+            C2InterfaceHelper::C2P<C2StreamBlockCountInfo::output> &me) {
+        (void)mayBlock;
+        me.set().value = c2_min(me.v.value, kDefaultImageBufferCount);
+        return C2R::Ok();
+    }
+
+    static C2R InputDoneCountSetter(bool mayBlock,
+            C2InterfaceHelper::C2P<C2StreamLayerCountInfo::input> &me) {
+        (void)mayBlock;
+        me.set().value = me.v.value + 1;
+        return C2R::Ok();
+    }
+
+    static C2R EmptyCountSetter(bool mayBlock,
+            C2InterfaceHelper::C2P<C2StreamLayerCountInfo::output> &me) {
+        (void)mayBlock;
+        me.set().value = me.v.value + 1;
+        return C2R::Ok();
+    }
+
+private:
+    // buffer configuration
+    std::shared_ptr<C2StreamBlockSizeInfo::output> mBlockSize;
+    std::shared_ptr<C2StreamBlockCountInfo::output> mBlockCount;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamUsageTuning::output> mUsage;
+    std::shared_ptr<C2StreamDataSpaceInfo::output> mDataspace;
+
+    // input surface source configuration
+    std::shared_ptr<C2PortMinFrameRateTuning::output> mMinFps;
+    std::shared_ptr<C2PortMaxFrameRateTuning::output> mMaxFps;
+    std::shared_ptr<C2PortCaptureFrameRateTuning::output> mCaptureFps;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mCodedFps;
+    std::shared_ptr<C2ComponentTimeOffsetTuning> mTimeOffset; // declared unsigned,
+                                                              // but used as signed
+    std::shared_ptr<C2PortSuspendTimestampTuning::output> mSuspended;
+    std::shared_ptr<C2PortResumeTimestampTuning::output> mResumed;
+    std::shared_ptr<C2PortStartTimestampTuning::output> mStarted;
+    std::shared_ptr<C2PortStopTimestampTuning::output> mStopped;
+    std::shared_ptr<C2PortTimestampGapTuning::output> mGap;
+    std::shared_ptr<C2PortStopTimeOffset::output> mStopTimeOffset; // query
+
+    // current work status configuration
+    // TODO: remove this and move this to onWorkDone()
+    std::shared_ptr<C2StreamLayerIndexInfo::output> mInputDone;
+    std::shared_ptr<C2StreamLayerCountInfo::input> mInputDoneCount;
+    std::shared_ptr<C2StreamLayerCountInfo::output> mEmptyCount;
 };
 
 class InputSurface::ConfigurableIntf : public ConfigurableC2Intf {
 public:
+    ConfigurableIntf(
+            const std::shared_ptr<InputSurface::Interface> &intf,
+            const std::shared_ptr<InputSurface> &surface)
+        : ConfigurableC2Intf("input-surface", 0),
+          mIntf(intf), mSurface(surface) {
+    }
+
+    virtual ~ConfigurableIntf() override = default;
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index> &indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const params
+            ) const override {
+        // serialize queries with config() updates on mIntf
+        std::lock_guard<std::mutex> l(mConfigLock);
+        return mIntf->query({}, indices, mayBlock, params);
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) override {
+        auto surface = mSurface.lock();
+        if (!surface) {
+            return C2_CORRUPTED;
+        }
+
+        c2_status_t err;
+        {
+            ImageConfig imageConfig;
+            StreamConfig streamConfig;
+            WorkStatusConfig workStatusConfig;
+            int64_t inputDelayUs = 0;
+
+            std::lock_guard<std::mutex> l(mConfigLock);
+            err = mIntf->config(params, mayBlock, failures);
+
+            mIntf->getImageConfig(&imageConfig);
+            mIntf->getStreamConfig(&streamConfig);
+            mIntf->getWorkStatusConfig(&workStatusConfig);
+            if (surface->updateConfig(
+                   imageConfig, streamConfig, workStatusConfig, &inputDelayUs)) {
+                C2PortStopTimeOffset::output offsetConfig(inputDelayUs);
+                std::vector<std::unique_ptr<C2SettingResult>> fail;
+                c2_status_t updateErr = mIntf->config({&offsetConfig}, mayBlock, &fail);
+            }
+        }
+        return err;
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override {
+        std::lock_guard<std::mutex> l(mConfigLock);
+        return mIntf->querySupportedParams(params);
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override {
+        std::lock_guard<std::mutex> l(mConfigLock);
+        return mIntf->querySupportedValues(fields, mayBlock);
+    }
+
+private:
+    const std::shared_ptr<InputSurface::Interface> mIntf;
+    const std::weak_ptr<InputSurface> mSurface;
+
+    mutable std::mutex mConfigLock;
 };
 
-struct InputSurface::DeathContext {
-    // TODO;
-};
+InputSurface::InputSurface() {
+    mIntf = std::make_shared<Interface>(
+            std::make_shared<C2ReflectorHelper>());
 
-void InputSurface::OnBinderDied(void *cookie) {
-    (void) cookie;
-}
-
-void InputSurface::OnBinderUnlinked(void *cookie) {
-    (void) cookie;
-}
-
-InputSurface::InputSurface() : mDeathContext(nullptr) {
-    mInit = C2_OK;
+    // mConfigurable is initialized lazily.
+    // mInit indicates the initialization status of mConfigurable.
+    mInit = C2_NO_INIT;
 }
 
 InputSurface::~InputSurface() {
+    release();
 }
 
 ::ndk::ScopedAStatus InputSurface::getSurface(::aidl::android::view::Surface* surface) {
-    (void) surface;
-    return ::ndk::ScopedAStatus::ok();
+    std::lock_guard<std::mutex> l(mLock);
+    ANativeWindow *window = mSource->getNativeWindow();
+    if (window) {
+        surface->reset(window);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(C2_CORRUPTED);
 }
 
 ::ndk::ScopedAStatus InputSurface::getConfigurable(
         std::shared_ptr<IConfigurable>* configurable) {
-    *configurable = mConfigurable;
-    return ::ndk::ScopedAStatus::ok();
+    if (mInit == C2_NO_INIT) {
+        mConfigurable = SharedRefBase::make<CachedConfigurable>(
+                std::make_unique<ConfigurableIntf>(mIntf, this->ref<InputSurface>()));
+        mInit = C2_OK;
+    }
+    if (mConfigurable) {
+        *configurable = mConfigurable;
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(C2_CORRUPTED);
 }
 
 ::ndk::ScopedAStatus InputSurface::connect(
         const std::shared_ptr<IInputSink>& sink,
         std::shared_ptr<IInputSurfaceConnection>* connection) {
-    (void) sink;
-    (void) connection;
+    mConnection = SharedRefBase::make<InputSurfaceConnection>(sink, mSource);
+    *connection = mConnection;
     return ::ndk::ScopedAStatus::ok();
 }
 
+void InputSurface::updateImageConfig(ImageConfig &config) {
+    std::unique_lock<std::mutex> l(mLock);
+    if (mImageConfig.mWidth != config.mWidth) {
+        mImageConfig.mWidth = config.mWidth;
+    }
+    if (mImageConfig.mHeight != config.mHeight) {
+        mImageConfig.mHeight = config.mHeight;
+    }
+    if (mImageConfig.mFormat != config.mFormat) {
+        mImageConfig.mFormat = config.mFormat;
+    }
+    if (mImageConfig.mNumBuffers != config.mNumBuffers) {
+        mImageConfig.mNumBuffers = config.mNumBuffers;
+    }
+    if (mImageConfig.mUsage != config.mUsage) {
+        mImageConfig.mUsage = config.mUsage;
+    }
+    if (mImageConfig.mDataspace != config.mDataspace) {
+        mImageConfig.mDataspace = config.mDataspace;
+    }
+}
+
+bool InputSurface::updateStreamConfig(
+        StreamConfig &config, int64_t *inputDelayUs) {
+    std::stringstream status;
+    c2_status_t err = C2_OK;
+    bool inputDelayUpdated = false;
+
+    std::unique_lock<std::mutex> l(mLock);
+    // handle StreamConfig changes.
+    // TRICKY: we do not unset frame delay repeating
+    if (config.mMinFps > 0 && config.mMinFps != mStreamConfig.mMinFps) {
+        int64_t us = 1e6 / config.mMinFps + 0.5;
+        c2_status_t res = mSource->setRepeatPreviousFrameDelayUs(us);
+        status << " minFps=" << config.mMinFps << " => repeatDelayUs=" << us;
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mMinFps = config.mMinFps;
+    }
+    bool fixedModeUpdate = false;
+    if (config.mAdjustedFpsMode != C2TimestampGapAdjustmentStruct::NONE && (
+            config.mAdjustedFpsMode != mStreamConfig.mAdjustedFpsMode ||
+            config.mAdjustedGapUs != mStreamConfig.mAdjustedGapUs)) {
+        // TODO: configure GapUs to connection
+        // The original codes do not update config, figure out why.
+        mStreamConfig.mAdjustedFpsMode = config.mAdjustedFpsMode;
+        mStreamConfig.mAdjustedGapUs = config.mAdjustedGapUs;
+        fixedModeUpdate = (config.mAdjustedFpsMode == C2TimestampGapAdjustmentStruct::FIXED_GAP);
+        // TODO: update Gap to Connection.
+    }
+    // TRICKY: we do not unset max fps to 0 unless using fixed fps
+    if ((config.mMaxFps > 0 || (fixedModeUpdate && config.mMaxFps == -1))
+            && config.mMaxFps != mStreamConfig.mMaxFps) {
+        c2_status_t res = mSource->setMaxFps(config.mMaxFps);
+        status << " maxFps=" << config.mMaxFps;
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mMaxFps = config.mMaxFps;
+    }
+    if (config.mTimeOffsetUs != mStreamConfig.mTimeOffsetUs) {
+        c2_status_t res = mSource->setTimeOffsetUs(config.mTimeOffsetUs);
+        status << " timeOffset " << config.mTimeOffsetUs << "us";
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mTimeOffsetUs = config.mTimeOffsetUs;
+    }
+    if (config.mCaptureFps != mStreamConfig.mCaptureFps ||
+            config.mCodedFps != mStreamConfig.mCodedFps) {
+        c2_status_t res = mSource->setTimeLapseConfig(
+                config.mCodedFps, config.mCaptureFps);
+        status << " timeLapse " << config.mCaptureFps << "fps as "
+               << config.mCodedFps << "fps";
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mCaptureFps = config.mCaptureFps;
+        mStreamConfig.mCodedFps = config.mCodedFps;
+    }
+    if (config.mStartAtUs != mStreamConfig.mStartAtUs ||
+            (config.mStopped != mStreamConfig.mStopped && !config.mStopped)) {
+        c2_status_t res = mSource->setStartTimeUs(config.mStartAtUs);
+        status << " start at " << config.mStartAtUs << "us";
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mStartAtUs = config.mStartAtUs;
+        mStreamConfig.mStopped = config.mStopped;
+    }
+    if (config.mSuspended != mStreamConfig.mSuspended) {
+        c2_status_t res = mSource->setSuspend(config.mSuspended, config.mSuspendAtUs);
+        status << " " << (config.mSuspended ? "suspend" : "resume")
+                << " at " << config.mSuspendAtUs << "us";
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        }
+        mStreamConfig.mSuspended = config.mSuspended;
+        mStreamConfig.mSuspendAtUs = config.mSuspendAtUs;
+    }
+    if (config.mStopped != mStreamConfig.mStopped && config.mStopped) {
+        // start time has changed or started from stop.
+        c2_status_t res = mSource->setStopTimeUs(config.mStopAtUs);
+        status << " stop at " << config.mStopAtUs << "us";
+        if (res != C2_OK) {
+            status << " (=> " << asString(res) << ")";
+            err = res;
+        } else {
+            status << " delayUs";
+            res = mSource->getStopTimeOffsetUs(inputDelayUs);
+            if (res != C2_OK) {
+                status << " (=> " << asString(res) << ")";
+            } else {
+                status << "=" << *inputDelayUs << "us";
+                inputDelayUpdated = true;
+            }
+        }
+        mStreamConfig.mStopAtUs = config.mStopAtUs;
+        mStreamConfig.mStopped = config.mStopped;
+    }
+    if (status.str().empty()) {
+        ALOGD("StreamConfig not changed");
+    } else {
+        ALOGD("StreamConfig%s", status.str().c_str());
+    }
+    return inputDelayUpdated;
+}
+
+void InputSurface::updateWorkStatusConfig(WorkStatusConfig &config) {
+    (void)config;
+    // TODO
+}
+
+bool InputSurface::updateConfig(
+        ImageConfig &imageConfig, StreamConfig &streamConfig,
+        WorkStatusConfig &workStatusConfig, int64_t *inputDelayUs) {
+    updateImageConfig(imageConfig);
+    bool ret = updateStreamConfig(streamConfig, inputDelayUs);
+    updateWorkStatusConfig(workStatusConfig);
+
+    return ret;
+}
+
+void InputSurface::release() {
+    ALOGD("all refs are gone");
+    // TODO clean up
+}
+
 }  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
index 44ca924..6a95472 100644
--- a/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
+++ b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
@@ -19,15 +19,24 @@
 #include <android-base/logging.h>
 
 #include <codec2/aidl/inputsurface/InputSurfaceConnection.h>
+#include <codec2/aidl/inputsurface/InputSurfaceSource.h>
 
 namespace aidl::android::hardware::media::c2::utils {
 
-InputSurfaceConnection::InputSurfaceConnection() {
+InputSurfaceConnection::InputSurfaceConnection(
+        const std::shared_ptr<IInputSink>& sink,
+        ::android::sp<c2::implementation::InputSurfaceSource> const &source)
+        : mSink{sink}, mSource{source} {
 }
 
 InputSurfaceConnection::~InputSurfaceConnection() {
 }
 
+c2_status_t InputSurfaceConnection::status() const {
+    // TODO: report the actual connection status once it is tracked.
+    return C2_OK;
+}
+
 ::ndk::ScopedAStatus InputSurfaceConnection::disconnect() {
     return ::ndk::ScopedAStatus::ok();
 }
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurfaceSource.cpp b/media/codec2/hal/aidl/inputsurface/InputSurfaceSource.cpp
new file mode 100644
index 0000000..953790e
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/InputSurfaceSource.cpp
@@ -0,0 +1,1572 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "InputSurfaceSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#define STRINGIFY_ENUMS // for asString in HardwareAPI.h/VideoAPI.h
+
+#include <codec2/aidl/inputsurface/FrameDropper.h>
+#include <codec2/aidl/inputsurface/InputSurfaceSource.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/FileDescriptor.h>
+
+#include <android-base/no_destructor.h>
+#include <android-base/properties.h>
+#include <media/hardware/HardwareAPI.h>
+#include <ui/Fence.h>
+
+#include <inttypes.h>
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <cmath>
+
+// TODO: remove CHECK() since this works in HAL process,
+// we don't want to kill the HAL process when there is an irrecoverable runtime
+// error.
+
+namespace aidl::android::hardware::media::c2::implementation {
+
+using ::android::AHandlerReflector;
+using ::android::ALooper;
+using ::android::AMessage;
+using ::android::ColorAspects;
+using ::android::ColorUtils;
+using ::android::Fence;
+using ::android::FileDescriptor;
+using ::android::List;
+using ::android::Mutex;
+using ::android::String8;
+using ::android::Vector;
+using ::android::sp;
+using ::android::wp;
+
+using c2::utils::InputSurfaceConnection;
+
+namespace {
+// kTimestampFluctuation is an upper bound of timestamp fluctuation from the
+// source that InputSurfaceSource allows. The unit of kTimestampFluctuation is
+// frames. More specifically, InputSurfaceSource will drop a frame if
+//
+// expectedNewFrameTimestamp - actualNewFrameTimestamp <
+//     (0.5 - kTimestampFluctuation) * expectedTimePeriodBetweenFrames
+//
+// where
+// - expectedNewFrameTimestamp is the calculated ideal timestamp of the new
+//   incoming frame
+// - actualNewFrameTimestamp is the timestamp received from the source
+// - expectedTimePeriodBetweenFrames is the ideal difference of the timestamps
+//   of two adjacent frames
+//
+// See InputSurfaceSource::calculateCodecTimestamp_l() for more detail about
+// how kTimestampFluctuation is used.
+//
+// kTimestampFluctuation should be non-negative. A higher value causes a smaller
+// chance of dropping frames, but at the same time a higher bound on the
+// difference between the source timestamp and the interpreted (snapped)
+// timestamp.
+//
+// The value of 0.05 means that InputSurfaceSource expects the input timestamps
+// to fluctuate no more than 5% from the regular time period.
+//
+// TODO: Justify the choice of this value, or make it configurable.
+constexpr double kTimestampFluctuation = 0.05;
+}
+
+/**
+ * A copiable object managing a buffer in the buffer cache managed by the producer. This object
+ * holds a reference to the buffer, and maintains which buffer slot it belongs to (if any), and
+ * whether it is still in a buffer slot. It also maintains whether there are any outstanding acquire
+ * references to it (by buffers acquired from the slot) mainly so that we can keep a debug
+ * count of how many buffers we need to still release back to the producer.
+ */
+struct InputSurfaceSource::CachedBuffer {
+    /**
+     * Token that is used to track acquire counts (as opposed to all references to this object).
+     */
+    struct Acquirable { };
+
+    /**
+     * Create using a buffer cached in a slot.
+     */
+    CachedBuffer(ahwb_id id, AImage *image)
+        : mIsCached(true),
+          mId(id),
+          mImage(image),
+          mAcquirable(std::make_shared<Acquirable>()) {}
+
+    /**
+     * Returns the id of buffer which is cached in, or 0 if it is no longer cached.
+     *
+     * This assumes that 0 id is invalid; though, it is just a benign collision used for
+     * debugging. This object explicitly manages whether it is still cached.
+     */
+    ahwb_id getId() const {
+        return mIsCached ? mId : 0;
+    }
+
+    /**
+     * Returns the cached buffer(AImage).
+     */
+    AImage *getImage() const {
+        return mImage;
+    }
+
+    /**
+     * Checks whether this buffer is still in the buffer cache.
+     */
+    bool isCached() const {
+        return mIsCached;
+    }
+
+    /**
+     * Checks whether this buffer has an acquired reference.
+     */
+    bool isAcquired() const {
+        return mAcquirable.use_count() > 1;
+    }
+
+    /**
+     * Gets and returns a shared acquired reference.
+     */
+    std::shared_ptr<Acquirable> getAcquirable() {
+        return mAcquirable;
+    }
+
+private:
+    friend void InputSurfaceSource::discardBufferAtIter_l(BufferIdMap::iterator&);
+
+    /**
+     * This method to be called when the buffer is no longer in the buffer cache.
+     * Called from discardBufferAtIter_l.
+     */
+    void onDroppedFromCache() {
+        CHECK_DBG(mIsCached);
+        mIsCached = false;
+    }
+
+    bool mIsCached;
+    ahwb_id mId;
+    AImage *mImage;
+    std::shared_ptr<Acquirable> mAcquirable;
+};
+
+/**
+ * A copiable object managing a buffer acquired from the producer. This must always be a cached
+ * buffer. This object also manages its acquire fence and any release fences that may be returned
+ * by the encoder for this buffer (this buffer may be queued to the encoder multiple times).
+ * If no release fences are added by the encoder, the acquire fence is returned as the release
+ * fence for this - as it is assumed that no one waited for the acquire fence. Otherwise, it is
+ * assumed that the encoder has waited for the acquire fence (or returned it as the release
+ * fence).
+ */
+struct InputSurfaceSource::AcquiredBuffer {
+    AcquiredBuffer(
+            const std::shared_ptr<CachedBuffer> &buffer,
+            std::function<void(AcquiredBuffer *)> onReleased,
+            const sp<Fence> &acquireFence)
+        : mBuffer(buffer),
+          mAcquirable(buffer->getAcquirable()),
+          mAcquireFence(acquireFence),
+          mGotReleaseFences(false),
+          mOnReleased(onReleased) {
+    }
+
+    /**
+     * Adds a release fence returned by the encoder to this object. If this is called with a
+     * valid file descriptor, it is added to the list of release fences. These are returned to the
+     * producer on release() as a merged fence. Regardless of the validity of the file descriptor,
+     * we take note that a release fence was attempted to be added and the acquire fence can now be
+     * assumed as acquired.
+     */
+    void addReleaseFenceFd(int fenceFd) {
+        // save all release fences - these will be propagated to the producer if this buffer is
+        // ever released to it
+        if (fenceFd >= 0) {
+            mReleaseFenceFds.push_back(fenceFd);
+        }
+        mGotReleaseFences = true;
+    }
+
+    /**
+     * Returns a dup of the acquire fence file descriptor associated with this object, or -1 if
+     * there is no valid acquire fence. The caller owns the returned fd.
+     */
+    int getAcquireFenceFd() {
+        if (mAcquireFence == nullptr || !mAcquireFence->isValid()) {
+            return -1;
+        }
+        return mAcquireFence->dup();
+    }
+
+    /**
+     * Returns whether the buffer is still in the buffer cache.
+     */
+    bool isCached() const {
+        return mBuffer->isCached();
+    }
+
+    /**
+     * Returns the acquired buffer.
+     */
+    AImage *getImage() const {
+        return mBuffer->getImage();
+    }
+
+    /**
+     * Returns the id of the buffer that is cached, or 0 otherwise.
+     *
+     * This assumes that 0 id is invalid; though, it is just a benign collision used for
+     * debugging. This object explicitly manages whether it is still cached.
+     */
+    ahwb_id getId() const {
+        return mBuffer->getId();
+    }
+
+    /**
+     * Creates and returns a release fence object from the acquire fence and/or any release fences
+     * added. If no release fences were added (even if invalid), returns the acquire fence.
+     * Otherwise, it returns a merged fence from all the valid release fences added.
+     */
+    sp<Fence> getReleaseFence() {
+        // If did not receive release fences, we assume this buffer was not consumed (it was
+        // discarded or dropped). In this case release the acquire fence as the release fence.
+        // We do this here to avoid a dup, close and recreation of the Fence object.
+        if (!mGotReleaseFences) {
+            return mAcquireFence;
+        }
+        sp<Fence> ret = getReleaseFence(0, mReleaseFenceFds.size());
+        // clear fds as fence took ownership of them
+        mReleaseFenceFds.clear();
+        return ret;
+    }
+
+    // this video buffer is no longer referenced by the codec (or kept for later encoding)
+    // it is now safe to release to the producer
+    ~AcquiredBuffer() {
+        //mAcquirable.clear();
+        mOnReleased(this);
+        // mOnReleased should have consumed the fds via getReleaseFence(); close any that remain
+        // so they do not leak, and flag this as a debug trespass.
+        ALOGW_IF(!mReleaseFenceFds.empty(), "release fences were not obtained, closing fds");
+        for (int fildes : mReleaseFenceFds) {
+            ::close(fildes);
+            TRESPASS_DBG();
+        }
+    }
+
+private:
+    // the cached buffer this object holds a reference to
+    std::shared_ptr<InputSurfaceSource::CachedBuffer> mBuffer;
+    // keeps the buffer's acquirable token alive for the lifetime of this object
+    std::shared_ptr<InputSurfaceSource::CachedBuffer::Acquirable> mAcquirable;
+    // fence supplied when the buffer was acquired; handed out via getAcquireFenceFd()
+    sp<Fence> mAcquireFence;
+    // release fence fds added by the encoder; owned here until getReleaseFence() takes them
+    Vector<int> mReleaseFenceFds;
+    // true once addReleaseFenceFd() was called, even with an invalid fd
+    bool mGotReleaseFences;
+    // invoked from the destructor to notify the owner that this buffer is released
+    std::function<void(AcquiredBuffer *)> mOnReleased;
+
+    /**
+     * Creates and returns a release fence from 0 or more release fence file descriptors in from
+     * the specified range in the array.
+     *
+     * @param start start index
+     * @param num   number of release fds to merge
+     */
+    sp<Fence> getReleaseFence(size_t start, size_t num) const {
+        if (num == 0) {
+            return Fence::NO_FENCE;
+        } else if (num == 1) {
+            return new Fence(mReleaseFenceFds[start]);
+        } else {
+            // merge recursively, pairing halves to keep the merge tree balanced
+            return Fence::merge("GBS::AB",
+                                getReleaseFence(start, num >> 1),
+                                getReleaseFence(start + (num >> 1), num - (num >> 1)));
+        }
+    }
+};
+
+/**
+ * Process-wide registry mapping an opaque context key to a weak reference of an
+ * InputSurfaceSource, so that the static AImageReader callbacks can be routed to
+ * the correct (still alive) source instance.
+ */
+struct InputSurfaceSource::ImageReaderListener {
+private:
+    std::map<uint64_t, wp<InputSurfaceSource>> listeners;
+    std::mutex mutex;
+    uint64_t seqId{0};
+
+    // Looks up the source registered under |context|, pruning the entry if the
+    // source has since been destroyed. Returns nullptr when not found or dead.
+    sp<InputSurfaceSource> getSource(void *context) {
+        sp<InputSurfaceSource> source;
+        uint64_t key = reinterpret_cast<uint64_t>(context);
+        std::lock_guard<std::mutex> l(mutex);
+        auto it = listeners.find(key);
+        // NOTE: this previously read |it->first| without checking for end(),
+        // which is undefined behavior for unregistered keys, and it also tested
+        // the key's value - so key 0 (the first registered listener) never matched.
+        if (it != listeners.end()) {
+            source = it->second.promote();
+            if (!source) {
+                listeners.erase(it);
+            }
+        }
+        return source;
+    }
+
+public:
+    static InputSurfaceSource::ImageReaderListener& GetInstance() {
+        static ::android::base::NoDestructor<
+              InputSurfaceSource::ImageReaderListener> sImageListener{};
+        return *sImageListener;
+    }
+
+    // Registers |source| and returns the opaque context key to pass to the
+    // AImageReader callbacks.
+    void *add(const sp<InputSurfaceSource> &source) {
+        wp<InputSurfaceSource> wsource = source;
+        std::lock_guard<std::mutex> l(mutex);
+        uint64_t key = seqId++;
+        listeners[key] = wsource;
+        return reinterpret_cast<void *>(key);
+    }
+
+    // Unregisters the source previously registered under |context|.
+    void remove(void *context) {
+        std::lock_guard<std::mutex> l(mutex);
+        uint64_t key = reinterpret_cast<uint64_t>(context);
+        listeners.erase(key);
+    }
+
+    // Forwards an image-available callback to the registered source, if alive.
+    void onImageAvailable(void *context) {
+        sp<InputSurfaceSource> source = getSource(context);
+        if (source) {
+            source->onFrameAvailable();
+        }
+    }
+
+    // Forwards a buffer-removed callback, translated to the buffer's stable id.
+    void onBufferRemoved(void *context, AHardwareBuffer *buf) {
+        sp<InputSurfaceSource> source = getSource(context);
+        if (source) {
+            if (__builtin_available(android __ANDROID_API_T__, *)) {
+                uint64_t bid;
+                if (AHardwareBuffer_getId(buf, &bid) == ::android::OK) {
+                    source->onBufferReleased(bid);
+                }
+            }
+        }
+    }
+};
+
+// Constructor only initializes members to their pre-init defaults; the actual
+// AImageReader is created later in init() / initWithParams().
+InputSurfaceSource::InputSurfaceSource() :
+    mInitCheck(C2_NO_INIT),
+    mNumAvailableUnacquiredBuffers(0),
+    mNumOutstandingAcquires(0),
+    mEndOfStream(false),
+    mEndOfStreamSent(false),
+    mLastDataspace(HAL_DATASPACE_UNKNOWN),
+    mExecuting(false),
+    mSuspended(false),
+    mLastFrameTimestampUs(-1),
+    mImageReader(nullptr),
+    mImageWindow(nullptr),
+    mStopTimeUs(-1),
+    mLastActionTimeUs(-1LL),
+    mSkipFramesBeforeNs(-1LL),
+    mFrameRepeatIntervalUs(-1LL),
+    mRepeatLastFrameGeneration(0),
+    mOutstandingFrameRepeatCount(0),
+    mFrameRepeatBlockedOnCodecBuffer(false),
+    mFps(-1.0),
+    mCaptureFps(-1.0),
+    mBaseCaptureUs(-1LL),
+    mBaseFrameUs(-1LL),
+    mFrameCount(0),
+    mPrevCaptureUs(-1LL),
+    mPrevFrameUs(-1LL),
+    mInputBufferTimeOffsetUs(0LL) {
+    ALOGV("InputSurfaceSource");
+
+    String8 name("InputSurfaceSource");
+
+    // default parameters for ImageReader.
+    mImageReaderConfig.width = 1920;
+    mImageReaderConfig.height = 1080;
+    mImageReaderConfig.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    mImageReaderConfig.maxImages = 16;
+    mImageReaderConfig.usage = AHARDWAREBUFFER_USAGE_VIDEO_ENCODE;
+
+    memset(&mDefaultColorAspectsPacked, 0, sizeof(mDefaultColorAspectsPacked));
+}
+
+// Overrides the default ImageReader configuration and performs initialization.
+// AHARDWAREBUFFER_USAGE_VIDEO_ENCODE is always or-ed into |usage| since the
+// buffers are destined for the encoder.
+void InputSurfaceSource::initWithParams(
+        int32_t width, int32_t height, int32_t format,
+        int32_t maxImages, uint64_t usage) {
+    mImageReaderConfig.width = width;
+    mImageReaderConfig.height = height;
+    mImageReaderConfig.format = format;
+    mImageReaderConfig.maxImages = maxImages;
+    mImageReaderConfig.usage = (AHARDWAREBUFFER_USAGE_VIDEO_ENCODE | usage);
+    init();
+}
+
+// Lazily creates the AImageReader with the current mImageReaderConfig and wires
+// up the image-available / buffer-removed listeners. Sets mInitCheck to C2_OK on
+// success or to an error code describing the failure. No-op once initialization
+// has already been attempted (mInitCheck != C2_NO_INIT).
+void InputSurfaceSource::init() {
+    if (mInitCheck != C2_NO_INIT) {
+        return;
+    }
+    media_status_t err = AImageReader_newWithUsage(
+            mImageReaderConfig.width,
+            mImageReaderConfig.height,
+            mImageReaderConfig.format,
+            mImageReaderConfig.maxImages,
+            mImageReaderConfig.usage, &mImageReader);
+    if (err != AMEDIA_OK) {
+        if (err == AMEDIA_ERROR_INVALID_PARAMETER) {
+            mInitCheck = C2_BAD_VALUE;
+        } else {
+            mInitCheck = C2_CORRUPTED;
+        }
+        ALOGE("Error constructing AImageReader: %d", err);
+        return;
+    }
+    createImageListeners();
+    (void)AImageReader_setImageListener(mImageReader, &mImageListener);
+    (void)AImageReader_setBufferRemovedListener(mImageReader, &mBufferRemovedListener);
+
+    // Capture the status so the failure log reports the actual error (previously
+    // this logged the stale status from AImageReader_newWithUsage, i.e. AMEDIA_OK).
+    err = AImageReader_getWindow(mImageReader, &mImageWindow);
+    if (err == AMEDIA_OK) {
+        mInitCheck = C2_OK;
+    } else {
+        ALOGE("Error getting window from AImageReader: %d", err);
+        mInitCheck = C2_CORRUPTED;
+    }
+}
+
+// Releases all buffer references (under the lock, so AcquiredBuffer's destructor
+// lock-assertion holds), checks for leaked acquires, then tears down the reader.
+InputSurfaceSource::~InputSurfaceSource() {
+    ALOGV("~InputSurfaceSource");
+    {
+        // all acquired buffers must be freed with the mutex locked otherwise our debug assertion
+        // may trigger
+        std::lock_guard<std::mutex> autoLock(mMutex);
+        mAvailableBuffers.clear();
+        mSubmittedCodecBuffers.clear();
+        mLatestBuffer.mBuffer.reset();
+    }
+
+    // after dropping every reference above, any remaining outstanding acquire is a leak
+    if (mNumOutstandingAcquires != 0) {
+        ALOGW("potential buffer leak: acquired=%d", mNumOutstandingAcquires);
+        TRESPASS_DBG();
+    }
+
+    if (mImageReader != nullptr) {
+        AImageReader_delete(mImageReader);
+        mImageReader = nullptr;
+        mImageWindow = nullptr;
+    }
+}
+
+// Registers this source with the global ImageReaderListener registry and builds
+// the AImageReader callback structs that route through it via the opaque context
+// key. NOTE(review): the context does not appear to be removed from the registry
+// in this chunk - the registry holds only a weak reference, so the entry is
+// pruned lazily on lookup; confirm remove() is called elsewhere if needed.
+void InputSurfaceSource::createImageListeners() {
+    void *context = ImageReaderListener::GetInstance().add(
+            sp<InputSurfaceSource>::fromExisting(this));
+    mImageListener = {
+        context,
+        [](void *key, AImageReader *imageReader) {
+            (void)imageReader;
+            ImageReaderListener::GetInstance().onImageAvailable(key); }
+    };
+    mBufferRemovedListener = {
+        context,
+        [](void *key, AImageReader *imageReader, AHardwareBuffer *buffer) {
+            (void)imageReader;
+            ImageReaderListener::GetInstance().onBufferRemoved(key, buffer); }
+    };
+}
+
+// Returns the producer-side window of the AImageReader (nullptr before a
+// successful init()). Ownership stays with the reader.
+ANativeWindow *InputSurfaceSource::getNativeWindow() {
+    return mImageWindow;
+}
+
+// Transitions the source to executing: preloads the codec with as many already
+// available buffers as possible, submits a pending EOS if applicable, and spins
+// up the frame-repeat looper when repeat is configured. Requires a successful
+// init() (mInitCheck == C2_OK).
+c2_status_t InputSurfaceSource::start() {
+    if (mInitCheck != C2_OK) {
+        ALOGE("start() was called without initialization");
+        return C2_CORRUPTED;
+    }
+    std::lock_guard<std::mutex> autoLock(mMutex);
+    ALOGV("--> start; available=%zu, submittable=%zd",
+            mAvailableBuffers.size(), mFreeCodecBuffers.size());
+    CHECK(!mExecuting);
+    mExecuting = true;
+    mLastDataspace = HAL_DATASPACE_UNKNOWN;
+    ALOGV("clearing last dataSpace");
+
+    // Start by loading up as many buffers as possible.  We want to do this,
+    // rather than just submit the first buffer, to avoid a degenerate case:
+    // if all BQ buffers arrive before we start executing, and we only submit
+    // one here, the other BQ buffers will just sit until we get notified
+    // that the codec buffer has been released.  We'd then acquire and
+    // submit a single additional buffer, repeatedly, never using more than
+    // one codec buffer simultaneously.  (We could instead try to submit
+    // all BQ buffers whenever any codec buffer is freed, but if we get the
+    // initial conditions right that will never be useful.)
+    while (haveAvailableBuffers_l()) {
+        if (!fillCodecBuffer_l()) {
+            ALOGV("stop load with available=%zu+%d",
+                    mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+            break;
+        }
+    }
+
+    ALOGV("done loading initial frames, available=%zu+%d",
+            mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+
+    // If EOS has already been signaled, and there are no more frames to
+    // submit, try to send EOS now as well.
+    if (mStopTimeUs == -1 && mEndOfStream && !haveAvailableBuffers_l()) {
+        submitEndOfInputStream_l();
+    }
+
+    // lazily create the looper/handler used to post frame-repeat messages
+    if (mFrameRepeatIntervalUs > 0LL && mLooper == NULL) {
+        mReflector = new AHandlerReflector<InputSurfaceSource>(this);
+
+        mLooper = new ALooper;
+        mLooper->registerHandler(mReflector);
+        mLooper->start();
+
+        if (mLatestBuffer.mBuffer != nullptr) {
+            queueFrameRepeat_l();
+        }
+    }
+
+    return C2_OK;
+}
+
+// Marks the source as no longer executing. Buffers are not torn down here;
+// that happens in release().
+c2_status_t InputSurfaceSource::stop() {
+    ALOGV("stop");
+
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mExecuting) {
+        // We are only interested in the transition from executing->idle,
+        // not loaded->idle.
+        mExecuting = false;
+    }
+    return C2_OK;
+}
+
+// Tears down the frame-repeat looper and drops all codec-buffer state and the
+// component reference. The looper is stopped outside the lock to avoid blocking
+// (or deadlocking with) its handler while holding mMutex.
+c2_status_t InputSurfaceSource::release(){
+    sp<ALooper> looper;
+    {
+        std::lock_guard<std::mutex> autoLock(mMutex);
+        looper = mLooper;
+        if (mLooper != NULL) {
+            mLooper->unregisterHandler(mReflector->id());
+            mReflector.clear();
+
+            mLooper.clear();
+        }
+
+        ALOGV("--> release; available=%zu+%d eos=%d eosSent=%d acquired=%d",
+                mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers,
+                mEndOfStream, mEndOfStreamSent, mNumOutstandingAcquires);
+
+        // Codec is no longer executing.  Releasing all buffers to bq.
+        mFreeCodecBuffers.clear();
+        mSubmittedCodecBuffers.clear();
+        mLatestBuffer.mBuffer.reset();
+        mComponent.reset();
+        mExecuting = false;
+    }
+    if (looper != NULL) {
+        looper->stop();
+    }
+    return C2_OK;
+}
+
+// Registers a codec input buffer id as available for submission. Only legal
+// before the source starts executing.
+c2_status_t InputSurfaceSource::onInputBufferAdded(codec_buffer_id bufferId) {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mExecuting) {
+        // This should never happen -- buffers can only be allocated when
+        // transitioning from "loaded" to "idle".
+        ALOGE("addCodecBuffer: buffer added while executing");
+        return C2_BAD_STATE;
+    }
+
+    ALOGV("addCodecBuffer: bufferId=%u", bufferId);
+
+    mFreeCodecBuffers.push_back(bufferId);
+    return C2_OK;
+}
+
+/**
+ * Handles the codec returning an input buffer. Moves the buffer id back to the
+ * free list, attaches |fenceFd| (ownership taken) as a release fence to the
+ * acquired buffer, and reuses the freed codec buffer to submit more data, a
+ * pending EOS, or a deferred frame repeat.
+ */
+c2_status_t InputSurfaceSource::onInputBufferEmptied(codec_buffer_id bufferId, int fenceFd) {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+    // RAII guard: closes fenceFd unless release()-d into the buffer below
+    FileDescriptor::Autoclose fence(fenceFd);
+
+    auto it = mSubmittedCodecBuffers.find(bufferId);
+    if (it == mSubmittedCodecBuffers.end()) {
+        // This should never happen.
+        ALOGE("onInputBufferEmptied: buffer not recognized (bufferId=%u)", bufferId);
+        return C2_BAD_VALUE;
+    }
+
+    std::shared_ptr<AcquiredBuffer> buffer = it->second;
+
+    // Move buffer to available buffers
+    mSubmittedCodecBuffers.erase(it);
+    mFreeCodecBuffers.push_back(bufferId);
+
+    // header->nFilledLen may not be the original value, so we can't compare
+    // that to zero to see if this was the EOS buffer.  Instead we just
+    // see if there is a null AcquiredBuffer, which should only ever happen for EOS.
+    if (buffer == nullptr) {
+        if (!(mEndOfStream && mEndOfStreamSent)) {
+            // This can happen when broken code sends us the same buffer twice in a row.
+            ALOGE("onInputBufferEmptied: non-EOS null buffer (bufferId=%u)", bufferId);
+        } else {
+            ALOGV("onInputBufferEmptied: EOS null buffer (bufferId=%u)", bufferId);
+        }
+        // No GraphicBuffer to deal with, no additional input or output is expected, so just return.
+        return C2_BAD_VALUE;
+    }
+
+    if (!mExecuting) {
+        // this is fine since this could happen when going from Idle to Loaded
+        ALOGV("onInputBufferEmptied: no longer executing (bufferId=%u)", bufferId);
+        return C2_OK;
+    }
+
+    ALOGV("onInputBufferEmptied: bufferId=%d [id=%llu, useCount=%ld] acquired=%d",
+            bufferId, (unsigned long long)buffer->getId(), buffer.use_count(),
+            mNumOutstandingAcquires);
+
+    buffer->addReleaseFenceFd(fence.release());
+    // drop our reference so the AcquiredBuffer can be released (and its release
+    // fence propagated) as soon as no one else holds it
+    buffer.reset();
+
+    if (haveAvailableBuffers_l()) {
+        // Fill this codec buffer.
+        CHECK(!mEndOfStreamSent);
+        ALOGV("onInputBufferEmptied: buffer freed, feeding codec (available=%zu+%d, eos=%d)",
+                mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers, mEndOfStream);
+        fillCodecBuffer_l();
+    } else if (mEndOfStream && mStopTimeUs == -1) {
+        // No frames available, but EOS is pending and no stop time, so use this buffer to
+        // send that.
+        ALOGV("onInputBufferEmptied: buffer freed, submitting EOS");
+        submitEndOfInputStream_l();
+    } else if (mFrameRepeatBlockedOnCodecBuffer) {
+        bool success = repeatLatestBuffer_l();
+        ALOGV("onInputBufferEmptied: completing deferred repeatLatestBuffer_l %s",
+                success ? "SUCCESS" : "FAILURE");
+        mFrameRepeatBlockedOnCodecBuffer = false;
+    }
+
+    // releaseReleasableBuffers_l();
+    return C2_OK;
+}
+
+// Records a new incoming dataspace and, when it converts to a valid V0
+// dataspace, notifies the component together with the packed default color
+// aspects and the buffer's pixel format.
+void InputSurfaceSource::onDataspaceChanged_l(
+        android_dataspace dataspace, android_pixel_format pixelFormat) {
+    ALOGD("got buffer with new dataSpace %#x", dataspace);
+    mLastDataspace = dataspace;
+
+    if (ColorUtils::convertDataSpaceToV0(dataspace)) {
+        mComponent->dispatchDataSpaceChanged(
+                mLastDataspace, mDefaultColorAspectsPacked, pixelFormat);
+    }
+}
+
+/**
+ * Feeds one available (or newly acquired) video buffer to the codec, after
+ * applying any pending pause/resume/stop actions whose time has come. Returns
+ * true when an available buffer was consumed (including the dropped/suspended
+ * cases), false when no codec buffer or video buffer could be used. The "_l"
+ * suffix indicates the caller holds mMutex.
+ */
+bool InputSurfaceSource::fillCodecBuffer_l() {
+    CHECK(mExecuting && haveAvailableBuffers_l());
+
+    if (mFreeCodecBuffers.empty()) {
+        // No buffers available, bail.
+        ALOGV("fillCodecBuffer_l: no codec buffers, available=%zu+%d",
+                mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+        return false;
+    }
+
+    VideoBuffer item;
+    if (mAvailableBuffers.empty()) {
+        ALOGV("fillCodecBuffer_l: acquiring available buffer, available=%zu+%d",
+                mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+        if (acquireBuffer_l(&item) != C2_OK) {
+            ALOGE("fillCodecBuffer_l: failed to acquire available buffer");
+            return false;
+        }
+    } else {
+        ALOGV("fillCodecBuffer_l: getting available buffer, available=%zu+%d",
+                mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+        item = *mAvailableBuffers.begin();
+        mAvailableBuffers.erase(mAvailableBuffers.begin());
+    }
+
+    int64_t itemTimeUs = item.mTimestampNs / 1000;
+
+    // Process ActionItem in the Queue if there is any. If a buffer's timestamp
+    // is smaller than the first action's timestamp, no action need to be performed.
+    // If buffer's timestamp is larger or equal than the last action's timestamp,
+    // only the last action needs to be performed as all the actions before
+    // that action are overridden by the last action. For the other cases, traverse
+    // the Queue to find the newest action that with timestamp smaller or equal to
+    // the buffer's timestamp. For example, an action queue like
+    // [pause 1us], [resume 2us], [pause 3us], [resume 4us], [pause 5us].... Upon
+    // receiving a buffer with timestamp 3.5us, only the action [pause, 3us] needs
+    // to be handled and [pause, 1us], [resume 2us] will be discarded.
+    bool done = false;
+    bool seeStopAction = false;
+    if (!mActionQueue.empty()) {
+        // First scan to check if bufferTimestamp is smaller than first action's timestamp.
+        ActionItem nextAction = *(mActionQueue.begin());
+        if (itemTimeUs < nextAction.mActionTimeUs) {
+            ALOGV("No action. buffer timestamp %lld us < action timestamp: %lld us",
+                (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+            // All the actions are ahead. No action need to perform now.
+            // Release the buffer if is in suspended state, or process the buffer
+            // if not in suspended state.
+            done = true;
+        }
+
+        if (!done) {
+            // Find the newest action that with timestamp smaller than itemTimeUs. Then
+            // remove all the actions before and include the newest action.
+          std::list<ActionItem>::iterator it = mActionQueue.begin();
+            while (it != mActionQueue.end() && it->mActionTimeUs <= itemTimeUs
+                    && nextAction.mAction != ActionItem::STOP) {
+                nextAction = *it;
+                ++it;
+            }
+            mActionQueue.erase(mActionQueue.begin(), it);
+
+            CHECK(itemTimeUs >= nextAction.mActionTimeUs);
+            switch (nextAction.mAction) {
+                case ActionItem::PAUSE:
+                {
+                    mSuspended = true;
+                    ALOGV("RUNNING/PAUSE -> PAUSE at buffer %lld us  PAUSE Time: %lld us",
+                            (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+                    break;
+                }
+                case ActionItem::RESUME:
+                {
+                    mSuspended = false;
+                    ALOGV("PAUSE/RUNNING -> RUNNING at buffer %lld us  RESUME Time: %lld us",
+                            (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+                    break;
+                }
+                case ActionItem::STOP:
+                {
+                    ALOGV("RUNNING/PAUSE -> STOP at buffer %lld us  STOP Time: %lld us",
+                            (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+                    // Clear the whole ActionQueue as recording is done
+                    mActionQueue.clear();
+                    seeStopAction = true;
+                    break;
+                }
+                default:
+                    TRESPASS_DBG("Unknown action type");
+                    // return true here because we did consume an available buffer, so the
+                    // loop in start will eventually terminate even if we hit this.
+                    return false;
+            }
+        }
+    }
+
+    if (seeStopAction) {
+        // Clear all the buffers before setting mEndOfStream and signal EndOfInputStream.
+        releaseAllAvailableBuffers_l();
+        mEndOfStream = true;
+        submitEndOfInputStream_l();
+        return true;
+    }
+
+    if (mSuspended) {
+        // consumed (dropped) while suspended; the buffer is released via item's destructor
+        return true;
+    }
+
+    c2_status_t err = C2_CORRUPTED;
+
+    // only submit sample if start time is unspecified, or sample
+    // is queued after the specified start time
+    if (mSkipFramesBeforeNs < 0LL || item.mTimestampNs >= mSkipFramesBeforeNs) {
+        // if start time is set, offset time stamp by start time
+        if (mSkipFramesBeforeNs > 0) {
+            item.mTimestampNs -= mSkipFramesBeforeNs;
+        }
+
+        int64_t timeUs = item.mTimestampNs / 1000;
+        if (mFrameDropper != NULL && mFrameDropper->shouldDrop(timeUs)) {
+            ALOGV("skipping frame (%lld) to meet max framerate", static_cast<long long>(timeUs));
+            // set err to OK so that the skipped frame can still be saved as the latest frame
+            err = C2_OK;
+        } else {
+            err = submitBuffer_l(item); // this takes shared ownership of
+                                        // the acquired buffer on success
+        }
+    }
+
+    if (err != C2_OK) {
+        ALOGV("submitBuffer_l failed, will release buffer id %llu",
+                (unsigned long long)item.mBuffer->getId());
+        return true;
+    } else {
+        // Don't set the last buffer id if we're not repeating,
+        // we'll be holding on to the last buffer for nothing.
+        if (mFrameRepeatIntervalUs > 0LL) {
+            setLatestBuffer_l(item);
+        }
+        ALOGV("buffer submitted [id=%llu, useCount=%ld] acquired=%d",
+                (unsigned long long)item.mBuffer->getId(),
+                item.mBuffer.use_count(), mNumOutstandingAcquires);
+        mLastFrameTimestampUs = itemTimeUs;
+    }
+
+    return true;
+}
+
+// Re-submits the most recently submitted buffer to the codec (used when the
+// producer stalls and frame repeating is enabled). Returns false when there is
+// nothing to repeat, the source is suspended, or no codec buffer is free.
+bool InputSurfaceSource::repeatLatestBuffer_l() {
+    CHECK(mExecuting && !haveAvailableBuffers_l());
+
+    if (mLatestBuffer.mBuffer == nullptr || mSuspended) {
+        return false;
+    }
+
+    if (mFreeCodecBuffers.empty()) {
+        // No buffers available, bail.
+        ALOGV("repeatLatestBuffer_l: no codec buffers.");
+        return false;
+    }
+
+    if (!mLatestBuffer.mBuffer->isCached()) {
+        ALOGV("repeatLatestBuffer_l: slot was discarded, but repeating our own reference");
+    }
+
+    // it is ok to update the timestamp of latest buffer as it is only used for submission
+    c2_status_t err = submitBuffer_l(mLatestBuffer);
+    if (err != C2_OK) {
+        return false;
+    }
+
+    /* repeat last frame up to kRepeatLastFrameCount times.
+     * in case of static scene, a single repeat might not get rid of encoder
+     * ghosting completely, refresh a couple more times to get better quality
+     */
+    if (--mOutstandingFrameRepeatCount > 0) {
+        // set up timestamp for repeat frame
+        mLatestBuffer.mTimestampNs += mFrameRepeatIntervalUs * 1000;
+        queueFrameRepeat_l();
+    }
+
+    return true;
+}
+
+// Remembers |item| as the latest submitted buffer, resets the repeat budget to
+// kRepeatLastFrameCount, and schedules the first repeat.
+void InputSurfaceSource::setLatestBuffer_l(const VideoBuffer &item) {
+    mLatestBuffer = item;
+
+    ALOGV("setLatestBuffer_l: [id=%llu, useCount=%ld]",
+            (unsigned long long)mLatestBuffer.mBuffer->getId(), mLatestBuffer.mBuffer.use_count());
+
+    mOutstandingFrameRepeatCount = kRepeatLastFrameCount;
+    // set up timestamp for repeat frame
+    mLatestBuffer.mTimestampNs += mFrameRepeatIntervalUs * 1000;
+    queueFrameRepeat_l();
+}
+
+// Posts a delayed kWhatRepeatLastFrame message. Bumping the generation counter
+// invalidates any previously posted (still pending) repeat messages.
+void InputSurfaceSource::queueFrameRepeat_l() {
+    mFrameRepeatBlockedOnCodecBuffer = false;
+
+    if (mReflector != NULL) {
+        sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);
+        msg->setInt32("generation", ++mRepeatLastFrameGeneration);
+        msg->post(mFrameRepeatIntervalUs);
+    }
+}
+
+// integer sanitizer disabled: the timestamp arithmetic below intentionally
+// tolerates wraparound on extreme inputs
+#ifdef __clang__
+__attribute__((no_sanitize("integer")))
+#endif
+/**
+ * Computes the codec timestamp (us) for a buffer captured at |bufferTimeNs|,
+ * applying mInputBufferTimeOffsetUs. In time-lapse/slow-motion mode (mCaptureFps
+ * set and differing from mFps by more than 2x) capture time is rescaled to
+ * playback time, optionally snapping to capture points. Returns false if the
+ * frame should be dropped (too close to the previous capture, or going
+ * backward in time while frame dropping is enabled).
+ */
+bool InputSurfaceSource::calculateCodecTimestamp_l(
+        nsecs_t bufferTimeNs, int64_t *codecTimeUs) {
+    int64_t timeUs = bufferTimeNs / 1000;
+    timeUs += mInputBufferTimeOffsetUs;
+
+    if (mCaptureFps > 0.
+            && (mFps > 2 * mCaptureFps
+            || mCaptureFps > 2 * mFps)) {
+        // Time lapse or slow motion mode
+        if (mPrevCaptureUs < 0LL) {
+            // first capture
+            mPrevCaptureUs = mBaseCaptureUs = timeUs;
+            // adjust the first sample timestamp.
+            mPrevFrameUs = mBaseFrameUs =
+                    std::llround((timeUs * mCaptureFps) / mFps);
+            mFrameCount = 0;
+        } else if (mSnapTimestamps) {
+            double nFrames = (timeUs - mPrevCaptureUs) * mCaptureFps / 1000000;
+            if (nFrames < 0.5 - kTimestampFluctuation) {
+                // skip this frame as it's too close to previous capture
+                ALOGD("skipping frame, timeUs %lld",
+                      static_cast<long long>(timeUs));
+                return false;
+            }
+            // snap to nearest capture point
+            if (nFrames <= 1.0) {
+                nFrames = 1.0;
+            }
+            mFrameCount += std::llround(nFrames);
+            mPrevCaptureUs = mBaseCaptureUs + std::llround(
+                    mFrameCount * 1000000 / mCaptureFps);
+            mPrevFrameUs = mBaseFrameUs + std::llround(
+                    mFrameCount * 1000000 / mFps);
+        } else {
+            if (timeUs <= mPrevCaptureUs) {
+                if (mFrameDropper != NULL && mFrameDropper->disabled()) {
+                    // Warn only, client has disabled frame drop logic possibly for image
+                    // encoding cases where camera's ZSL mode could send out of order frames.
+                    ALOGW("Received frame that's going backward in time");
+                } else {
+                    // Drop the frame if it's going backward in time. Bad timestamp
+                    // could disrupt encoder's rate control completely.
+                    ALOGW("Dropping frame that's going backward in time");
+                    return false;
+                }
+            }
+            mPrevCaptureUs = timeUs;
+            mPrevFrameUs = mBaseFrameUs + std::llround(
+                    (timeUs - mBaseCaptureUs) * (mCaptureFps / mFps));
+        }
+
+        ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
+                static_cast<long long>(timeUs),
+                static_cast<long long>(mPrevCaptureUs),
+                static_cast<long long>(mPrevFrameUs));
+    } else {
+        if (timeUs <= mPrevFrameUs) {
+            if (mFrameDropper != NULL && mFrameDropper->disabled()) {
+                // Warn only, client has disabled frame drop logic possibly for image
+                // encoding cases where camera's ZSL mode could send out of order frames.
+                ALOGW("Received frame that's going backward in time");
+            } else {
+                // Drop the frame if it's going backward in time. Bad timestamp
+                // could disrupt encoder's rate control completely.
+                ALOGW("Dropping frame that's going backward in time");
+                return false;
+            }
+        }
+
+        mPrevFrameUs = timeUs;
+    }
+
+    *codecTimeUs = mPrevFrameUs;
+    return true;
+}
+
+/**
+ * Submits |item| to the codec using the first free codec buffer id. On success
+ * the id moves from the free list to mSubmittedCodecBuffers together with
+ * shared ownership of the acquired buffer. Returns C2_CORRUPTED when the
+ * timestamp calculation rejects the frame, or the component's error otherwise.
+ */
+c2_status_t InputSurfaceSource::submitBuffer_l(const VideoBuffer &item) {
+    CHECK(!mFreeCodecBuffers.empty());
+    uint32_t codecBufferId = *mFreeCodecBuffers.begin();
+
+    ALOGV("submitBuffer_l [id=%llu, codecbufferId=%d]",
+            (unsigned long long)item.mBuffer->getId(), codecBufferId);
+
+    int64_t codecTimeUs;
+    if (!calculateCodecTimestamp_l(item.mTimestampNs, &codecTimeUs)) {
+        return C2_CORRUPTED;
+    }
+
+    std::shared_ptr<AcquiredBuffer> buffer = item.mBuffer;
+    int32_t imageFormat = 0;
+    AHardwareBuffer *ahwb = nullptr;
+    AImage_getFormat(buffer->getImage(), &imageFormat);
+    AImage_getHardwareBuffer(buffer->getImage(), &ahwb);
+
+    // notify the component on dataspace transitions before submitting
+    if ((android_dataspace)item.mDataspace != mLastDataspace) {
+        onDataspaceChanged_l(
+                item.mDataspace,
+                (android_pixel_format)imageFormat);
+    }
+
+    c2_status_t err = mComponent->submitBuffer(
+            codecBufferId, buffer->getImage(), codecTimeUs, buffer->getAcquireFenceFd());
+
+    if (err != C2_OK) {
+        ALOGW("WARNING: emptyGraphicBuffer failed: 0x%x", err);
+        return err;
+    }
+
+    mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
+
+    // record the submission; overwrite any stale entry for the same id
+    auto res = mSubmittedCodecBuffers.emplace(codecBufferId, buffer);
+    if (!res.second) {
+        auto it = res.first;
+        it->second = buffer;
+    }
+    ALOGV("emptyImageBuffer succeeded, bufferId=%u@%d bufhandle=%p",
+            codecBufferId, res.second, ahwb);
+    return C2_OK;
+}
+
+// Submits an EOS marker to the codec using a free codec buffer (tracked in
+// mSubmittedCodecBuffers with a null AcquiredBuffer). Silently returns when EOS
+// was already sent or no codec buffer is free (retried from
+// onInputBufferEmptied in that case).
+void InputSurfaceSource::submitEndOfInputStream_l() {
+    CHECK(mEndOfStream);
+    if (mEndOfStreamSent) {
+        ALOGV("EOS already sent");
+        return;
+    }
+
+    if (mFreeCodecBuffers.empty()) {
+        ALOGV("submitEndOfInputStream_l: no codec buffers available");
+        return;
+    }
+    uint32_t codecBufferId = *mFreeCodecBuffers.begin();
+
+    // We reject any additional incoming graphic buffers. There is no acquired buffer used for EOS
+    c2_status_t err = mComponent->submitEos(codecBufferId);
+    if (err != C2_OK) {
+        ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);
+    } else {
+        mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
+        auto res = mSubmittedCodecBuffers.emplace(codecBufferId, nullptr);
+        if (!res.second) {
+            auto it = res.first;
+            it->second = nullptr;
+        }
+        ALOGV("submitEndOfInputStream_l: buffer submitted, bufferId=%u@%d",
+                codecBufferId, res.second);
+        mEndOfStreamSent = true;
+
+        // no need to hold onto any buffers for frame repeating
+        ++mRepeatLastFrameGeneration;
+        mLatestBuffer.mBuffer.reset();
+    }
+}
+
+// Acquires the next available image from mImageReader and wraps it into a
+// VideoBuffer (cached buffer + acquire fence + timestamp + dataspace).
+// Must be called with mMutex held.
+// Returns:
+//   C2_NOT_FOUND  - no frame was available (unexpected; callers track counts)
+//   C2_BLOCKING   - the reader already has the max number of images acquired
+//   C2_CORRUPTED  - any other AImageReader failure
+//   C2_OK         - *ab holds the acquired buffer
+c2_status_t InputSurfaceSource::acquireBuffer_l(VideoBuffer *ab) {
+    //BufferItem bi;
+    int fenceFd = -1;
+    AImage *image = nullptr;
+
+    media_status_t err = AImageReader_acquireNextImageAsync(mImageReader, &image, &fenceFd);
+    if (err == AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) {
+        // shouldn't happen
+        ALOGW("acquireBuffer_l: frame was not available");
+        return C2_NOT_FOUND;
+    } else if (err == AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED) {
+        ALOGW("acquireBuffer_l: already acquired max frames");
+        return C2_BLOCKING;
+    } else if (err != AMEDIA_OK) {
+        ALOGW("acquireBuffer_l: failed with err=%d", err);
+        return C2_CORRUPTED;
+    }
+    CHECK(image != nullptr);
+
+    --mNumAvailableUnacquiredBuffers;
+
+    // The AHardwareBuffer id uniquely identifies the underlying allocation and
+    // is the key of our buffer cache (mBufferIds).
+    AHardwareBuffer *ahwbBuffer = nullptr;
+    ahwb_id bid = 0;
+    (void)AImage_getHardwareBuffer(image, &ahwbBuffer);
+    CHECK(ahwbBuffer != nullptr);
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        (void)AHardwareBuffer_getId(ahwbBuffer, &bid);
+    } else {
+        LOG_ALWAYS_FATAL(
+                "AHardwareBuffer_getId must be available for this implementation to work");
+    }
+
+    sp<Fence> acqFence(new Fence(fenceFd));
+
+
+    // Manage our buffer cache.
+    std::shared_ptr<CachedBuffer> buffer;
+
+    auto it = mBufferIds.find(bid);
+
+    // replace/initialize the bufferId cache with a new buffer
+    ALOGV("acquireBuffer_l: %s buffer id %llu",
+            it == mBufferIds.end() ? "setting" : "UPDATING",
+            (unsigned long long)bid);
+    if (it != mBufferIds.end()) {
+        discardBufferAtIter_l(it);
+    } else {
+        auto res = mBufferIds.emplace(bid, nullptr);
+        it = res.first;
+    }
+    buffer = std::make_shared<CachedBuffer>(bid, image);
+    it->second = buffer;
+
+    int64_t imageTimestamp = -1;
+    int32_t imageDataspace = 0;
+    (void)AImage_getTimestamp(image, &imageTimestamp);
+    if (__builtin_available(android __ANDROID_API_U__, *)) {
+        (void)AImage_getDataSpace(image, &imageDataspace);
+    } else {
+        // fixed copy-pasted message: this branch gates AImage_getDataSpace
+        LOG_ALWAYS_FATAL(
+                "AImage_getDataSpace must be available for this implementation to work");
+    }
+
+    std::shared_ptr<AcquiredBuffer> acquiredBuffer =
+        std::make_shared<AcquiredBuffer>(
+                buffer,
+                [this](AcquiredBuffer *buffer){
+                    // AcquiredBuffer's destructor should always be called when mMutex is locked.
+                    // If we had a reentrant mutex, we could just lock it again to ensure this.
+                    if (mMutex.try_lock()) {
+                        TRESPASS_DBG();
+                        mMutex.unlock();
+                    }
+
+                    // we can release buffers immediately if not using adapters
+                    // alternately, we could add them to mSlotsToRelease, but we would
+                    // somehow need to propagate frame number to that queue
+                    if (buffer->isCached()) {
+                        --mNumOutstandingAcquires;
+                        AImage_deleteAsync(buffer->getImage(), buffer->getReleaseFence()->dup());
+                    }
+                },
+                acqFence);
+    VideoBuffer videoBuffer{
+        acquiredBuffer, imageTimestamp,
+        static_cast<android_dataspace_t>(imageDataspace)};
+    *ab = videoBuffer;
+    ++mNumOutstandingAcquires;
+    return C2_OK;
+}
+
+// AImageReader callback calls this interface
+// Invoked when the producer queues a new frame. Counts the frame as available
+// and either acquires it right away, or — when the frame cannot be consumed or
+// discarded immediately — leaves it pending for later acquisition.
+void InputSurfaceSource::onFrameAvailable() {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    ALOGV("onFrameAvailable: executing=%d available=%zu+%d",
+            mExecuting, mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
+    ++mNumAvailableUnacquiredBuffers;
+
+    // For BufferQueue we cannot acquire a buffer if we cannot immediately feed it to the codec
+    // UNLESS we are discarding this buffer (acquiring and immediately releasing it), which makes
+    // this an ugly logic.
+    // NOTE: We could also rely on our debug counter but that is meant only as a debug counter.
+    if (!areWeDiscardingAvailableBuffers_l() && mFreeCodecBuffers.empty()) {
+        // we may not be allowed to acquire a possibly encodable buffer, so just note that
+        // it is available
+        ALOGV("onFrameAvailable: cannot acquire buffer right now, do it later");
+
+        ++mRepeatLastFrameGeneration; // cancel any pending frame repeat
+        return;
+    }
+
+    VideoBuffer buffer;
+    c2_status_t err = acquireBuffer_l(&buffer);
+    if (err != C2_OK) {
+        ALOGE("onFrameAvailable: acquireBuffer returned err=%d", err);
+    } else {
+        onBufferAcquired_l(buffer);
+    }
+}
+
+// Returns true when newly available buffers would not be encoded anyway, so
+// they may be acquired and immediately dropped. Must be called with mMutex held.
+bool InputSurfaceSource::areWeDiscardingAvailableBuffers_l() {
+    return mEndOfStreamSent // already sent EOS to codec
+            || mComponent == nullptr // there is no codec connected
+            || (mSuspended && mActionQueue.empty()) // we are suspended and not waiting for
+                                                    // any further action
+            || !mExecuting;
+}
+
+// Handles a buffer freshly acquired from the image reader: drops it if EOS was
+// already sent, or if the codec is disconnected / the source is suspended with
+// no pending actions; otherwise queues it and, while executing, immediately
+// tries to feed it to the codec. Must be called with mMutex held.
+void InputSurfaceSource::onBufferAcquired_l(const VideoBuffer &buffer) {
+    if (mEndOfStreamSent) {
+        // This should only be possible if a new buffer was queued after
+        // EOS was signaled, i.e. the app is misbehaving.
+        ALOGW("onFrameAvailable: EOS is sent, ignoring frame");
+    } else if (mComponent == NULL || (mSuspended && mActionQueue.empty())) {
+        // FIXME: if we are suspended but have a resume queued we will stop repeating the last
+        // frame. Is that the desired behavior?
+        ALOGV("onFrameAvailable: suspended, ignoring frame");
+    } else {
+        ++mRepeatLastFrameGeneration; // cancel any pending frame repeat
+        mAvailableBuffers.push_back(buffer);
+        if (mExecuting) {
+            fillCodecBuffer_l();
+        }
+    }
+}
+
+// AImageReader callback calls this interface
+// Invoked when the underlying buffer identified by |id| is released by the
+// reader; drops the corresponding cache entry if we were still tracking it.
+void InputSurfaceSource::onBufferReleased(InputSurfaceSource::ahwb_id id) {
+    std::lock_guard<std::mutex> lock(mMutex);
+
+    if (!discardBufferInId_l(id)) {
+        ALOGW("released buffer not cached %llu", (unsigned long long)id);
+    }
+}
+
+// Looks up |id| in the buffer cache; if present, discards the cached buffer
+// and removes the cache entry. Returns true if a cached buffer was discarded,
+// false if |id| was not cached. Must be called with mMutex held.
+bool InputSurfaceSource::discardBufferInId_l(InputSurfaceSource::ahwb_id id) {
+    auto it = mBufferIds.find(id);
+    // fixed inverted condition: previously a cached id returned false and an
+    // unknown id dereferenced/erased the end iterator (undefined behavior)
+    if (it == mBufferIds.end()) {
+        return false;
+    } else {
+        discardBufferAtIter_l(it);
+        mBufferIds.erase(it);
+        return true;
+    }
+}
+
+// Releases the cached buffer referenced by |it| and clears the cache slot.
+// The caller remains responsible for erasing or replacing the map entry.
+// Must be called with mMutex held.
+void InputSurfaceSource::discardBufferAtIter_l(BufferIdMap::iterator &it) {
+    const std::shared_ptr<CachedBuffer>& buffer = it->second;
+    // use -1 if there is no latest buffer, and 0 if it is no longer cached
+    ahwb_id latestBufferId =
+        mLatestBuffer.mBuffer == nullptr ? -1 : mLatestBuffer.mBuffer->getId();
+    ALOGV("releasing acquired buffer: [id=%llu, useCount=%ld], latest: [id=%llu]",
+            (unsigned long long)buffer->getId(), buffer.use_count(),
+            (unsigned long long)latestBufferId);
+    buffer->onDroppedFromCache();
+
+    // If the slot of an acquired buffer is discarded, that buffer will not have to be
+    // released to the producer, so account it here. However, it is possible that the
+    // acquired buffer has already been discarded so check if it still is.
+    if (buffer->isAcquired()) {
+        --mNumOutstandingAcquires;
+    }
+
+    // clear the buffer reference (not technically needed as caller either replaces or deletes
+    // it; done here for safety).
+    it->second.reset();
+    CHECK_DBG(buffer == nullptr);
+}
+
+// Drops every buffer we hold and drains the reader's pending frames: each
+// acquireBuffer_l call decrements mNumAvailableUnacquiredBuffers, and the
+// temporary VideoBuffer releases the image when it goes out of scope (via the
+// AcquiredBuffer release callback). Must be called with mMutex held.
+void InputSurfaceSource::releaseAllAvailableBuffers_l() {
+    mAvailableBuffers.clear();
+    while (mNumAvailableUnacquiredBuffers > 0) {
+        VideoBuffer item;
+        if (acquireBuffer_l(&item) != C2_OK) {
+            ALOGW("releaseAllAvailableBuffers: failed to acquire available unacquired buffer");
+            break;
+        }
+    }
+}
+
+// Configures the source for a (newly connected) codec component and resets all
+// per-session encoding state under mMutex. The ImageReader instance is reused:
+// its buffer count cannot change after creation (a mismatch is only logged),
+// while width/height/usage updates are written into mImageReaderConfig.
+// Returns C2_CORRUPTED if the source was never initialized, C2_BAD_VALUE for a
+// null component or a failed usage update.
+c2_status_t InputSurfaceSource::configure(
+        const std::shared_ptr<InputSurfaceConnection>& component,
+        int32_t dataSpace,
+        int32_t bufferCount,
+        uint32_t frameWidth,
+        uint32_t frameHeight,
+        uint64_t consumerUsage) {
+    if (mInitCheck != C2_OK) {
+        ALOGE("configure() was called without initialization");
+        return C2_CORRUPTED;
+    }
+    if (component == NULL) {
+        return C2_BAD_VALUE;
+    }
+
+    {
+        std::lock_guard<std::mutex> autoLock(mMutex);
+        mComponent = component;
+
+        if (bufferCount != mImageReaderConfig.maxImages) {
+            ALOGW("bufferCount %d cannot be changed after ImageReader creation to %d",
+                    mImageReaderConfig.maxImages, bufferCount);
+        }
+        if (frameWidth != mImageReaderConfig.width ||
+                frameHeight != mImageReaderConfig.height) {
+            // NOTE:  ImageReader will handle the resolution change without explicit reconfig.
+            mImageReaderConfig.width = frameWidth;
+            mImageReaderConfig.height = frameHeight;
+            // NOTE(review): width/height were just overwritten above, so this
+            // logs new -> new rather than old -> new — confirm whether the old
+            // values were intended in the first placeholders.
+            ALOGD("Maybe an implicit ImageReader resolution change: "
+                  "frameWidth %d -> %d: frameHeight %d -> %d",
+                    mImageReaderConfig.width, frameWidth, mImageReaderConfig.height, frameHeight);
+        }
+
+        // Encoder consumption always requires the VIDEO_ENCODE usage bit.
+        consumerUsage |= AHARDWAREBUFFER_USAGE_VIDEO_ENCODE;
+        if (consumerUsage != mImageReaderConfig.usage) {
+            // AImageReader_setUsage is only called on API level 36+; on older
+            // releases only the recorded config is updated.
+            if (__builtin_available(android 36, *)) {
+                media_status_t err = AImageReader_setUsage(mImageReader, consumerUsage);
+                if (err != AMEDIA_OK) {
+                    ALOGE("media_err(%d), failed to configure usage to %llu from %llu",
+                            err, (unsigned long long)consumerUsage,
+                            (unsigned long long)mImageReaderConfig.usage);
+                    return C2_BAD_VALUE;
+                }
+            }
+            mImageReaderConfig.usage = consumerUsage;
+        }
+
+        // Set impl. defined format as default. Depending on the usage flags
+        // the device-specific implementation will derive the exact format.
+        mImageReaderConfig.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+
+        // Sets the default buffer data space
+        ALOGD("setting dataspace: %#x, acquired=%d", dataSpace, mNumOutstandingAcquires);
+        mLastDataspace = (android_dataspace)dataSpace;
+
+        // Reset all per-session state for the new connection.
+        mExecuting = false;
+        mSuspended = false;
+        mEndOfStream = false;
+        mEndOfStreamSent = false;
+        mSkipFramesBeforeNs = -1LL;
+        mFrameDropper.reset();
+        mFrameRepeatIntervalUs = -1LL;
+        mRepeatLastFrameGeneration = 0;
+        mOutstandingFrameRepeatCount = 0;
+        mLatestBuffer.mBuffer.reset();
+        mFrameRepeatBlockedOnCodecBuffer = false;
+        mFps = -1.0;
+        mCaptureFps = -1.0;
+        mBaseCaptureUs = -1LL;
+        mBaseFrameUs = -1LL;
+        mPrevCaptureUs = -1LL;
+        mPrevFrameUs = -1LL;
+        mFrameCount = 0;
+        mInputBufferTimeOffsetUs = 0;
+        mStopTimeUs = -1;
+        mActionQueue.clear();
+    }
+
+    return C2_OK;
+}
+
+// Suspends or resumes frame submission. With suspendStartTimeUs == -1 the
+// change is immediate: suspend also drops all pending buffers, and resume
+// retries a frame-repeat that was blocked waiting for a codec buffer. With a
+// timestamp, a PAUSE/RESUME action is queued to be applied in order; the
+// timestamp must not be in the future, must not precede the last queued
+// action, and is rejected outright while a STOP action is pending.
+c2_status_t InputSurfaceSource::setSuspend(bool suspend, int64_t suspendStartTimeUs) {
+    ALOGV("setSuspend=%d at time %lld us", suspend, (long long)suspendStartTimeUs);
+
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mStopTimeUs != -1) {
+        ALOGE("setSuspend failed as STOP action is pending");
+        return C2_CANNOT_DO;
+    }
+
+    // Push the action to the queue.
+    if (suspendStartTimeUs != -1) {
+        // suspendStartTimeUs must be smaller or equal to current systemTime.
+        int64_t currentSystemTimeUs = systemTime() / 1000;
+        if (suspendStartTimeUs > currentSystemTimeUs) {
+            ALOGE("setSuspend failed. %lld is larger than current system time %lld us",
+                    (long long)suspendStartTimeUs, (long long)currentSystemTimeUs);
+            return C2_BAD_VALUE;
+        }
+        if (mLastActionTimeUs != -1 && suspendStartTimeUs < mLastActionTimeUs) {
+            ALOGE("setSuspend failed. %lld is smaller than last action time %lld us",
+                    (long long)suspendStartTimeUs, (long long)mLastActionTimeUs);
+            return C2_BAD_VALUE;
+        }
+        mLastActionTimeUs = suspendStartTimeUs;
+        ActionItem action;
+        action.mAction = suspend ? ActionItem::PAUSE : ActionItem::RESUME;
+        action.mActionTimeUs = suspendStartTimeUs;
+        ALOGV("Push %s action into actionQueue", suspend ? "PAUSE" : "RESUME");
+        mActionQueue.push_back(action);
+    } else {
+        if (suspend) {
+            mSuspended = true;
+            releaseAllAvailableBuffers_l();
+            return C2_OK;
+        } else {
+            mSuspended = false;
+            // If a deferred frame repeat was blocked on a codec buffer while we
+            // were suspended, try it now that we are resumed.
+            if (mExecuting && !haveAvailableBuffers_l()
+                    && mFrameRepeatBlockedOnCodecBuffer) {
+                if (repeatLatestBuffer_l()) {
+                    ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
+                    mFrameRepeatBlockedOnCodecBuffer = false;
+                } else {
+                    ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
+                }
+            }
+        }
+    }
+    return C2_OK;
+}
+
+// Sets the interval after which the latest frame is re-submitted when no new
+// frame arrives. Only accepted before the source starts executing
+// (C2_BAD_STATE otherwise) and for strictly positive intervals (C2_BAD_VALUE).
+c2_status_t InputSurfaceSource::setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) {
+    ALOGV("setRepeatPreviousFrameDelayUs: delayUs=%lld", (long long)repeatAfterUs);
+
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mExecuting) {
+        return C2_BAD_STATE;
+    }
+    if (repeatAfterUs <= 0LL) {
+        return C2_BAD_VALUE;
+    }
+
+    mFrameRepeatIntervalUs = repeatAfterUs;
+    return C2_OK;
+}
+
+// Sets a constant offset applied to input buffer timestamps. Only negative
+// offsets are accepted; non-negative values return C2_BAD_VALUE.
+c2_status_t InputSurfaceSource::setTimeOffsetUs(int64_t timeOffsetUs) {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    // timeOffsetUs must be negative for adjustment.
+    if (timeOffsetUs >= 0LL) {
+        return C2_BAD_VALUE;
+    }
+
+    mInputBufferTimeOffsetUs = timeOffsetUs;
+    return C2_OK;
+}
+
+// Installs a FrameDropper limiting the input frame rate to maxFps. Only
+// accepted before the source starts executing.
+c2_status_t InputSurfaceSource::setMaxFps(float maxFps) {
+    // NOTE(review): maxFps is a float but is truncated to an integer for this
+    // log line — fractional rates will log misleadingly; confirm if %f was
+    // intended.
+    ALOGV("setMaxFps: maxFps=%lld", (long long)maxFps);
+
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mExecuting) {
+        return C2_BAD_STATE;
+    }
+
+    mFrameDropper = std::make_shared<FrameDropper>();
+    mFrameDropper->setMaxFrameRate(maxFps);
+
+    return C2_OK;
+}
+
+// Frames with timestamps before skipFramesBeforeUs will be dropped. The value
+// is converted to nanoseconds with an overflow guard; non-positive or
+// overflowing inputs disable skipping (-1).
+c2_status_t InputSurfaceSource::setStartTimeUs(int64_t skipFramesBeforeUs) {
+    ALOGV("setStartTimeUs: skipFramesBeforeUs=%lld", (long long)skipFramesBeforeUs);
+
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    mSkipFramesBeforeNs =
+            (skipFramesBeforeUs > 0 && skipFramesBeforeUs <= INT64_MAX / 1000) ?
+            (skipFramesBeforeUs * 1000) : -1LL;
+
+    return C2_OK;
+}
+
+// Schedules a STOP action at |stopTimeUs|. The time must not be in the future
+// and must not precede the last queued action (C2_BAD_VALUE otherwise). Once a
+// stop time is set, subsequent calls are silently ignored (C2_OK).
+c2_status_t InputSurfaceSource::setStopTimeUs(int64_t stopTimeUs) {
+    ALOGV("setStopTimeUs: %lld us", (long long)stopTimeUs);
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mStopTimeUs != -1) {
+        // Ignore if stop time has already been set
+        return C2_OK;
+    }
+
+    // stopTimeUs must be smaller or equal to current systemTime.
+    int64_t currentSystemTimeUs = systemTime() / 1000;
+    if (stopTimeUs > currentSystemTimeUs) {
+        ALOGE("setStopTimeUs failed. %lld is larger than current system time %lld us",
+            (long long)stopTimeUs, (long long)currentSystemTimeUs);
+        return C2_BAD_VALUE;
+    }
+    if (mLastActionTimeUs != -1 && stopTimeUs < mLastActionTimeUs) {
+        // fixed copy-pasted message: this is setStopTimeUs, not setSuspend
+        ALOGE("setStopTimeUs failed. %lld is smaller than last action time %lld us",
+            (long long)stopTimeUs, (long long)mLastActionTimeUs);
+        return C2_BAD_VALUE;
+    }
+    mLastActionTimeUs = stopTimeUs;
+    ActionItem action;
+    action.mAction = ActionItem::STOP;
+    action.mActionTimeUs = stopTimeUs;
+    mActionQueue.push_back(action);
+    mStopTimeUs = stopTimeUs;
+    return C2_OK;
+}
+
+// Returns the offset between the configured stop time and the timestamp of
+// the last submitted frame (0 if no frame has been submitted yet). Fails with
+// C2_CANNOT_DO if no stop time was set.
+c2_status_t InputSurfaceSource::getStopTimeOffsetUs(int64_t *stopTimeOffsetUs) {
+    ALOGV("getStopTimeOffsetUs");
+    std::lock_guard<std::mutex> autoLock(mMutex);
+    if (mStopTimeUs == -1) {
+        ALOGW("Fail to return stopTimeOffsetUs as stop time is not set");
+        return C2_CANNOT_DO;
+    }
+    *stopTimeOffsetUs =
+        mLastFrameTimestampUs == -1 ? 0 : mStopTimeUs - mLastFrameTimestampUs;
+    return C2_OK;
+}
+
+// Configures time-lapse / slow-motion capture: |captureFps| is the rate frames
+// arrive from the producer, |fps| the target encode rate. Only accepted before
+// the source starts executing and for strictly positive rates. When
+// captureFps > fps, timestamp snapping is gated by the
+// debug.stagefright.snap_timestamps system property.
+c2_status_t InputSurfaceSource::setTimeLapseConfig(double fps, double captureFps) {
+    ALOGV("setTimeLapseConfig: fps=%lg, captureFps=%lg",
+            fps, captureFps);
+    std::lock_guard<std::mutex> autoLock(mMutex);
+
+    if (mExecuting) {
+        return C2_BAD_STATE;
+    }
+    if (!(fps > 0) || !(captureFps > 0)) {
+        return C2_BAD_VALUE;
+    }
+
+    mFps = fps;
+    mCaptureFps = captureFps;
+    if (captureFps > fps) {
+        mSnapTimestamps = 1 == ::android::base::GetIntProperty(
+                "debug.stagefright.snap_timestamps", int64_t(0));
+    } else {
+        mSnapTimestamps = false;
+    }
+
+    return C2_OK;
+}
+
+// Records the packed default color aspects to be applied to input buffers and
+// logs the unpacked request; this function itself only stores the value.
+c2_status_t InputSurfaceSource::setColorAspects(int32_t aspectsPacked) {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+    mDefaultColorAspectsPacked = aspectsPacked;
+    ColorAspects colorAspects = ColorUtils::unpackToColorAspects(aspectsPacked);
+    ALOGD("requesting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s))",
+            colorAspects.mRange, asString(colorAspects.mRange),
+            colorAspects.mPrimaries, asString(colorAspects.mPrimaries),
+            colorAspects.mMatrixCoeffs, asString(colorAspects.mMatrixCoeffs),
+            colorAspects.mTransfer, asString(colorAspects.mTransfer));
+
+    return C2_OK;
+}
+
+// Signals end-of-stream from the producer side. Returns C2_DUPLICATE if EOS
+// was already signaled; otherwise marks EOS and submits it to the codec right
+// away when no frames are pending and no stop time is set.
+c2_status_t InputSurfaceSource::signalEndOfInputStream() {
+    std::lock_guard<std::mutex> autoLock(mMutex);
+    ALOGV("signalEndOfInputStream: executing=%d available=%zu+%d eos=%d",
+            mExecuting, mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers, mEndOfStream);
+
+    if (mEndOfStream) {
+        ALOGE("EOS was already signaled");
+        return C2_DUPLICATE;
+    }
+
+    // Set the end-of-stream flag.  If no frames are pending from the
+    // BufferQueue, and a codec buffer is available, and we're executing,
+    // and there is no stop timestamp, we initiate the EOS from here.
+    // Otherwise, we'll let codecBufferEmptied() (or start) do it.
+    //
+    // Note: if there are no pending frames and all codec buffers are
+    // available, we *must* submit the EOS from here or we'll just
+    // stall since no future events are expected.
+    mEndOfStream = true;
+
+    if (mStopTimeUs == -1 && mExecuting && !haveAvailableBuffers_l()) {
+        submitEndOfInputStream_l();
+    }
+
+    return C2_OK;
+}
+
+// Looper message handler. Currently only handles kWhatRepeatLastFrame, which
+// re-submits the latest buffer unless the request is stale (generation
+// mismatch) or new input became available in the meantime; on failure the
+// repeat is deferred until a codec buffer frees up.
+void InputSurfaceSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRepeatLastFrame:
+        {
+            std::lock_guard<std::mutex> autoLock(mMutex);
+
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mRepeatLastFrameGeneration) {
+                // stale
+                break;
+            }
+
+            if (!mExecuting || haveAvailableBuffers_l()) {
+                break;
+            }
+
+            bool success = repeatLatestBuffer_l();
+            if (success) {
+                ALOGV("repeatLatestBuffer_l SUCCESS");
+            } else {
+                ALOGV("repeatLatestBuffer_l FAILURE");
+                // retry once a codec buffer is returned to us
+                mFrameRepeatBlockedOnCodecBuffer = true;
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+}  // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 029044f..9796a5d 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -34,16 +34,9 @@
         "libcodec2-aidl-client-defaults",
     ],
 
-    // http://b/343951602#comment4 Explicitly set cpp_std to gnu++20.  The
-    // default inherited from libcodec2-impl-defaults sets it to gnu++17 which
-    // causes a segfault when mixing global std::string symbols built with
-    // gnu++17 and gnu++20.  TODO(b/343951602): clean this after
-    // libcodec2-impl-defaults opt into gnu++17 is removed.
-    cpp_std: "gnu++20",
-
     header_libs: [
-        "libapexcodecs-header",
         "libcodec2_internal", // private
+        "libcom.android.media.swcodec.apexcodecs-header",
     ],
 
     shared_libs: [
@@ -82,7 +75,7 @@
     ],
 
     export_header_lib_headers: [
-        "libapexcodecs-header",
+        "libcom.android.media.swcodec.apexcodecs-header",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/codec2/hal/client/ApexCodecsLazy.cpp b/media/codec2/hal/client/ApexCodecsLazy.cpp
index cd7953e..7847985 100644
--- a/media/codec2/hal/client/ApexCodecsLazy.cpp
+++ b/media/codec2/hal/client/ApexCodecsLazy.cpp
@@ -30,30 +30,43 @@
 
 namespace {
 
-// This file provides a lazy interface to libapexcodecs.so to address early boot dependencies.
+// This file provides a lazy interface to libcom.android.media.swcodec.apexcodecs.so
+// to address early boot dependencies.
 
-// Method pointers to libapexcodecs methods are held in an array which simplifies checking
-// all pointers are initialized.
+// Method pointers to libcom.android.media.swcodec.apexcodecs methods are held in an array
+// which simplifies checking all pointers are initialized.
 enum MethodIndex {
+    k_ApexCodec_Buffer_clear,
+    k_ApexCodec_Buffer_create,
+    k_ApexCodec_Buffer_destroy,
+    k_ApexCodec_Buffer_getBufferInfo,
+    k_ApexCodec_Buffer_getConfigUpdates,
+    k_ApexCodec_Buffer_getGraphicBuffer,
+    k_ApexCodec_Buffer_getLinearBuffer,
+    k_ApexCodec_Buffer_getType,
+    k_ApexCodec_Buffer_setBufferInfo,
+    k_ApexCodec_Buffer_setConfigUpdates,
+    k_ApexCodec_Buffer_setGraphicBuffer,
+    k_ApexCodec_Buffer_setLinearBuffer,
     k_ApexCodec_Component_create,
     k_ApexCodec_Component_destroy,
     k_ApexCodec_Component_flush,
     k_ApexCodec_Component_getConfigurable,
     k_ApexCodec_Component_process,
-    k_ApexCodec_Component_start,
     k_ApexCodec_Component_reset,
+    k_ApexCodec_Component_start,
     k_ApexCodec_Configurable_config,
     k_ApexCodec_Configurable_query,
     k_ApexCodec_Configurable_querySupportedParams,
     k_ApexCodec_Configurable_querySupportedValues,
     k_ApexCodec_GetComponentStore,
+    k_ApexCodec_ParamDescriptors_destroy,
     k_ApexCodec_ParamDescriptors_getDescriptor,
     k_ApexCodec_ParamDescriptors_getIndices,
-    k_ApexCodec_ParamDescriptors_release,
+    k_ApexCodec_SettingResults_destroy,
     k_ApexCodec_SettingResults_getResultAtIndex,
-    k_ApexCodec_SettingResults_release,
+    k_ApexCodec_SupportedValues_destroy,
     k_ApexCodec_SupportedValues_getTypeAndValues,
-    k_ApexCodec_SupportedValues_release,
     k_ApexCodec_Traits_get,
 
     // Marker for count of methods
@@ -84,14 +97,15 @@
     }
 
 private:
-    static void* LoadLibapexcodecs(int dlopen_flags) {
-        return dlopen("libapexcodecs.so", dlopen_flags);
+    static void* LoadApexCodecs(int dlopen_flags) {
+        return dlopen("libcom.android.media.swcodec.apexcodecs.so", dlopen_flags);
     }
 
     // Initialization and symbol binding.
     void bindSymbol_l(void* handle, const char* name, enum MethodIndex index) {
         void* symbol = dlsym(handle, name);
-        ALOGI_IF(symbol == nullptr, "Failed to find symbol '%s' in libapexcodecs.so: %s",
+        ALOGI_IF(symbol == nullptr,
+                "Failed to find symbol '%s' in libcom.android.media.swcodec.apexcodecs.so: %s",
                  name, dlerror());
         mMethods[index] = symbol;
     }
@@ -103,41 +117,53 @@
                 return true;
             }
         }
-        void* handle = LoadLibapexcodecs(RTLD_NOW);
+        void* handle = LoadApexCodecs(RTLD_NOW);
         if (handle == nullptr) {
-            ALOGI("Failed to load libapexcodecs.so: %s", dlerror());
+            ALOGI("Failed to load libcom.android.media.swcodec.apexcodecs.so: %s", dlerror());
             return false;
         }
 
         RWLock::AutoWLock l(mLock);
 #undef BIND_SYMBOL
 #define BIND_SYMBOL(name) bindSymbol_l(handle, #name, k_##name);
+        BIND_SYMBOL(ApexCodec_Buffer_clear);
+        BIND_SYMBOL(ApexCodec_Buffer_create);
+        BIND_SYMBOL(ApexCodec_Buffer_destroy);
+        BIND_SYMBOL(ApexCodec_Buffer_getBufferInfo);
+        BIND_SYMBOL(ApexCodec_Buffer_getConfigUpdates);
+        BIND_SYMBOL(ApexCodec_Buffer_getGraphicBuffer);
+        BIND_SYMBOL(ApexCodec_Buffer_getLinearBuffer);
+        BIND_SYMBOL(ApexCodec_Buffer_getType);
+        BIND_SYMBOL(ApexCodec_Buffer_setConfigUpdates);
+        BIND_SYMBOL(ApexCodec_Buffer_setGraphicBuffer);
+        BIND_SYMBOL(ApexCodec_Buffer_setLinearBuffer);
         BIND_SYMBOL(ApexCodec_Component_create);
         BIND_SYMBOL(ApexCodec_Component_destroy);
         BIND_SYMBOL(ApexCodec_Component_flush);
         BIND_SYMBOL(ApexCodec_Component_getConfigurable);
         BIND_SYMBOL(ApexCodec_Component_process);
-        BIND_SYMBOL(ApexCodec_Component_start);
         BIND_SYMBOL(ApexCodec_Component_reset);
+        BIND_SYMBOL(ApexCodec_Component_start);
         BIND_SYMBOL(ApexCodec_Configurable_config);
         BIND_SYMBOL(ApexCodec_Configurable_query);
         BIND_SYMBOL(ApexCodec_Configurable_querySupportedParams);
         BIND_SYMBOL(ApexCodec_Configurable_querySupportedValues);
         BIND_SYMBOL(ApexCodec_GetComponentStore);
+        BIND_SYMBOL(ApexCodec_ParamDescriptors_destroy);
         BIND_SYMBOL(ApexCodec_ParamDescriptors_getDescriptor);
         BIND_SYMBOL(ApexCodec_ParamDescriptors_getIndices);
-        BIND_SYMBOL(ApexCodec_ParamDescriptors_release);
+        BIND_SYMBOL(ApexCodec_SettingResults_destroy);
         BIND_SYMBOL(ApexCodec_SettingResults_getResultAtIndex);
-        BIND_SYMBOL(ApexCodec_SettingResults_release);
+        BIND_SYMBOL(ApexCodec_SupportedValues_destroy);
         BIND_SYMBOL(ApexCodec_SupportedValues_getTypeAndValues);
-        BIND_SYMBOL(ApexCodec_SupportedValues_release);
         BIND_SYMBOL(ApexCodec_Traits_get);
 #undef BIND_SYMBOL
 
         // Check every symbol is bound.
         for (int i = 0; i < k_MethodCount; ++i) {
             if (mMethods[i] == nullptr) {
-                ALOGI("Uninitialized method in libapexcodecs_lazy at index: %d", i);
+                ALOGI("Uninitialized method in "
+                      "libcom.android.media.swcodec.apexcodecs_lazy at index: %d", i);
                 return false;
             }
         }
@@ -146,7 +172,7 @@
     }
 
     RWLock mLock;
-    // Table of methods pointers in libapexcodecs APIs.
+    // Table of methods pointers in libcom.android.media.swcodec.apexcodecs APIs.
     void* mMethods[k_MethodCount];
     bool mInit{false};
 };
@@ -173,6 +199,82 @@
     INVOKE_METHOD(ApexCodec_Traits_get, nullptr, store, index);
 }
 
+ApexCodec_Buffer *ApexCodec_Buffer_create() {
+    INVOKE_METHOD(ApexCodec_Buffer_create, nullptr);
+}
+
+void ApexCodec_Buffer_destroy(ApexCodec_Buffer *buffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_destroy, void(), buffer);
+}
+
+void ApexCodec_Buffer_clear(ApexCodec_Buffer *buffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_clear, void(), buffer);
+}
+
+ApexCodec_BufferType ApexCodec_Buffer_getType(ApexCodec_Buffer *buffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_getType, APEXCODEC_BUFFER_TYPE_EMPTY, buffer);
+}
+
+void ApexCodec_Buffer_setBufferInfo(
+        ApexCodec_Buffer *_Nonnull buffer,
+        ApexCodec_BufferFlags flags,
+        uint64_t frameIndex,
+        uint64_t timestampUs) {
+    INVOKE_METHOD(ApexCodec_Buffer_setBufferInfo, void(),
+                  buffer, flags, frameIndex, timestampUs);
+}
+
+ApexCodec_Status ApexCodec_Buffer_setLinearBuffer(
+        ApexCodec_Buffer *buffer,
+        const ApexCodec_LinearBuffer *linearBuffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_setLinearBuffer, APEXCODEC_STATUS_OMITTED,
+                  buffer, linearBuffer);
+}
+
+ApexCodec_Status ApexCodec_Buffer_setGraphicBuffer(
+        ApexCodec_Buffer *buffer,
+        AHardwareBuffer *graphicBuffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_setGraphicBuffer, APEXCODEC_STATUS_OMITTED,
+                  buffer, graphicBuffer);
+}
+
+ApexCodec_Status ApexCodec_Buffer_setConfigUpdates(
+        ApexCodec_Buffer *buffer,
+        const ApexCodec_LinearBuffer *configUpdates) {
+    INVOKE_METHOD(ApexCodec_Buffer_setConfigUpdates, APEXCODEC_STATUS_OMITTED,
+                  buffer, configUpdates);
+}
+
+ApexCodec_Status ApexCodec_Buffer_getBufferInfo(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_BufferFlags *outFlags,
+        uint64_t *outFrameIndex,
+        uint64_t *outTimestampUs) {
+    INVOKE_METHOD(ApexCodec_Buffer_getBufferInfo, APEXCODEC_STATUS_OMITTED,
+                  buffer, outFlags, outFrameIndex, outTimestampUs);
+}
+
+ApexCodec_Status ApexCodec_Buffer_getLinearBuffer(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_LinearBuffer *outLinearBuffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_getLinearBuffer, APEXCODEC_STATUS_OMITTED,
+                  buffer, outLinearBuffer);
+}
+
+ApexCodec_Status ApexCodec_Buffer_getGraphicBuffer(
+        ApexCodec_Buffer *buffer,
+        AHardwareBuffer **outGraphicBuffer) {
+    INVOKE_METHOD(ApexCodec_Buffer_getGraphicBuffer, APEXCODEC_STATUS_OMITTED,
+                  buffer, outGraphicBuffer);
+}
+
+ApexCodec_Status ApexCodec_Buffer_getConfigUpdates(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_LinearBuffer *outConfigUpdates,
+        bool *outOwnedByClient) {
+    INVOKE_METHOD(ApexCodec_Buffer_getConfigUpdates, APEXCODEC_STATUS_OMITTED,
+                  buffer, outConfigUpdates, outOwnedByClient);
+}
 ApexCodec_Status ApexCodec_Component_create(
         ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
     INVOKE_METHOD(ApexCodec_Component_create, APEXCODEC_STATUS_OMITTED, store, name, comp);
@@ -209,8 +311,8 @@
                   supportedValues, type, numberType, values, numValues);
 }
 
-void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {
-    INVOKE_METHOD(ApexCodec_SupportedValues_release, void(), values);
+void ApexCodec_SupportedValues_destroy(ApexCodec_SupportedValues *values) {
+    INVOKE_METHOD(ApexCodec_SupportedValues_destroy, void(), values);
 }
 
 ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
@@ -224,8 +326,8 @@
                   results, index, failure, field, conflicts, numConflicts);
 }
 
-void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {
-    INVOKE_METHOD(ApexCodec_SettingResults_release, void(), results);
+void ApexCodec_SettingResults_destroy(ApexCodec_SettingResults *results) {
+    INVOKE_METHOD(ApexCodec_SettingResults_destroy, void(), results);
 }
 
 ApexCodec_Status ApexCodec_Component_process(
@@ -274,9 +376,9 @@
                   descriptors, index, attr, name, dependencies, numDependencies);
 }
 
-ApexCodec_Status ApexCodec_ParamDescriptors_release(
+void ApexCodec_ParamDescriptors_destroy(
         ApexCodec_ParamDescriptors *descriptors) {
-    INVOKE_METHOD(ApexCodec_ParamDescriptors_release, APEXCODEC_STATUS_OMITTED, descriptors);
+    INVOKE_METHOD(ApexCodec_ParamDescriptors_destroy, void(), descriptors);
 }
 
 ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index ff356fc..6f4e834 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -18,6 +18,9 @@
 #include <fcntl.h>
 #include <unistd.h>
 
+#include <gui/BufferItemConsumer.h>
+#include <gui/BufferQueue.h>
+#include <gui/Surface.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <private/android/AHardwareBufferHelpers.h>
 #include <vndk/hardware_buffer.h>
@@ -57,6 +60,82 @@
 
 } // anonymous namespace
 
+using ::android::BufferQueue;
+using ::android::BufferItemConsumer;
+using ::android::ConsumerListener;
+using ::android::IConsumerListener;
+using ::android::IGraphicBufferProducer;
+using ::android::IGraphicBufferConsumer;
+using ::android::Surface;
+
+class GraphicsTracker::PlaceHolderSurface {
+public:
+    static const int kMaxAcquiredBuffer = 2;
+    // Large enough to cover allocations made while stop/release is in progress.
+    static const int kMaxDequeuedBuffer = 16;
+
+    explicit PlaceHolderSurface(uint64_t usage) : mUsage(usage) {}
+
+    ~PlaceHolderSurface() {
+        if (mInit == C2_NO_INIT) {
+            return;
+        }
+        if (mSurface) {
+            mSurface->disconnect(NATIVE_WINDOW_API_MEDIA);
+        }
+    }
+
+    c2_status_t allocate(uint32_t width, uint32_t height,
+            uint32_t format, uint64_t usage,
+            AHardwareBuffer **pBuf, sp<Fence> *fence) {
+        std::unique_lock<std::mutex> l(mLock);
+        if (mInit == C2_NO_INIT) {
+            mInit = init();
+        }
+
+        if (!mBufferItemConsumer || !mSurface) {
+            ALOGE("PlaceHolderSurface not properly initialized");
+            return C2_CORRUPTED;
+        }
+
+        native_window_set_usage(mSurface.get(), usage);
+        native_window_set_buffers_format(mSurface.get(), format);
+        native_window_set_buffers_dimensions(mSurface.get(), width, height);
+
+        ::android::status_t res;
+        std::vector<Surface::BatchBuffer> buffers(1);
+        res = mSurface->dequeueBuffers(&buffers);
+        if (res != ::android::OK) {
+            ALOGE("dequeueBuffers failed from PlaceHolderSurface %d", res);
+            return C2_CORRUPTED;
+        }
+        sp<GraphicBuffer> gb = GraphicBuffer::from(buffers[0].buffer);
+        *pBuf = AHardwareBuffer_from_GraphicBuffer(gb.get());
+        AHardwareBuffer_acquire(*pBuf);
+        *fence = new Fence(buffers[0].fenceFd);
+        return C2_OK;
+    }
+
+private:
+    uint64_t mUsage;
+    sp<Surface> mSurface;
+    sp<BufferItemConsumer> mBufferItemConsumer;
+    c2_status_t mInit = C2_NO_INIT;
+    std::mutex mLock;
+
+    c2_status_t init() {
+        std::tie(mBufferItemConsumer, mSurface) =
+                BufferItemConsumer::create(mUsage, kMaxAcquiredBuffer);
+
+        if (mSurface) {
+            mSurface->connect(NATIVE_WINDOW_API_MEDIA, nullptr);
+            mSurface->setMaxDequeuedBufferCount(kMaxDequeuedBuffer);
+        }
+        return C2_OK;
+    }
+};
+
+
 GraphicsTracker::BufferItem::BufferItem(
         uint32_t generation, int slot, const sp<GraphicBuffer>& buf, const sp<Fence>& fence) :
         mInit{false}, mGeneration{generation}, mSlot{slot} {
@@ -256,9 +335,13 @@
     // to the old surface in MediaCodec and allocate from the new surface from
     // GraphicsTracker cannot be synchronized properly.
     uint64_t bqId{0ULL};
+    uint64_t bqUsage{0ULL};
     ::android::status_t ret = ::android::OK;
     if (igbp) {
         ret = igbp->getUniqueId(&bqId);
+        if (ret == ::android::OK) {
+            (void)igbp->getConsumerUsage(&bqUsage);
+        }
     }
     if (ret != ::android::OK ||
             prevCache->mGeneration == generation) {
@@ -287,7 +370,8 @@
     }
     ALOGD("new surface configured with id:%llu gen:%lu maxDequeue:%d",
           (unsigned long long)bqId, (unsigned long)generation, prevDequeueCommitted);
-    std::shared_ptr<BufferCache> newCache = std::make_shared<BufferCache>(bqId, generation, igbp);
+    std::shared_ptr<BufferCache> newCache =
+            std::make_shared<BufferCache>(bqId, bqUsage, generation, igbp);
     {
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
@@ -501,6 +585,9 @@
     if (mStopRequested) {
         return;
     }
+    if (mBufferCache && mBufferCache->mBqId != 0) {
+        mReleaseSurface.reset(new PlaceHolderSurface(mBufferCache->mUsage));
+    }
     mStopRequested = true;
     writeIncDequeueableLocked(kMaxDequeueMax - 1);
 }
@@ -764,9 +851,7 @@
         }
     }
 
-    int alloced = mAllocAfterStopRequested++;
     *rFence = Fence::NO_FENCE;
-    ALOGD("_allocateDirect() allocated %d buffer", alloced);
     return C2_OK;
 }
 
@@ -783,7 +868,14 @@
         std::unique_lock<std::mutex> l(mLock);
         if (mStopRequested) {
             l.unlock();
-            res = _allocateDirect(width, height, format, usage, buf, rFence);
+            if (mReleaseSurface) {
+                res = mReleaseSurface->allocate(width, height, format, usage, buf, rFence);
+            } else {
+                res = _allocateDirect(width, height, format, usage, buf, rFence);
+            }
+            if (res == C2_OK) {
+                ALOGD("allocated %d buffer after stop", ++mAllocAfterStopRequested);
+            }
             // Delay a little bit for HAL to receive stop()/release() request.
             ::usleep(kAllocateDirectDelayUs);
             return res;
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 17e5b62..e2af0d9 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -77,12 +77,14 @@
 #include <private/android/AHardwareBufferHelpers.h>
 #include <system/window.h> // for NATIVE_WINDOW_QUERY_*
 
+#include <algorithm>
 #include <deque>
 #include <iterator>
 #include <limits>
 #include <map>
 #include <mutex>
 #include <optional>
+#include <ranges>
 #include <sstream>
 #include <thread>
 #include <type_traits>
@@ -1066,7 +1068,7 @@
                 mBase, &config, &result);
         base::ScopeGuard guard([result] {
             if (result) {
-                ApexCodec_SettingResults_release(result);
+                ApexCodec_SettingResults_destroy(result);
             }
         });
         size_t index = 0;
@@ -1115,7 +1117,7 @@
         ApexCodec_Configurable_querySupportedParams(mBase, &paramDescs);
         base::ScopeGuard guard([paramDescs] {
             if (paramDescs) {
-                ApexCodec_ParamDescriptors_release(paramDescs);
+                ApexCodec_ParamDescriptors_destroy(paramDescs);
             }
         });
         uint32_t *indices = nullptr;
@@ -1173,7 +1175,7 @@
             fields[i].status = (c2_status_t)queries[i].status;
             FromApex(queries[i].values, &fields[i].values);
             if (queries[i].values) {
-                ApexCodec_SupportedValues_release(queries[i].values);
+                ApexCodec_SupportedValues_destroy(queries[i].values);
                 queries[i].values = nullptr;
             }
         }
@@ -1434,7 +1436,7 @@
             mListener(listener),
             mComponent(comp),
             mStopped(false),
-            mOutputBufferType(APEXCODEC_BUFFER_TYPE_INVALID) {
+            mOutputBufferType(APEXCODEC_BUFFER_TYPE_EMPTY) {
     }
 
     void start() {
@@ -1455,7 +1457,7 @@
             LOG(ERROR) << "ApexHandler::start -- unrecognized component kind " << kind.value;
             return;
         }
-        ApexCodec_BufferType outputBufferType = APEXCODEC_BUFFER_TYPE_INVALID;
+        ApexCodec_BufferType outputBufferType = APEXCODEC_BUFFER_TYPE_EMPTY;
         if (domain.value == C2Component::DOMAIN_AUDIO) {
             // For both encoders and decoders the output buffer type is linear.
             outputBufferType = APEXCODEC_BUFFER_TYPE_LINEAR;
@@ -1528,10 +1530,11 @@
                 LOG(DEBUG) << "handleWork -- listener died.";
                 return;
             }
-            ApexCodec_Buffer input;
-            input.flags = (ApexCodec_BufferFlags)workItem->input.flags;
-            input.frameIndex = workItem->input.ordinal.frameIndex.peekll();
-            input.timestampUs = workItem->input.ordinal.timestamp.peekll();
+            thread_local ApexCodec_Buffer *input = ApexCodec_Buffer_create();
+            ApexCodec_Buffer_clear(input);
+            ApexCodec_BufferFlags flags = (ApexCodec_BufferFlags)workItem->input.flags;
+            uint64_t frameIndex = workItem->input.ordinal.frameIndex.peekll();
+            uint64_t timestampUs = workItem->input.ordinal.timestamp.peekll();
 
             if (workItem->input.buffers.size() > 1) {
                 LOG(ERROR) << "handleWork -- input buffer size is "
@@ -1543,7 +1546,7 @@
             if (!workItem->input.buffers.empty()) {
                 buffer = workItem->input.buffers[0];
             }
-            if (!FillMemory(buffer, &input, &linearView)) {
+            if (!FillMemory(buffer, input, &linearView, flags, frameIndex, timestampUs)) {
                 LOG(ERROR) << "handleWork -- failed to map input";
                 return;
             }
@@ -1553,31 +1556,46 @@
                 listener->onError(mComponent, C2_CORRUPTED);
                 return;
             }
-            input.configUpdates.data = configUpdatesVector.data();
-            input.configUpdates.size = configUpdatesVector.size();
+            ApexCodec_LinearBuffer configUpdates;
+            configUpdates.data = configUpdatesVector.data();
+            configUpdates.size = configUpdatesVector.size();
+            ApexCodec_Buffer_setConfigUpdates(input, &configUpdates);
             mWorkMap.insert_or_assign(
                     workItem->input.ordinal.frameIndex.peekll(), std::move(workItem));
 
             std::list<std::unique_ptr<C2Work>> workItems;
             bool inputDrained = false;
             while (!inputDrained) {
-                ApexCodec_Buffer output;
+                thread_local ApexCodec_Buffer *output = ApexCodec_Buffer_create();
+                ApexCodec_Buffer_clear(output);
                 std::shared_ptr<C2LinearBlock> linearBlock;
                 std::optional<C2WriteView> linearView;
                 std::shared_ptr<C2GraphicBlock> graphicBlock;
-                allocOutputBuffer(&output, &linearBlock, &linearView, &graphicBlock);
+                allocOutputBuffer(output, &linearBlock, &linearView, &graphicBlock);
                 size_t consumed = 0;
                 size_t produced = 0;
                 ApexCodec_Status status = ApexCodec_Component_process(
-                        mApexComponent, &input, &output, &consumed, &produced);
+                        mApexComponent, input, output, &consumed, &produced);
                 if (status == APEXCODEC_STATUS_NO_MEMORY) {
                     continue;
+                } else if (status != APEXCODEC_STATUS_OK) {
+                    LOG(ERROR) << "handleWork -- component process failed with status " << status;
+                    produced = 0;
                 }
                 if (produced > 0) {
-                    auto it = mWorkMap.find(output.frameIndex);
+                    ApexCodec_BufferFlags outputFlags;
+                    uint64_t outputFrameIndex;
+                    uint64_t outputTimestampUs;
+                    ApexCodec_Status status = ApexCodec_Buffer_getBufferInfo(
+                            output, &outputFlags, &outputFrameIndex, &outputTimestampUs);
+                    if (status != APEXCODEC_STATUS_OK) {
+                        LOG(WARNING) << "handleWork -- failed to get output buffer info";
+                        outputFrameIndex = ~(uint64_t(0));
+                    }
+                    auto it = mWorkMap.find(outputFrameIndex);
                     std::unique_ptr<C2Work> outputWorkItem;
                     if (it != mWorkMap.end()) {
-                        if (output.flags & APEXCODEC_FLAG_INCOMPLETE) {
+                        if (outputFlags & APEXCODEC_FLAG_INCOMPLETE) {
                             outputWorkItem = std::make_unique<C2Work>();
                             outputWorkItem->input.ordinal = it->second->input.ordinal;
                             outputWorkItem->input.flags = it->second->input.flags;
@@ -1587,10 +1605,10 @@
                         }
                     } else {
                         LOG(WARNING) << "handleWork -- no work item found for output frame index "
-                                    << output.frameIndex;
+                                    << outputFrameIndex;
                         outputWorkItem = std::make_unique<C2Work>();
-                        outputWorkItem->input.ordinal.frameIndex = output.frameIndex;
-                        outputWorkItem->input.ordinal.timestamp = output.timestampUs;
+                        outputWorkItem->input.ordinal.frameIndex = outputFrameIndex;
+                        outputWorkItem->input.ordinal.timestamp = outputTimestampUs;
                     }
                     outputWorkItem->worklets.emplace_back(new C2Worklet);
                     const std::unique_ptr<C2Worklet> &worklet = outputWorkItem->worklets.front();
@@ -1598,35 +1616,52 @@
                         LOG(ERROR) << "handleWork -- output work item has null worklet";
                         return;
                     }
-                    worklet->output.ordinal.frameIndex = output.frameIndex;
-                    worklet->output.ordinal.timestamp = output.timestampUs;
+                    worklet->output.ordinal.frameIndex = outputFrameIndex;
+                    worklet->output.ordinal.timestamp = outputTimestampUs;
+                    ApexCodec_LinearBuffer outputConfigUpdates;
+                    bool ownedByClient = false;
+                    status = ApexCodec_Buffer_getConfigUpdates(
+                            output, &outputConfigUpdates, &ownedByClient);
+                    if (status != APEXCODEC_STATUS_OK) {
+                        LOG(WARNING) << "handleWork -- failed to get output config updates";
+                        return;
+                    } else if (ownedByClient) {
+                        LOG(WARNING) << "handleWork -- output config updates are owned by client";
+                        return;
+                    }
                     // non-owning hidl_vec<> to wrap around the output config updates
-                    hidl_vec<uint8_t> outputConfigUpdates;
-                    outputConfigUpdates.setToExternal(
-                            output.configUpdates.data, output.configUpdates.size);
+                    hidl_vec<uint8_t> outputConfigUpdatesVec;
+                    outputConfigUpdatesVec.setToExternal(
+                            outputConfigUpdates.data, outputConfigUpdates.size);
                     std::vector<C2Param*> outputConfigUpdatePtrs;
-                    parseParamsBlob(&outputConfigUpdatePtrs, outputConfigUpdates);
+                    parseParamsBlob(&outputConfigUpdatePtrs, outputConfigUpdatesVec);
                     worklet->output.configUpdate.clear();
                     std::ranges::transform(
                             outputConfigUpdatePtrs,
                             std::back_inserter(worklet->output.configUpdate),
                             [](C2Param* param) { return C2Param::Copy(*param); });
-                    worklet->output.flags = (C2FrameData::flags_t)output.flags;
+                    worklet->output.flags = (C2FrameData::flags_t)outputFlags;
 
                     workItems.push_back(std::move(outputWorkItem));
                 }
 
+                ApexCodec_BufferType inputType = ApexCodec_Buffer_getType(input);
                 // determine whether the input buffer is drained
-                if (input.type == APEXCODEC_BUFFER_TYPE_LINEAR) {
-                    if (input.memory.linear.size < consumed) {
+                if (inputType == APEXCODEC_BUFFER_TYPE_LINEAR) {
+                    ApexCodec_LinearBuffer inputBuffer;
+                    status = ApexCodec_Buffer_getLinearBuffer(input, &inputBuffer);
+                    if (status != APEXCODEC_STATUS_OK) {
+                        LOG(WARNING) << "handleWork -- failed to get input linear buffer";
+                        inputDrained = true;
+                    } else if (inputBuffer.size < consumed) {
                         LOG(WARNING) << "handleWork -- component consumed more bytes "
-                                    << "than the input buffer size";
+                                     << "than the input buffer size";
                         inputDrained = true;
                     } else {
-                        input.memory.linear.data += consumed;
-                        input.memory.linear.size -= consumed;
+                        inputBuffer.data += consumed;
+                        inputBuffer.size -= consumed;
                     }
-                } else if (input.type == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+                } else if (inputType == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
                     inputDrained = (consumed > 0);
                 }
             }
@@ -1669,124 +1704,171 @@
             std::shared_ptr<C2LinearBlock> *linearBlock,
             std::optional<C2WriteView> *linearView,
             std::shared_ptr<C2GraphicBlock> *graphicBlock) {
-        if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_LINEAR) {
-            if (!ensureBlockPool()) {
-                return;
-            }
-            {
-                std::shared_ptr<Component> comp = mComponent.lock();
-                if (!comp) {
+        if (__builtin_available(android 36, *)) {
+            switch (mOutputBufferType) {
+                case APEXCODEC_BUFFER_TYPE_LINEAR: {
+                    if (!ensureBlockPool()) {
+                        return;
+                    }
+                    {
+                        std::shared_ptr<Component> comp = mComponent.lock();
+                        if (!comp) {
+                            return;
+                        }
+                        C2StreamMaxBufferSizeInfo::output maxBufferSize(0u /* stream */);
+                        comp->query({&maxBufferSize}, {}, C2_MAY_BLOCK, {});
+                        mLinearBlockCapacity = maxBufferSize ? maxBufferSize.value : 1024 * 1024;
+                    }
+                    c2_status_t status = mBlockPool->fetchLinearBlock(
+                            mLinearBlockCapacity,
+                            C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+                            linearBlock);
+                    if (!(*linearBlock)) {
+                        return;
+                    }
+                    linearView->emplace((*linearBlock)->map().get());
+                    if ((*linearView)->error() != C2_OK) {
+                        return;
+                    }
+                    ApexCodec_LinearBuffer linear;
+                    linear.data = (*linearView)->data();
+                    linear.size = (*linearView)->capacity();
+                    ApexCodec_Status apexStatus = ApexCodec_Buffer_setLinearBuffer(
+                            output, &linear);
+                    if (apexStatus != APEXCODEC_STATUS_OK) {
+                        LOG(ERROR) << "allocOutputBuffer -- failed to set linear buffer";
+                        return;
+                    }
+                    break;
+                }
+                case APEXCODEC_BUFFER_TYPE_GRAPHIC: {
+                    if (!ensureBlockPool()) {
+                        return;
+                    }
+                    {
+                        std::shared_ptr<Component> comp = mComponent.lock();
+                        if (!comp) {
+                            return;
+                        }
+                        C2StreamMaxPictureSizeTuning::output maxPictureSize(0u /* stream */);
+                        C2StreamPictureSizeInfo::output pictureSize(0u /* stream */);
+                        C2StreamPixelFormatInfo::output pixelFormat(0u /* stream */);
+                        comp->query({&maxPictureSize, &pictureSize, &pixelFormat},
+                                    {}, C2_MAY_BLOCK, {});
+                        mWidth = maxPictureSize ? maxPictureSize.width : pictureSize.width;
+                        mHeight = maxPictureSize ? maxPictureSize.height : pictureSize.height;
+                        mFormat = pixelFormat ? pixelFormat.value : HAL_PIXEL_FORMAT_YCBCR_420_888;
+                    }
+                    c2_status_t status = mBlockPool->fetchGraphicBlock(
+                            mWidth, mHeight, mFormat,
+                            C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+                            graphicBlock);
+                    if (!(*graphicBlock)) {
+                        return;
+                    }
+                    const C2Handle *handle = (*graphicBlock)->handle();
+                    uint32_t width, height, format, stride, igbp_slot, generation;
+                    uint64_t usage, igbp_id;
+                    _UnwrapNativeCodec2GrallocMetadata(
+                            handle, &width, &height, &format, &usage, &stride, &generation,
+                            &igbp_id, &igbp_slot);
+                    native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+                    sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+                            grallocHandle, GraphicBuffer::CLONE_HANDLE,
+                            width, height, format, 1, usage, stride);
+                    native_handle_delete(grallocHandle);
+                    AHardwareBuffer *hardwareBuffer =
+                        AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+                    AHardwareBuffer_acquire(hardwareBuffer);
+                    ApexCodec_Status apexStatus = ApexCodec_Buffer_setGraphicBuffer(
+                            output, hardwareBuffer);
+                    if (apexStatus != APEXCODEC_STATUS_OK) {
+                        LOG(ERROR) << "allocOutputBuffer -- failed to set graphic buffer";
+                        return;
+                    }
+                    break;
+                }
+                default: {
+                    LOG(ERROR) << "allocOutputBuffer -- unsupported output buffer type: "
+                            << mOutputBufferType;
                     return;
                 }
-                C2StreamMaxBufferSizeInfo::output maxBufferSize(0u /* stream */);
-                comp->query({&maxBufferSize}, {}, C2_MAY_BLOCK, {});
-                mLinearBlockCapacity = maxBufferSize ? maxBufferSize.value : 1024 * 1024;
             }
-            output->type = APEXCODEC_BUFFER_TYPE_LINEAR;
-            c2_status_t status = mBlockPool->fetchLinearBlock(
-                    mLinearBlockCapacity,
-                    C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
-                    linearBlock);
-            if (!(*linearBlock)) {
-                return;
-            }
-            linearView->emplace((*linearBlock)->map().get());
-            if ((*linearView)->error() != C2_OK) {
-                return;
-            }
-            output->memory.linear.data = (*linearView)->data();
-            output->memory.linear.size = (*linearView)->capacity();
-        } else if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
-            if (!ensureBlockPool()) {
-                return;
-            }
-            {
-                std::shared_ptr<Component> comp = mComponent.lock();
-                if (!comp) {
-                    return;
-                }
-                C2StreamMaxPictureSizeTuning::output maxPictureSize(0u /* stream */);
-                C2StreamPictureSizeInfo::output pictureSize(0u /* stream */);
-                C2StreamPixelFormatInfo::output pixelFormat(0u /* stream */);
-                comp->query({&maxPictureSize, &pictureSize, &pixelFormat}, {}, C2_MAY_BLOCK, {});
-                mWidth = maxPictureSize ? maxPictureSize.width : pictureSize.width;
-                mHeight = maxPictureSize ? maxPictureSize.height : pictureSize.height;
-                mFormat = pixelFormat ? pixelFormat.value : HAL_PIXEL_FORMAT_YCBCR_420_888;
-            }
-            output->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
-            c2_status_t status = mBlockPool->fetchGraphicBlock(
-                    mWidth, mHeight, mFormat,
-                    C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
-                    graphicBlock);
-            if (!(*graphicBlock)) {
-                return;
-            }
-            const C2Handle *handle = (*graphicBlock)->handle();
-            uint32_t width, height, format, stride, igbp_slot, generation;
-            uint64_t usage, igbp_id;
-            _UnwrapNativeCodec2GrallocMetadata(
-                    handle, &width, &height, &format, &usage, &stride, &generation,
-                    &igbp_id, &igbp_slot);
-            native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
-            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
-                    grallocHandle, GraphicBuffer::CLONE_HANDLE,
-                    width, height, format, 1, usage, stride);
-            native_handle_delete(grallocHandle);
-            AHardwareBuffer *hardwareBuffer =
-                AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
-            AHardwareBuffer_acquire(hardwareBuffer);
-            output->memory.graphic = hardwareBuffer;
         } else {
-            LOG(ERROR) << "allocOutputBuffer -- unsupported output buffer type: "
-                       << mOutputBufferType;
-            return;
+            LOG(ERROR) << "allocOutputBuffer -- ApexCodec is not supported";
         }
     }
 
     static bool FillMemory(
             const std::shared_ptr<C2Buffer>& buffer,
             ApexCodec_Buffer* apexBuffer,
-            std::optional<C2ReadView>* linearView) {
-        if (buffer->data().type() == C2BufferData::LINEAR) {
-            apexBuffer->type = APEXCODEC_BUFFER_TYPE_LINEAR;
-            if (buffer->data().linearBlocks().empty()) {
-                apexBuffer->memory.linear.data = nullptr;
-                apexBuffer->memory.linear.size = 0;
+            std::optional<C2ReadView>* linearView,
+            ApexCodec_BufferFlags flags,
+            uint64_t frameIndex,
+            uint64_t timestampUs) {
+        if (__builtin_available(android 36, *)) {
+            if (buffer->data().type() == C2BufferData::LINEAR) {
+                if (buffer->data().linearBlocks().empty()) {
+                    ApexCodec_Status status = ApexCodec_Buffer_setLinearBuffer(apexBuffer, nullptr);
+                    if (status != APEXCODEC_STATUS_OK) {
+                        LOG(ERROR) << "FillMemory -- failed to set linear buffer";
+                        return false;
+                    }
+                    ApexCodec_Buffer_setBufferInfo(apexBuffer, flags, frameIndex, timestampUs);
+                    return true;
+                } else if (buffer->data().linearBlocks().size() > 1) {
+                    return false;
+                }
+                linearView->emplace(buffer->data().linearBlocks().front().map().get());
+                if ((*linearView)->error() != C2_OK) {
+                    return false;
+                }
+                ApexCodec_LinearBuffer linear;
+                linear.data = const_cast<uint8_t*>((*linearView)->data());
+                linear.size = (*linearView)->capacity();
+                ApexCodec_Status status = ApexCodec_Buffer_setLinearBuffer(apexBuffer, &linear);
+                if (status != APEXCODEC_STATUS_OK) {
+                    LOG(ERROR) << "FillMemory -- failed to set linear buffer";
+                    return false;
+                }
+                ApexCodec_Buffer_setBufferInfo(apexBuffer, flags, frameIndex, timestampUs);
                 return true;
-            } else if (buffer->data().linearBlocks().size() > 1) {
-                return false;
-            }
-            linearView->emplace(buffer->data().linearBlocks().front().map().get());
-            if ((*linearView)->error() != C2_OK) {
-                return false;
-            }
-            apexBuffer->memory.linear.data = const_cast<uint8_t*>((*linearView)->data());
-            apexBuffer->memory.linear.size = (*linearView)->capacity();
-            return true;
-        } else if (buffer->data().type() == C2BufferData::GRAPHIC) {
-            apexBuffer->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
-            if (buffer->data().graphicBlocks().empty()) {
-                apexBuffer->memory.graphic = nullptr;
+            } else if (buffer->data().type() == C2BufferData::GRAPHIC) {
+                if (buffer->data().graphicBlocks().empty()) {
+                    ApexCodec_Status status = ApexCodec_Buffer_setGraphicBuffer(
+                            apexBuffer, nullptr);
+                    if (status != APEXCODEC_STATUS_OK) {
+                        LOG(ERROR) << "FillMemory -- failed to set graphic buffer";
+                        return false;
+                    }
+                    ApexCodec_Buffer_setBufferInfo(apexBuffer, flags, frameIndex, timestampUs);
+                    return true;
+                } else if (buffer->data().graphicBlocks().size() > 1) {
+                    return false;
+                }
+                const C2Handle *handle = buffer->data().graphicBlocks().front().handle();
+                uint32_t width, height, format, stride, igbp_slot, generation;
+                uint64_t usage, igbp_id;
+                _UnwrapNativeCodec2GrallocMetadata(
+                        handle, &width, &height, &format, &usage, &stride, &generation,
+                        &igbp_id, &igbp_slot);
+                native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+                sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+                        grallocHandle, GraphicBuffer::CLONE_HANDLE,
+                        width, height, format, 1, usage, stride);
+                native_handle_delete(grallocHandle);
+                AHardwareBuffer *hardwareBuffer =
+                    AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+                AHardwareBuffer_acquire(hardwareBuffer);
+                ApexCodec_Status status = ApexCodec_Buffer_setGraphicBuffer(
+                        apexBuffer, hardwareBuffer);
+                if (status != APEXCODEC_STATUS_OK) {
+                    LOG(ERROR) << "FillMemory -- failed to set graphic buffer";
+                    return false;
+                }
+                ApexCodec_Buffer_setBufferInfo(apexBuffer, flags, frameIndex, timestampUs);
                 return true;
-            } else if (buffer->data().graphicBlocks().size() > 1) {
-                return false;
             }
-            const C2Handle *handle = buffer->data().graphicBlocks().front().handle();
-            uint32_t width, height, format, stride, igbp_slot, generation;
-            uint64_t usage, igbp_id;
-            _UnwrapNativeCodec2GrallocMetadata(
-                    handle, &width, &height, &format, &usage, &stride, &generation,
-                    &igbp_id, &igbp_slot);
-            native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
-            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
-                    grallocHandle, GraphicBuffer::CLONE_HANDLE,
-                    width, height, format, 1, usage, stride);
-            native_handle_delete(grallocHandle);
-            AHardwareBuffer *hardwareBuffer =
-                AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
-            AHardwareBuffer_acquire(hardwareBuffer);
-            apexBuffer->memory.graphic = hardwareBuffer;
-            return true;
         }
         return false;
     }
@@ -3029,9 +3111,12 @@
     }
     if (mAidlBase) {
         c2_aidl::WorkBundle workBundle;
-        if (!c2_aidl::utils::ToAidl(&workBundle, *items, mAidlBufferPoolSender.get())) {
-            LOG(ERROR) << "queue -- bad input.";
-            return C2_TRANSACTION_FAILED;
+        {
+            ScopedTrace trace(ATRACE_TAG, "CCodec::Codec2Client::ToAidl");
+            if (!c2_aidl::utils::ToAidl(&workBundle, *items, mAidlBufferPoolSender.get())) {
+                LOG(ERROR) << "queue -- bad input.";
+                return C2_TRANSACTION_FAILED;
+            }
         }
         ::ndk::ScopedAStatus transStatus = mAidlBase->queue(workBundle);
         return GetC2Status(transStatus, "queue");
@@ -3364,7 +3449,7 @@
         const C2ConstGraphicBlock& block,
         const QueueBufferInput& input,
         QueueBufferOutput* output) {
-    ScopedTrace trace(ATRACE_TAG,"Codec2Client::Component::queueToOutputSurface");
+    ScopedTrace trace(ATRACE_TAG,"CCodec::Codec2Client::Component::queueToOutputSurface");
     if (mAidlBase) {
         std::shared_ptr<AidlGraphicBufferAllocator> gba =
                 mGraphicBufferAllocators->current();
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 536caaa..2338e6e 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -224,6 +224,7 @@
         static constexpr int kNumSlots = ::android::BufferQueueDefs::NUM_BUFFER_SLOTS;
 
         uint64_t mBqId;
+        uint64_t mUsage;
         uint32_t mGeneration;
         ::android::sp<IGraphicBufferProducer> mIgbp;
 
@@ -245,9 +246,11 @@
 
         std::atomic<int> mNumAttached;
 
-        BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr}, mNumAttached{0} {}
-        BufferCache(uint64_t bqId, uint32_t generation, const sp<IGraphicBufferProducer>& igbp) :
-            mBqId{bqId}, mGeneration{generation}, mIgbp{igbp}, mNumAttached{0} {}
+        BufferCache() : mBqId{0ULL}, mUsage{0ULL},
+                mGeneration{0}, mIgbp{nullptr}, mNumAttached{0} {}
+        BufferCache(uint64_t bqId, uint64_t usage, uint32_t generation,
+                const sp<IGraphicBufferProducer>& igbp) :
+            mBqId{bqId}, mUsage{usage}, mGeneration{generation}, mIgbp{igbp}, mNumAttached{0} {}
 
         ~BufferCache();
 
@@ -307,6 +310,10 @@
     bool mStopRequested;
     std::atomic<int> mAllocAfterStopRequested;
 
+    // Release Surface where we get allocations after stop/release being requested.
+    class PlaceHolderSurface;
+    std::unique_ptr<PlaceHolderSurface> mReleaseSurface;
+
 
 private:
     explicit GraphicsTracker(int maxDequeueCount);
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index 5bf4fbe..63d27f4 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -77,9 +77,6 @@
     if (inputSurfaceSetting <= 0) {
         return false;
     }
-    if (!android::media::codec::provider_->aidl_hal_input_surface()) {
-        return false;
-    }
     return true;
 }
 
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
index ccdde5e..bfcf9d4 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
@@ -106,6 +106,10 @@
         "res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5",
         "res/bbb_av1_640_360_chksum.md5",
         "res/bbb_av1_176_144_chksm.md5",
+        "res/trim_pattern_640x480_30fps_16mbps_apv_10bit.apv",
+        "res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv",
+        "res/trim_pattern_640x480_30fps_16mbps_apv_10bit.info",
+        "res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.info",
     ],
 }
 
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
index 0640f02..709ee5b 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
@@ -28,4 +28,5 @@
     name: "VtsHalMediaC2V1_0TargetComponentTest",
     defaults: ["VtsHalMediaC2V1_0Defaults"],
     srcs: ["VtsHalMediaC2V1_0TargetComponentTest.cpp"],
+    test_suites: ["device-tests"],
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
index 5e52fde..9eca6f3 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
@@ -28,4 +28,5 @@
     name: "VtsHalMediaC2V1_0TargetMasterTest",
     defaults: ["VtsHalMediaC2V1_0Defaults"],
     srcs: ["VtsHalMediaC2V1_0TargetMasterTest.cpp"],
+    test_suites: ["device-tests"],
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv
new file mode 100644
index 0000000..37e4f06
--- /dev/null
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv
Binary files differ
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.info b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.info
new file mode 100644
index 0000000..87f4456
--- /dev/null
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_1280x720_30fps_30mbps_apv_10bit.info
@@ -0,0 +1,21 @@
+18 32 0
+106148 1 0
+107780 1 33333
+109230 1 66666
+110011 1 100000
+111734 1 133333
+112143 1 166666
+113393 1 200000
+114155 1 233333
+114715 1 266666
+116142 1 300000
+117126 1 333333
+117224 1 366666
+117813 1 400000
+119040 1 433333
+120019 1 466666
+119985 1 500000
+120218 1 533333
+120444 1 566655
+121402 1 599988
+
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.apv b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.apv
new file mode 100644
index 0000000..db7a52a
--- /dev/null
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.apv
Binary files differ
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.info b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.info
new file mode 100644
index 0000000..503942b
--- /dev/null
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/trim_pattern_640x480_30fps_16mbps_apv_10bit.info
@@ -0,0 +1,21 @@
+18 32 0
+52377 1 0
+53102 1 33333
+54181 1 66666
+54946 1 100000
+55609 1 133333
+56871 1 166666
+57622 1 200000
+58229 1 233333
+58803 1 266666
+58977 1 300000
+59710 1 333333
+60249 1 366666
+60817 1 400000
+61125 1 433333
+61591 1 466666
+61584 1 500000
+62193 1 533333
+62499 1 566655
+62807 1 599988
+
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
index d04c2f6..41ada04 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
@@ -42,6 +42,7 @@
     ],
     data: [":media_c2_v1_video_decode_res"],
     test_config: "VtsHalMediaC2V1_0TargetVideoDecTest.xml",
+    test_suites: ["device-tests"],
 }
 
 cc_test {
@@ -51,4 +52,5 @@
     srcs: ["VtsHalMediaC2V1_0TargetVideoEncTest.cpp"],
     data: [":media_c2_v1_video_encode_res"],
     test_config: "VtsHalMediaC2V1_0TargetVideoEncTest.xml",
+    test_suites: ["device-tests"],
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 90d1874..239a484 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -86,6 +86,10 @@
          "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
         {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
         {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+        {"apv", "trim_pattern_640x480_30fps_16mbps_apv_10bit.apv",
+                "trim_pattern_640x480_30fps_16mbps_apv_10bit.info", ""},
+        {"apv", "trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv",
+                "trim_pattern_1280x720_30fps_30mbps_apv_10bit.info", ""},
 };
 
 class LinearBuffer : public C2Buffer {
@@ -442,28 +446,14 @@
     }
 
     if (surfMode == SURFACE) {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        texture = new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
-                                 false /* isControlledByApp */);
-        sp<Surface> s = texture->getSurface();
+        sp<Surface> s;
+        std::tie(texture, s) =
+                GLConsumer::create(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+                                   true /* useFenceSync */, false /* isControlledByApp */);
         surface = s;
         ASSERT_NE(surface, nullptr) << "failed to create Surface object";
 
         producer = s->getIGraphicBufferProducer();
-#else
-        sp<IGraphicBufferConsumer> consumer = nullptr;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
-        ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
-
-        texture =
-                new GLConsumer(consumer, 0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
-                               true /* useFenceSync */, false /* isControlledByApp */);
-
-        surface = new Surface(producer);
-        ASSERT_NE(surface, nullptr) << "failed to create Surface object";
-#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
         producer->setGenerationNumber(generation);
     }
 
@@ -587,9 +577,9 @@
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
 
-    class DummyConsumerListener : public BnConsumerListener {
+    class DummyConsumerListener : public IConsumerListener {
       public:
-        DummyConsumerListener() : BnConsumerListener() {}
+        DummyConsumerListener() : IConsumerListener() {}
         void onFrameAvailable(const BufferItem&) override {}
         void onBuffersReleased() override {}
         void onSidebandStreamChanged() override {}
@@ -734,7 +724,8 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
           strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
-          strcasestr(mMime.c_str(), "mpeg2") || strcasestr(mMime.c_str(), "av01"))) {
+          strcasestr(mMime.c_str(), "mpeg2") || strcasestr(mMime.c_str(), "av01") ||
+          strcasestr(mMime.c_str(), "apv"))) {
         return;
     }
 
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
index a1049df..ffeeade 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
@@ -59,6 +59,10 @@
         <option name="push-file" key="bbb_vp9_640x360_1600kbps_30fps_chksm.md5" value="/data/local/tmp/media/bbb_vp9_640x360_1600kbps_30fps_chksm.md5" />
         <option name="push-file" key="bbb_av1_640_360_chksum.md5" value="/data/local/tmp/media/bbb_av1_640_360_chksum.md5" />
         <option name="push-file" key="bbb_av1_176_144_chksm.md5" value="/data/local/tmp/media/bbb_av1_176_144_chksm.md5" />
+        <option name="push-file" key="trim_pattern_640x480_30fps_16mbps_apv_10bit.apv" value="/data/local/tmp/media/trim_pattern_640x480_30fps_16mbps_apv_10bit.apv" />
+        <option name="push-file" key="trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv" value="/data/local/tmp/media/trim_pattern_1280x720_30fps_30mbps_apv_10bit.apv" />
+        <option name="push-file" key="trim_pattern_640x480_30fps_16mbps_apv_10bit.info" value="/data/local/tmp/media/trim_pattern_640x480_30fps_16mbps_apv_10bit.info" />
+        <option name="push-file" key="trim_pattern_1280x720_30fps_30mbps_apv_10bit.info" value="/data/local/tmp/media/trim_pattern_1280x720_30fps_30mbps_apv_10bit.info" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index cc5d10c..489857d 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -30,6 +30,7 @@
         "CCodecBufferChannel.cpp",
         "CCodecBuffers.cpp",
         "CCodecConfig.cpp",
+        "CCodecResources.cpp",
         "Codec2Buffer.cpp",
         "Codec2InfoBuilder.cpp",
         "FrameReassembler.cpp",
@@ -54,6 +55,7 @@
         "libSurfaceFlingerProperties",
         "aconfig_mediacodec_flags_c_lib",
         "android.media.codec-aconfig-cc",
+        "android.media.tv.flags-aconfig-cc",
     ],
 
     shared_libs: [
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 72b5a61..99f0f53 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "CCodec"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <sstream>
 #include <thread>
@@ -54,6 +56,7 @@
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
 #include <media/stagefright/CCodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/CCodecResources.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecMetricsConstants.h>
 #include <media/stagefright/PersistentSurface.h>
@@ -882,6 +885,7 @@
             const std::weak_ptr<Codec2Client::Component>& component,
             std::list<std::unique_ptr<C2Work>>& workItems) override {
         (void)component;
+        ScopedTrace trace(ATRACE_TAG, "CCodec::ClientListener-WorkDone");
         sp<CCodec> codec(mCodec.promote());
         if (!codec) {
             return;
@@ -1039,11 +1043,12 @@
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
         return;
     }
-    ALOGD("allocate(%s)", codecInfo->getCodecName());
     mClientListener.reset(new ClientListener(this));
 
     AString componentName = codecInfo->getCodecName();
+    AString halName = codecInfo->getHalName();
     std::shared_ptr<Codec2Client> client;
+    ALOGD("allocate(%s)", componentName.c_str());
 
     // set up preferred component store to access vendor store parameters
     client = Codec2Client::CreateFromService("default");
@@ -1055,12 +1060,12 @@
 
     std::shared_ptr<Codec2Client::Component> comp;
     c2_status_t status = Codec2Client::CreateComponentByName(
-            componentName.c_str(),
+            halName.c_str(),
             mClientListener,
             &comp,
             &client);
     if (status != C2_OK) {
-        ALOGE("Failed Create component: %s, error=%d", componentName.c_str(), status);
+        ALOGE("Failed Create component: %s, error=%d", halName.c_str(), status);
         Mutexed<State>::Locked state(mState);
         state->set(RELEASED);
         state.unlock();
@@ -1068,7 +1073,7 @@
         state.lock();
         return;
     }
-    ALOGI("Created component [%s]", componentName.c_str());
+    ALOGI("Created component [%s] for [%s]", halName.c_str(), componentName.c_str());
     mChannel->setComponent(comp);
     auto setAllocated = [this, comp, client] {
         Mutexed<State>::Locked state(mState);
@@ -1095,6 +1100,11 @@
     }
     config->queryConfiguration(comp);
 
+    if (android::media::codec::codec_availability_support()) {
+        std::string storeName = mClient->getServiceName();
+        mCodecResources = std::make_unique<CCodecResources>(storeName);
+    }
+
     mCallback->onComponentAllocated(componentName.c_str());
 }
 
@@ -1861,6 +1871,11 @@
     mMetrics = new AMessage;
     mChannel->resetBuffersPixelFormat((config->mDomain & Config::IS_ENCODER) ? true : false);
 
+    // Query required system resources for the current configuration
+    if (mCodecResources) {
+        // TODO: Should we fail the configuration if this query fails?
+        mCodecResources->queryRequiredResources(comp);
+    }
     mCallback->onComponentConfigured(config->mInputFormat, config->mOutputFormat);
 }
 
@@ -2773,6 +2788,13 @@
     return config->unsubscribeFromVendorConfigUpdate(comp, names);
 }
 
+std::vector<InstanceResourceInfo> CCodec::getRequiredSystemResources() {
+    if (mCodecResources) {
+        return mCodecResources->getRequiredResources();
+    }
+    return std::vector<InstanceResourceInfo>{};
+}
+
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -2860,6 +2882,7 @@
             break;
         }
         case kWhatWorkDone: {
+            ScopedTrace trace(ATRACE_TAG, "CCodec::msg-onWorkDone");
             std::unique_ptr<C2Work> work;
             bool shouldPost = false;
             {
@@ -2894,6 +2917,21 @@
                             : work->worklets.front()->output.configUpdate) {
                         updates.push_back(C2Param::Copy(*param));
                     }
+                    // Check for change in resources required.
+                    if (!updates.empty() && android::media::codec::codec_availability_support()) {
+                        for (const std::unique_ptr<C2Param>& param : updates) {
+                            if (param->index() == C2ResourcesNeededTuning::PARAM_TYPE) {
+                                // Update the required resources.
+                                if (mCodecResources) {
+                                    mCodecResources->updateRequiredResources(
+                                            C2ResourcesNeededTuning::From(param.get()));
+                                }
+                                // Report to MediaCodec
+                                mCallback->onRequiredResourcesChanged();
+                                break;
+                            }
+                        }
+                    }
                     unsigned stream = 0;
                     std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
                         work->worklets.front()->output.buffers;
@@ -3519,4 +3557,13 @@
     return block;
 }
 
+//static
+std::vector<GlobalResourceInfo> CCodec::GetGloballyAvailableResources() {
+    if (android::media::codec::codec_availability_support()) {
+        return CCodecResources::GetGloballyAvailableResources();
+    }
+
+    return std::vector<GlobalResourceInfo>{};
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 81f68cd..7a576bb 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -29,6 +29,7 @@
 #include <chrono>
 
 #include <android_media_codec.h>
+#include <android_media_tv_flags.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
@@ -61,6 +62,7 @@
 #include <mediadrm/ICrypto.h>
 #include <server_configurable_flags/get_flags.h>
 #include <system/window.h>
+#include <ui/PictureProfileHandle.h>
 
 #include "CCodecBufferChannel.h"
 #include "Codec2Buffer.h"
@@ -649,7 +651,7 @@
             return -ENOSYS;
         }
         // we are dealing with just one cryptoInfo or descrambler.
-        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+        std::unique_ptr<CodecCryptoInfo> &info = cryptoInfos->value[0];
         if (info == nullptr) {
             ALOGE("Cannot decrypt, CryptoInfos are null.");
             return -ENOSYS;
@@ -698,7 +700,7 @@
             mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
     for (int i = 0; i < bufferInfos->value.size(); i++) {
         if (bufferInfos->value[i].mSize > 0) {
-            std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+            std::unique_ptr<CodecCryptoInfo> &info = cryptoInfos->value[cryptoInfoIdx++];
             src.offset = srcOffset;
             src.size = bufferInfos->value[i].mSize;
             result = mCrypto->decrypt(
@@ -911,129 +913,130 @@
     size_t bufferSize = 0;
     c2_status_t blockRes = C2_OK;
     bool copied = false;
-    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
-            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
-    if (mSendEncryptedInfoBuffer) {
-        static const C2MemoryUsage kDefaultReadWriteUsage{
-            C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
-        constexpr int kAllocGranule0 = 1024 * 64;
-        constexpr int kAllocGranule1 = 1024 * 1024;
-        std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
-        // round up encrypted sizes to limit fragmentation and encourage buffer reuse
-        if (allocSize <= kAllocGranule1) {
-            bufferSize = align(allocSize, kAllocGranule0);
-        } else {
-            bufferSize = align(allocSize, kAllocGranule1);
-        }
-        blockRes = pool->fetchLinearBlock(
-                bufferSize, kDefaultReadWriteUsage, &block);
+    {
+        ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+                "CCodecBufferChannel::decrypt(%s)", mName).c_str());
+        if (mSendEncryptedInfoBuffer) {
+            static const C2MemoryUsage kDefaultReadWriteUsage{
+                C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+            constexpr int kAllocGranule0 = 1024 * 64;
+            constexpr int kAllocGranule1 = 1024 * 1024;
+            std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+            // round up encrypted sizes to limit fragmentation and encourage buffer reuse
+            if (allocSize <= kAllocGranule1) {
+                bufferSize = align(allocSize, kAllocGranule0);
+            } else {
+                bufferSize = align(allocSize, kAllocGranule1);
+            }
+            blockRes = pool->fetchLinearBlock(
+                    bufferSize, kDefaultReadWriteUsage, &block);
 
-        if (blockRes == C2_OK) {
-            C2WriteView view = block->map().get();
-            if (view.error() == C2_OK && view.size() == bufferSize) {
-                copied = true;
-                // TODO: only copy clear sections
-                memcpy(view.data(), buffer->data(), allocSize);
+            if (blockRes == C2_OK) {
+                C2WriteView view = block->map().get();
+                if (view.error() == C2_OK && view.size() == bufferSize) {
+                    copied = true;
+                    // TODO: only copy clear sections
+                    memcpy(view.data(), buffer->data(), allocSize);
+                }
             }
         }
-    }
 
-    if (!copied) {
-        block.reset();
-    }
+        if (!copied) {
+            block.reset();
+        }
 
-    ssize_t result = -1;
-    ssize_t codecDataOffset = 0;
-    if (numSubSamples == 1
-            && subSamples[0].mNumBytesOfClearData == 0
-            && subSamples[0].mNumBytesOfEncryptedData == 0) {
-        // We don't need to go through crypto or descrambler if the input is empty.
-        result = 0;
-    } else if (mCrypto != nullptr) {
-        hardware::drm::V1_0::DestinationBuffer destination;
-        if (secure) {
-            destination.type = DrmBufferType::NATIVE_HANDLE;
-            destination.secureMemory = hidl_handle(encryptedBuffer->handle());
+        ssize_t result = -1;
+        ssize_t codecDataOffset = 0;
+        if (numSubSamples == 1
+                && subSamples[0].mNumBytesOfClearData == 0
+                && subSamples[0].mNumBytesOfEncryptedData == 0) {
+            // We don't need to go through crypto or descrambler if the input is empty.
+            result = 0;
+        } else if (mCrypto != nullptr) {
+            hardware::drm::V1_0::DestinationBuffer destination;
+            if (secure) {
+                destination.type = DrmBufferType::NATIVE_HANDLE;
+                destination.secureMemory = hidl_handle(encryptedBuffer->handle());
+            } else {
+                destination.type = DrmBufferType::SHARED_MEMORY;
+                IMemoryToSharedBuffer(
+                        mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+            }
+            hardware::drm::V1_0::SharedBuffer source;
+            encryptedBuffer->fillSourceBuffer(&source);
+            result = mCrypto->decrypt(
+                    key, iv, mode, pattern, source, buffer->offset(),
+                    subSamples, numSubSamples, destination, errorDetailMsg);
+            if (result < 0) {
+                ALOGI("[%s] decrypt failed: result=%zd", mName, result);
+                return result;
+            }
+            if (destination.type == DrmBufferType::SHARED_MEMORY) {
+                encryptedBuffer->copyDecryptedContent(mDecryptDestination, result);
+            }
         } else {
-            destination.type = DrmBufferType::SHARED_MEMORY;
-            IMemoryToSharedBuffer(
-                    mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
-        }
-        hardware::drm::V1_0::SharedBuffer source;
-        encryptedBuffer->fillSourceBuffer(&source);
-        result = mCrypto->decrypt(
-                key, iv, mode, pattern, source, buffer->offset(),
-                subSamples, numSubSamples, destination, errorDetailMsg);
-        if (result < 0) {
-            ALOGI("[%s] decrypt failed: result=%zd", mName, result);
-            return result;
-        }
-        if (destination.type == DrmBufferType::SHARED_MEMORY) {
-            encryptedBuffer->copyDecryptedContent(mDecryptDestination, result);
-        }
-    } else {
-        // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
-        // directly, the structure definitions should match as checked in DescramblerImpl.cpp.
-        hidl_vec<SubSample> hidlSubSamples;
-        hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
+            // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
+            // directly, the structure definitions should match as checked in DescramblerImpl.cpp.
+            hidl_vec<SubSample> hidlSubSamples;
+            hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
 
-        hardware::cas::native::V1_0::SharedBuffer srcBuffer;
-        encryptedBuffer->fillSourceBuffer(&srcBuffer);
+            hardware::cas::native::V1_0::SharedBuffer srcBuffer;
+            encryptedBuffer->fillSourceBuffer(&srcBuffer);
 
-        DestinationBuffer dstBuffer;
-        if (secure) {
-            dstBuffer.type = BufferType::NATIVE_HANDLE;
-            dstBuffer.secureMemory = hidl_handle(encryptedBuffer->handle());
-        } else {
-            dstBuffer.type = BufferType::SHARED_MEMORY;
-            dstBuffer.nonsecureMemory = srcBuffer;
+            DestinationBuffer dstBuffer;
+            if (secure) {
+                dstBuffer.type = BufferType::NATIVE_HANDLE;
+                dstBuffer.secureMemory = hidl_handle(encryptedBuffer->handle());
+            } else {
+                dstBuffer.type = BufferType::SHARED_MEMORY;
+                dstBuffer.nonsecureMemory = srcBuffer;
+            }
+
+            CasStatus status = CasStatus::OK;
+            hidl_string detailedError;
+            ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
+
+            if (key != nullptr) {
+                sctrl = (ScramblingControl)key[0];
+                // Adjust for the PES offset
+                codecDataOffset = key[2] | (key[3] << 8);
+            }
+
+            auto returnVoid = mDescrambler->descramble(
+                    sctrl,
+                    hidlSubSamples,
+                    srcBuffer,
+                    0,
+                    dstBuffer,
+                    0,
+                    [&status, &result, &detailedError] (
+                            CasStatus _status, uint32_t _bytesWritten,
+                            const hidl_string& _detailedError) {
+                        status = _status;
+                        result = (ssize_t)_bytesWritten;
+                        detailedError = _detailedError;
+                    });
+
+            if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
+                ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
+                        mName, returnVoid.description().c_str(), status, result);
+                return UNKNOWN_ERROR;
+            }
+
+            if (result < codecDataOffset) {
+                ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+                return BAD_VALUE;
+            }
+
+            ALOGV("[%s] descramble succeeded, %zd bytes", mName, result);
+
+            if (dstBuffer.type == BufferType::SHARED_MEMORY) {
+                encryptedBuffer->copyDecryptedContentFromMemory(result);
+            }
         }
 
-        CasStatus status = CasStatus::OK;
-        hidl_string detailedError;
-        ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
-
-        if (key != nullptr) {
-            sctrl = (ScramblingControl)key[0];
-            // Adjust for the PES offset
-            codecDataOffset = key[2] | (key[3] << 8);
-        }
-
-        auto returnVoid = mDescrambler->descramble(
-                sctrl,
-                hidlSubSamples,
-                srcBuffer,
-                0,
-                dstBuffer,
-                0,
-                [&status, &result, &detailedError] (
-                        CasStatus _status, uint32_t _bytesWritten,
-                        const hidl_string& _detailedError) {
-                    status = _status;
-                    result = (ssize_t)_bytesWritten;
-                    detailedError = _detailedError;
-                });
-
-        if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
-            ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
-                    mName, returnVoid.description().c_str(), status, result);
-            return UNKNOWN_ERROR;
-        }
-
-        if (result < codecDataOffset) {
-            ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
-            return BAD_VALUE;
-        }
-
-        ALOGV("[%s] descramble succeeded, %zd bytes", mName, result);
-
-        if (dstBuffer.type == BufferType::SHARED_MEMORY) {
-            encryptedBuffer->copyDecryptedContentFromMemory(result);
-        }
+        buffer->setRange(codecDataOffset, result - codecDataOffset);
     }
-
-    buffer->setRange(codecDataOffset, result - codecDataOffset);
-
     return queueInputBufferInternal(buffer, block, bufferSize);
 }
 
@@ -1238,6 +1241,13 @@
 
 status_t CCodecBufferChannel::renderOutputBuffer(
         const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
+    std::string traceStr;
+    if (ATRACE_ENABLED()) {
+        traceStr = android::base::StringPrintf(
+                "CCodecBufferChannel::renderOutputBuffer-%s", mName);
+    }
+    ScopedTrace trace(ATRACE_TAG, traceStr.c_str());
+
     ALOGV("[%s] renderOutputBuffer: %p", mName, buffer.get());
     std::shared_ptr<C2Buffer> c2Buffer;
     bool released = false;
@@ -1456,6 +1466,14 @@
 
     qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
     qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
+
+    int64_t pictureProfileHandle;
+    if (android::media::tv::flags::apply_picture_profiles() &&
+                buffer->format()->findInt64(KEY_PICTURE_PROFILE_HANDLE, &pictureProfileHandle)) {
+        PictureProfileHandle handle(static_cast<PictureProfileId>(pictureProfileHandle));
+        qbi.setPictureProfileHandle(handle);
+    }
+
     IGraphicBufferProducer::QueueBufferOutput qbo;
     status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
@@ -2363,6 +2381,12 @@
         const sp<AMessage> &inputFormat,
         const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
+    std::string traceStr;
+    if (ATRACE_ENABLED()) {
+        traceStr = android::base::StringPrintf(
+                "CCodecBufferChannel::onWorkDone-%s", mName).c_str();
+    }
+    ScopedTrace trace(ATRACE_TAG, traceStr.c_str());
     if (handleWork(std::move(work), inputFormat, outputFormat, initData)) {
         feedInputBufferIfAvailable();
     }
@@ -2396,6 +2420,12 @@
         const sp<AMessage> &inputFormat,
         const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
+    std::string traceStr;
+    if (ATRACE_ENABLED()) {
+        traceStr = android::base::StringPrintf(
+                "CCodecBufferChannel::handleWork-%s", mName).c_str();
+    }
+    ScopedTrace atrace(ATRACE_TAG, traceStr.c_str());
     {
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers) {
@@ -2752,6 +2782,12 @@
 }
 
 void CCodecBufferChannel::sendOutputBuffers() {
+    std::string traceStr;
+    if (ATRACE_ENABLED()) {
+        traceStr = android::base::StringPrintf(
+                "CCodecBufferChannel::sendOutputBuffers-%s", mName);
+    }
+    ScopedTrace trace(ATRACE_TAG, traceStr.c_str());
     OutputBuffers::BufferAction action;
     size_t index;
     sp<MediaCodecBuffer> outBuffer;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 3eec0f3..2049c78 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "CCodecBuffers"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <numeric>
 
@@ -984,6 +986,7 @@
 // LinearInputBuffers
 
 bool LinearInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
+    ScopedTrace trace(ATRACE_TAG, "CCodec::LinearInputBuffers::requestNewBuffer");
     sp<Codec2Buffer> newBuffer = createNewBuffer();
     if (newBuffer == nullptr) {
         return false;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 897a696..119658a 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -20,6 +20,7 @@
 #include <initializer_list>
 
 #include <android_media_codec.h>
+#include <android_media_tv_flags.h>
 
 #include <cutils/properties.h>
 #include <log/log.h>
@@ -1047,6 +1048,11 @@
             return C2Value();
         }));
 
+    if (android::media::tv::flags::apply_picture_profiles()) {
+        add(ConfigMapper(KEY_PICTURE_PROFILE_HANDLE, C2_PARAMKEY_DISPLAY_PROCESSING_TOKEN, "value")
+                    .limitTo(D::VIDEO & D::RAW & D::DECODER));
+    }
+
     /* still to do
        not yet used by MediaCodec, but defined as MediaFormat
     KEY_AUDIO_SESSION_ID // we use "audio-hw-sync"
diff --git a/media/codec2/sfplugin/CCodecResources.cpp b/media/codec2/sfplugin/CCodecResources.cpp
new file mode 100644
index 0000000..9934049
--- /dev/null
+++ b/media/codec2/sfplugin/CCodecResources.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <C2Config.h>
+#include <media/stagefright/CCodecResources.h>
+
+namespace android {
+
+// Construct the name based on the component store name
+// and the id of the resource.
+// resource name = "componentStoreName-id"
+static inline std::string getResourceName(const std::string& componentStoreName, uint32_t id) {
+    return componentStoreName + "-" + std::to_string(id);
+}
+
+static
+c2_status_t queryGlobalResources(const std::shared_ptr<Codec2Client>& client,
+                                 std::vector<GlobalResourceInfo>& systemAvailableResources) {
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+    c2_status_t c2err = client->query(
+            {},
+            {C2ResourcesCapacityTuning::PARAM_TYPE, C2ResourcesExcludedTuning::PARAM_TYPE},
+            C2_MAY_BLOCK,
+            &heapParams);
+
+    if (c2err == C2_OK && heapParams.size() == 2u) {
+        // Construct Globally available resources now.
+        // Get the total capacity first.
+        std::string storeName = client->getServiceName();
+        const C2ResourcesCapacityTuning* systemCapacity =
+                C2ResourcesCapacityTuning::From(heapParams[0].get());
+        if (systemCapacity && *systemCapacity) {
+            for (size_t i = 0; i < systemCapacity->flexCount(); ++i) {
+                const C2SystemResourceStruct& resource =
+                    systemCapacity->m.values[i];
+                std::string name = getResourceName(storeName, resource.id);
+                uint64_t capacity = (resource.kind == CONST) ? resource.amount : 0;
+                systemAvailableResources.push_back({name, capacity, capacity});
+            }
+        } else {
+            ALOGW("Failed to get C2ResourcesCapacityTuning");
+        }
+
+        // Get the excluded resource info.
+        // The available resource should exclude this, if there are any.
+        const C2ResourcesExcludedTuning* systemExcluded =
+                C2ResourcesExcludedTuning::From( heapParams[1].get());
+        if (systemExcluded && *systemExcluded) {
+            for (size_t i = 0; i < systemExcluded->flexCount(); ++i) {
+                const C2SystemResourceStruct& resource =
+                    systemExcluded->m.values[i];
+                std::string name = getResourceName(storeName, resource.id);
+                uint64_t excluded = (resource.kind == CONST) ? resource.amount : 0;
+                auto found = std::find_if(systemAvailableResources.begin(),
+                                          systemAvailableResources.end(),
+                                          [name](const GlobalResourceInfo& item) {
+                                              return item.mName == name; });
+
+                if (found != systemAvailableResources.end()) {
+                    // Take off excluded resources from available resources.
+                    if (found->mAvailable >= excluded) {
+                        found->mAvailable -= excluded;
+                    } else {
+                        ALOGW("Excluded resources(%jd) can't be more than Available resources(%jd)",
+                              excluded, found->mAvailable);
+                        found->mAvailable = 0;
+                    }
+                } else {
+                    ALOGW("Failed to find the resource [%s]", name.c_str());
+                }
+            }
+        } else {
+            ALOGW("Failed to get C2ResourcesExcludedTuning");
+        }
+
+    } else {
+        ALOGW("Failed to query component store for system resources: %d", c2err);
+    }
+
+    return c2err;
+}
+
+/**
+ * A utility function that converts C2ResourcesNeededTuning into
+ * a vector of InstanceResourceInfo.
+ *
+ * Right now, this function is at its simplest form looking into
+ * mapping constant and per frame resource kinds,
+ * but we need to extend this to address:
+ *  - Construct the name for each resources
+ *    (using the resource id and component store)
+ *  - Devise a unified way of presenting per frame, per input/output block
+ *    resource requirements.
+ */
+static status_t getSystemResource(const C2ResourcesNeededTuning* systemResourcesInfo,
+                                  const std::string& storeName,
+                                  std::vector<InstanceResourceInfo>& resources) {
+    resources.clear();
+    if (systemResourcesInfo && *systemResourcesInfo) {
+        for (size_t i = 0; i < systemResourcesInfo->flexCount(); ++i) {
+            const C2SystemResourceStruct& resource =
+                systemResourcesInfo->m.values[i];
+            uint64_t staticCount = 0;
+            uint64_t perFrameCount = 0;
+            std::string name = getResourceName(storeName, resource.id);
+
+            switch (resource.kind) {
+            case CONST:
+                staticCount = resource.amount;
+                break;
+            case PER_FRAME:
+                perFrameCount = resource.amount;
+                break;
+            case PER_INPUT_BLOCK:
+            case PER_OUTPUT_BLOCK:
+                // TODO: Find a way to pass this info through InstanceResourceInfo
+                // For now, we are using this as per frame count.
+                perFrameCount = resource.amount;
+                break;
+            }
+            resources.push_back({name, staticCount, perFrameCount});
+        }
+
+        return OK;
+    }
+
+    return UNKNOWN_ERROR;
+}
+
+//static
+std::vector<GlobalResourceInfo> CCodecResources::GetGloballyAvailableResources() {
+    // Try creating client from "default" service:
+    std::shared_ptr<Codec2Client> client = Codec2Client::CreateFromService("default");
+    if (client) {
+        // Query the system resource capacity from the component store.
+        std::vector<GlobalResourceInfo> systemAvailableResources;
+        c2_status_t status = queryGlobalResources(client, systemAvailableResources);
+        if (status == C2_OK) {
+            return systemAvailableResources;
+        }
+    } else {
+        ALOGW("Failed to create client from default component store!");
+    }
+
+    return std::vector<GlobalResourceInfo>{};
+}
+
+CCodecResources::CCodecResources(const std::string& storeName) : mStoreName(storeName) {}
+
+status_t CCodecResources::queryRequiredResources(
+        const std::shared_ptr<Codec2Client::Component>& comp) {
+    // Query required/needed system resources for the current configuration.
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+    c2_status_t c2err = comp->query(
+            {},
+            {C2ResourcesNeededTuning::PARAM_TYPE},
+            C2_MAY_BLOCK,
+            &heapParams);
+    if (c2err != C2_OK || heapParams.size() != 1u) {
+        ALOGE("Failed to query component interface for required system resources: %d", c2err);
+        return UNKNOWN_ERROR;
+    }
+
+    // Construct Required System Resources.
+    Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(mResources);
+    std::vector<InstanceResourceInfo>& resources = *resourcesLocked;
+    return getSystemResource(C2ResourcesNeededTuning::From(heapParams[0].get()),
+                             mStoreName,
+                             resources);
+}
+
+std::vector<InstanceResourceInfo> CCodecResources::getRequiredResources() {
+    Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(mResources);
+    return *resourcesLocked;
+}
+
+status_t CCodecResources::updateRequiredResources(
+        const C2ResourcesNeededTuning* systemResourcesInfo) {
+    // Update the required resources from the given systemResourcesInfo.
+    Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(mResources);
+    std::vector<InstanceResourceInfo>& resources = *resourcesLocked;
+    return getSystemResource(systemResourcesInfo, mStoreName, resources);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 7d4e8ab..c2fe172 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -22,8 +22,6 @@
 
 #include <android_media_codec.h>
 
-#include <aidl/android/hardware/graphics/common/Cta861_3.h>
-#include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android-base/no_destructor.h>
 #include <android-base/properties.h>
 #include <android/hardware/cas/native/1.0/types.h>
@@ -38,7 +36,6 @@
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <mediadrm/ICrypto.h>
 #include <nativebase/nativebase.h>
-#include <ui/GraphicBufferMapper.h>
 #include <ui/Fence.h>
 
 #include <C2AllocatorGralloc.h>
@@ -602,202 +599,4 @@
     mView.setOffset(0);
 }
 
-using ::aidl::android::hardware::graphics::common::Cta861_3;
-using ::aidl::android::hardware::graphics::common::Smpte2086;
-
-namespace {
-
-class GrallocBuffer {
-public:
-    GrallocBuffer(const C2Handle *const handle) : mBuffer(nullptr) {
-        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
-
-        // Unwrap raw buffer handle from the C2Handle
-        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
-        if (!nh) {
-            ALOGE("handle is not compatible to any gralloc C2Handle types");
-            return;
-        }
-        // Import the raw handle so IMapper can use the buffer. The imported
-        // handle must be freed when the client is done with the buffer.
-        status_t status = mapper.importBufferNoValidate(
-                nh,
-                &mBuffer);
-
-        if (status != OK) {
-            ALOGE("Failed to import buffer. Status: %d.", status);
-            return;
-        }
-
-        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
-        //         does not clone the fds. Thus we need to delete the handle
-        //         without closing it.
-        native_handle_delete(nh);
-    }
-
-    ~GrallocBuffer() {
-        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
-        if (mBuffer) {
-            // Free the imported buffer handle. This does not release the
-            // underlying buffer itself.
-            mapper.freeBuffer(mBuffer);
-        }
-    }
-
-    buffer_handle_t get() const { return mBuffer; }
-    operator bool() const { return (mBuffer != nullptr); }
-private:
-    buffer_handle_t mBuffer;
-};
-
-}  // namspace
-
-c2_status_t GetHdrMetadataFromGralloc4Handle(
-        const C2Handle *const handle,
-        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
-        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
-    c2_status_t err = C2_OK;
-    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
-    GrallocBuffer buffer(handle);
-    if (!buffer) {
-        // Gralloc4 not supported; nothing to do
-        return err;
-    }
-    if (staticInfo) {
-        ALOGV("Grabbing static HDR info from gralloc metadata");
-        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
-        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
-        (*staticInfo)->maxCll = 0;
-        (*staticInfo)->maxFall = 0;
-
-        std::optional<Smpte2086> smpte2086;
-        status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
-        if (status != OK || !smpte2086) {
-            err = C2_CORRUPTED;
-        } else {
-            if (smpte2086) {
-                  (*staticInfo)->mastering.red.x    = smpte2086->primaryRed.x;
-                  (*staticInfo)->mastering.red.y    = smpte2086->primaryRed.y;
-                  (*staticInfo)->mastering.green.x  = smpte2086->primaryGreen.x;
-                  (*staticInfo)->mastering.green.y  = smpte2086->primaryGreen.y;
-                  (*staticInfo)->mastering.blue.x   = smpte2086->primaryBlue.x;
-                  (*staticInfo)->mastering.blue.y   = smpte2086->primaryBlue.y;
-                  (*staticInfo)->mastering.white.x  = smpte2086->whitePoint.x;
-                  (*staticInfo)->mastering.white.y  = smpte2086->whitePoint.y;
-
-                  (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
-                  (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
-            }
-        }
-
-        std::optional<Cta861_3> cta861_3;
-        status = mapper.getCta861_3(buffer.get(), &cta861_3);
-        if (status != OK || !cta861_3) {
-            err = C2_CORRUPTED;
-        } else {
-            if (cta861_3) {
-                  (*staticInfo)->maxCll   = cta861_3->maxContentLightLevel;
-                  (*staticInfo)->maxFall  = cta861_3->maxFrameAverageLightLevel;
-            }
-        }
-    }
-
-    if (err != C2_OK) {
-        staticInfo->reset();
-    }
-
-    if (dynamicInfo) {
-        ALOGV("Grabbing dynamic HDR info from gralloc metadata");
-        dynamicInfo->reset();
-        std::optional<std::vector<uint8_t>> vec;
-        status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
-        if (status != OK || !vec) {
-            dynamicInfo->reset();
-            err = C2_CORRUPTED;
-        } else {
-            if (vec) {
-                *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
-                      vec->size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
-                memcpy((*dynamicInfo)->m.data, vec->data(), vec->size());
-            }
-        }
-    }
-
-    return err;
-}
-
-c2_status_t SetMetadataToGralloc4Handle(
-        android_dataspace_t dataSpace,
-        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
-        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
-        const C2Handle *const handle) {
-    c2_status_t err = C2_OK;
-    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
-    GrallocBuffer buffer(handle);
-    if (!buffer) {
-        // Gralloc4 not supported; nothing to do
-        return err;
-    }
-    // Use V0 dataspaces for Gralloc4+
-    if (android::media::codec::provider_->dataspace_v0_partial()) {
-        ColorUtils::convertDataSpaceToV0(dataSpace);
-    }
-    status_t status = mapper.setDataspace(buffer.get(), static_cast<ui::Dataspace>(dataSpace));
-    if (status != OK) {
-       err = C2_CORRUPTED;
-    }
-    if (staticInfo && *staticInfo) {
-        ALOGV("Setting static HDR info as gralloc metadata");
-        std::optional<Smpte2086> smpte2086 = Smpte2086{
-            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
-            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
-            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
-            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
-            staticInfo->mastering.maxLuminance,
-            staticInfo->mastering.minLuminance,
-        };
-        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
-                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
-                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
-                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
-                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
-                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
-                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
-                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
-                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance) {
-            status = mapper.setSmpte2086(buffer.get(), smpte2086);
-            if (status != OK) {
-                err = C2_CORRUPTED;
-            }
-        }
-        std::optional<Cta861_3> cta861_3 = Cta861_3{
-            staticInfo->maxCll,
-            staticInfo->maxFall,
-        };
-        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel) {
-            status = mapper.setCta861_3(buffer.get(), cta861_3);
-            if (status != OK) {
-                err = C2_CORRUPTED;
-            }
-        }
-    }
-    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
-        ALOGV("Setting dynamic HDR info as gralloc metadata");
-        if (dynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
-            std::optional<std::vector<uint8_t>> smpte2094_40 = std::vector<uint8_t>();
-            smpte2094_40->resize(dynamicInfo->flexCount());
-            memcpy(smpte2094_40->data(), dynamicInfo->m.data, dynamicInfo->flexCount());
-
-            status = mapper.setSmpte2094_40(buffer.get(), smpte2094_40);
-            if (status != OK) {
-                err = C2_CORRUPTED;
-            }
-        } else {
-            err = C2_BAD_VALUE;
-        }
-    }
-
-    return err;
-}
-
 }  // namespace android
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 8c5e909..bdd4bd0 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -19,7 +19,6 @@
 #define CODEC2_BUFFER_H_
 
 #include <C2Buffer.h>
-#include <C2Config.h>
 
 #include <binder/IMemory.h>
 #include <media/hardware/VideoAPI.h>
@@ -381,38 +380,6 @@
     int32_t mHeapSeqNum;
 };
 
-/**
- * Get HDR metadata from Gralloc4 handle.
- *
- * \param[in]   handle      handle of the allocation
- * \param[out]  staticInfo  HDR static info to be filled. Ignored if null;
- *                          if |handle| is invalid or does not contain the metadata,
- *                          the shared_ptr is reset.
- * \param[out]  dynamicInfo HDR dynamic info to be filled. Ignored if null;
- *                          if |handle| is invalid or does not contain the metadata,
- *                          the shared_ptr is reset.
- * \return C2_OK if successful
- */
-c2_status_t GetHdrMetadataFromGralloc4Handle(
-        const C2Handle *const handle,
-        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
-        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
-
-/**
- * Set metadata to Gralloc4 handle.
- *
- * \param[in]   dataSpace   Dataspace to set.
- * \param[in]   staticInfo  HDR static info to set. Ignored if null or invalid.
- * \param[in]   dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
- * \param[out]  handle      handle of the allocation.
- * \return C2_OK if successful
- */
-c2_status_t SetMetadataToGralloc4Handle(
-        const android_dataspace_t dataSpace,
-        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
-        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
-        const C2Handle *const handle);
-
 }  // namespace android
 
 #endif  // CODEC2_BUFFER_H_
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index 2b1cf60..220366e 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -40,6 +40,7 @@
 namespace android {
 
 class CCodecBufferChannel;
+class CCodecResources;
 class InputSurfaceWrapper;
 struct CCodecConfig;
 struct MediaCodecInfo;
@@ -71,6 +72,8 @@
     virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
     virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
 
+    virtual std::vector<InstanceResourceInfo> getRequiredSystemResources() override;
+
     void initiateReleaseIfStuck();
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
@@ -93,6 +96,8 @@
             uint64_t usage,
             const std::vector<std::string> &names);
 
+    static std::vector<GlobalResourceInfo> GetGloballyAvailableResources();
+
 protected:
     virtual ~CCodec();
 
@@ -206,6 +211,7 @@
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
 
     sp<AMessage> mMetrics;
+    std::unique_ptr<CCodecResources> mCodecResources;
 
     friend class CCodecCallbackImpl;
 
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodecResources.h b/media/codec2/sfplugin/include/media/stagefright/CCodecResources.h
new file mode 100644
index 0000000..3e9dcc1
--- /dev/null
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodecResources.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CCODEC_RESOURCES_H_
+#define CCODEC_RESOURCES_H_
+
+#include <string>
+#include <vector>
+
+#include <C2Component.h>
+#include <codec2/hidl/client.h>
+
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/ResourceInfo.h>
+
+namespace android {
+
+class CCodecResources {
+public:
+    CCodecResources(const std::string& storeName);
+
+    /// Gets the globally available resources from the
+    /// default store.
+    static std::vector<GlobalResourceInfo> GetGloballyAvailableResources();
+
+    /// Queries the required resources for the given codec component.
+    status_t queryRequiredResources(
+            const std::shared_ptr<Codec2Client::Component>& comp);
+
+    /// Gets the required resources.
+    std::vector<InstanceResourceInfo> getRequiredResources();
+
+    /// Updates the required resources.
+    status_t updateRequiredResources(const C2ResourcesNeededTuning* systemResourcesInfo);
+
+private:
+    const std::string mStoreName;
+    Mutexed<std::vector<InstanceResourceInfo>> mResources;
+};
+
+} // namespace android
+
+#endif  // CCODEC_RESOURCES_H_
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 508bec2..e476d32 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -60,6 +60,7 @@
 
     CCodecConfigTest()
         : mReflector{std::make_shared<C2ReflectorHelper>()} {
+          initializeSystemResources();
     }
 
     void init(
@@ -68,7 +69,8 @@
             const char *mediaType) {
         sp<hardware::media::c2::V1_0::utils::CachedConfigurable> cachedConfigurable =
             new hardware::media::c2::V1_0::utils::CachedConfigurable(
-                    std::make_unique<Configurable>(mReflector, domain, kind, mediaType));
+                    std::make_unique<Configurable>(mReflector, domain, kind, mediaType,
+                                                   mSystemResources, mExcludedResources));
         cachedConfigurable->init(std::make_shared<Cache>());
         mConfigurable = std::make_shared<Codec2Client::Configurable>(cachedConfigurable);
     }
@@ -85,9 +87,11 @@
                 const std::shared_ptr<C2ReflectorHelper> &reflector,
                 C2Component::domain_t domain,
                 C2Component::kind_t kind,
-                const char *mediaType)
+                const char *mediaType,
+                const std::vector<C2SystemResourceStruct>& systemResources,
+                const std::vector<C2SystemResourceStruct>& excludedResources)
             : ConfigurableC2Intf("name", 0u),
-              mImpl(reflector, domain, kind, mediaType) {
+              mImpl(reflector, domain, kind, mediaType, systemResources, excludedResources) {
         }
 
         c2_status_t query(
@@ -121,7 +125,9 @@
             Impl(const std::shared_ptr<C2ReflectorHelper> &reflector,
                     C2Component::domain_t domain,
                     C2Component::kind_t kind,
-                    const char *mediaType)
+                    const char *mediaType,
+                    const std::vector<C2SystemResourceStruct>& systemResources,
+                    const std::vector<C2SystemResourceStruct>& excludedResources)
                 : C2InterfaceHelper{reflector} {
 
                 setDerivedInstance(this);
@@ -210,6 +216,32 @@
                         .withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
                         .build());
 
+                // Add System Resource Capacity
+                addParameter(
+                    DefineParam(mResourcesCapacity, C2_PARAMKEY_RESOURCES_CAPACITY)
+                    .withDefault(C2ResourcesCapacityTuning::AllocShared(
+                            systemResources.size(), systemResources))
+                    .withFields({
+                            C2F(mResourcesCapacity, m.values[0].id).any(),
+                            C2F(mResourcesCapacity, m.values[0].kind).any(),
+                            C2F(mResourcesCapacity, m.values[0].amount).any(),
+                    })
+                    .withSetter(Setter<C2ResourcesCapacityTuning>)
+                    .build());
+
+                // Add Excluded System Resources
+                addParameter(
+                    DefineParam(mResourcesExcluded, C2_PARAMKEY_RESOURCES_EXCLUDED)
+                    .withDefault(C2ResourcesExcludedTuning::AllocShared(
+                            excludedResources.size(), excludedResources))
+                    .withFields({
+                            C2F(mResourcesExcluded, m.values[0].id).any(),
+                            C2F(mResourcesExcluded, m.values[0].kind).any(),
+                            C2F(mResourcesExcluded, m.values[0].amount).any(),
+                    })
+                    .withSetter(Setter<C2ResourcesExcludedTuning>)
+                    .build());
+
                 if (isEncoder) {
                     addParameter(
                             DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
@@ -273,6 +305,8 @@
             std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
             std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
             std::shared_ptr<C2StreamQpOffsetRects::output> mInputQpOffsetRects;
+            std::shared_ptr<C2ResourcesCapacityTuning> mResourcesCapacity;
+            std::shared_ptr<C2ResourcesExcludedTuning> mResourcesExcluded;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
@@ -292,6 +326,51 @@
     std::shared_ptr<C2ReflectorHelper> mReflector;
     std::shared_ptr<Codec2Client::Configurable> mConfigurable;
     CCodecConfig mConfig;
+
+    /*
+     * This test tracks two system resources:
+     *  - max instance limit, which is capped at 64
+     *  - max pixel count: up to 4 instances of 4K ==> 4 * 3840 * 2400
+     *
+     *  These 2 resource types are given 2 different ids as below.
+     */
+    void initializeSystemResources() {
+        // max instance limit 64
+        const uint32_t kMaxInstanceCount = 0x1000;
+        // max pixel count: up to 4 instances of 4K
+        const uint32_t kMaxPixelCount = 0x1001;
+        mSystemResources.push_back(C2SystemResourceStruct(kMaxInstanceCount, CONST, 64));
+        mSystemResources.push_back(C2SystemResourceStruct(kMaxPixelCount, CONST, 4 * 3840 * 2400));
+
+        // Nothing is excluded, but lets just add them with amount as 0.
+        mExcludedResources.push_back(C2SystemResourceStruct(kMaxInstanceCount, CONST, 0));
+        mExcludedResources.push_back(C2SystemResourceStruct(kMaxPixelCount, CONST, 0));
+    }
+
+    bool validateSystemResources(const std::vector<C2SystemResourceStruct>& resources) const {
+        if (resources.size() != mSystemResources.size()) {
+            return false;
+        }
+
+        for (const auto& resource : mSystemResources) {
+            auto found = std::find_if(resources.begin(),
+                                      resources.end(),
+                                      [resource](const C2SystemResourceStruct& item) {
+                                          return (item.id == resource.id &&
+                                                  item.kind == resource.kind &&
+                                                  item.amount == resource.amount); });
+
+            if (found == resources.end()) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+private:
+    std::vector<C2SystemResourceStruct> mSystemResources;
+    std::vector<C2SystemResourceStruct> mExcludedResources;
 };
 
 using D = CCodecConfig::Domain;
@@ -707,4 +786,94 @@
     }
 }
 
+static
+c2_status_t queryGlobalResources(std::shared_ptr<Codec2Client::Configurable>& configurable,
+                                 std::vector<C2SystemResourceStruct>& resources) {
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+    c2_status_t c2err = configurable->query(
+            {},
+            {C2ResourcesCapacityTuning::PARAM_TYPE, C2ResourcesExcludedTuning::PARAM_TYPE},
+            C2_MAY_BLOCK, &heapParams);
+
+    if (c2err == C2_OK && heapParams.size() == 2u) {
+        // Construct Globally available resources now.
+        // Get the total capacity first.
+        const C2ResourcesCapacityTuning* systemCapacity =
+                C2ResourcesCapacityTuning::From(heapParams[0].get());
+        if (systemCapacity && *systemCapacity) {
+            for (size_t i = 0; i < systemCapacity->flexCount(); ++i) {
+                resources.push_back(systemCapacity->m.values[i]);
+                ALOGI("System Resource[%zu]{%u %d %jd}", i,
+                      systemCapacity->m.values[i].id,
+                      systemCapacity->m.values[i].kind,
+                      systemCapacity->m.values[i].amount);
+            }
+        } else {
+            ALOGE("Failed to get C2ResourcesCapacityTuning");
+            return C2_BAD_VALUE;
+        }
+
+        // Get the excluded resource info.
+        // The available resource should exclude this, if there are any.
+        const C2ResourcesExcludedTuning* systemExcluded =
+                C2ResourcesExcludedTuning::From(heapParams[1].get());
+        if (systemExcluded && *systemExcluded) {
+            for (size_t i = 0; i < systemExcluded->flexCount(); ++i) {
+                const C2SystemResourceStruct& resource =
+                    systemExcluded->m.values[i];
+                ALOGI("Excluded Resource[%zu]{%u %d %jd}", i,
+                      resource.id, resource.kind, resource.amount);
+                uint64_t excluded = (resource.kind == CONST) ? resource.amount : 0;
+                auto found = std::find_if(resources.begin(),
+                                          resources.end(),
+                                          [resource](const C2SystemResourceStruct& item) {
+                                              return item.id == resource.id; });
+
+                if (found != resources.end()) {
+                    // Take off excluded resources from available resources.
+                    if (found->amount >= excluded) {
+                        found->amount -= excluded;
+                    } else {
+                       ALOGE("Excluded resources(%jd) can't be more than Available resources(%jd)",
+                             excluded, found->amount);
+                       return C2_BAD_VALUE;
+                    }
+                } else {
+                    ALOGE("Failed to find the resource [%u]", resource.id);
+                    return C2_BAD_VALUE;
+                }
+            }
+        } else {
+            ALOGE("Failed to get C2ResourcesExcludedTuning");
+            return C2_BAD_VALUE;
+        }
+
+    } else if (c2err == C2_OK) {
+        ALOGE("Expected query results for 2 params, but got %zu", heapParams.size());
+        return C2_BAD_VALUE;
+    } else {
+        ALOGE("Failed to query component store for system resources: %d", c2err);
+        return c2err;
+    }
+
+    size_t index = 0;
+    for (const auto& resource : resources) {
+        ALOGI("Globally Available System Resource[%zu]{%u %d %jd}", index++,
+              resource.id, resource.kind, resource.amount);
+    }
+    return c2err;
+}
+
+TEST_F(CCodecConfigTest, QuerySystemResources) {
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER, MIMETYPE_VIDEO_AVC);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+    std::vector<C2SystemResourceStruct> resources;
+    ASSERT_EQ(C2_OK, queryGlobalResources(mConfigurable, resources));
+
+    // Make sure that what we got from the query is the same as what was added.
+    ASSERT_TRUE(validateSystemResources(resources));
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 574f1b9..c50d197 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -337,6 +337,27 @@
             && layout.planes[layout.PLANE_V].rowSampling == 2);
 }
 
+bool IsYUV422_10bit(const C2GraphicView &view) {
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.numPlanes == 3
+            && layout.type == C2PlanarLayout::TYPE_YUV
+            && layout.planes[layout.PLANE_Y].channel == C2PlaneInfo::CHANNEL_Y
+            && layout.planes[layout.PLANE_Y].allocatedDepth == 16
+            && layout.planes[layout.PLANE_Y].bitDepth == 10
+            && layout.planes[layout.PLANE_Y].colSampling == 1
+            && layout.planes[layout.PLANE_Y].rowSampling == 1
+            && layout.planes[layout.PLANE_U].channel == C2PlaneInfo::CHANNEL_CB
+            && layout.planes[layout.PLANE_U].allocatedDepth == 16
+            && layout.planes[layout.PLANE_U].bitDepth == 10
+            && layout.planes[layout.PLANE_U].colSampling == 2
+            && layout.planes[layout.PLANE_U].rowSampling == 1
+            && layout.planes[layout.PLANE_V].channel == C2PlaneInfo::CHANNEL_CR
+            && layout.planes[layout.PLANE_V].allocatedDepth == 16
+            && layout.planes[layout.PLANE_V].bitDepth == 10
+            && layout.planes[layout.PLANE_V].colSampling == 2
+            && layout.planes[layout.PLANE_V].rowSampling == 1);
+}
+
 
 bool IsNV12(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
@@ -369,6 +390,22 @@
             && layout.planes[layout.PLANE_V].rightShift == 6);
 }
 
+bool IsP210(const C2GraphicView &view) {
+    if (!IsYUV422_10bit(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 4
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_U].offset == 0
+            && layout.planes[layout.PLANE_V].colInc == 4
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_V].offset == 2
+            && layout.planes[layout.PLANE_Y].rightShift == 6
+            && layout.planes[layout.PLANE_U].rightShift == 6
+            && layout.planes[layout.PLANE_V].rightShift == 6);
+}
 
 bool IsNV21(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 8daf3d8..e2ff75c 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -99,6 +99,11 @@
 bool IsYUV420_10bit(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a YUV 422 10-10-10 layout.
+ */
+bool IsYUV422_10bit(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV12 layout.
  */
 bool IsNV12(const C2GraphicView &view);
@@ -109,6 +114,11 @@
 bool IsP010(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a P210 layout.
+ */
+bool IsP210(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV21 layout.
  */
 bool IsNV21(const C2GraphicView &view);
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 3841831..925a1b37e 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -547,6 +547,22 @@
     { C2Config::picture_type_t::B_FRAME,        PICTURE_TYPE_B },
 };
 
+ALookup<C2Config::profile_t, int32_t> sAc4Profiles = {
+    { C2Config::PROFILE_AC4_0_0, AC4Profile00 },
+    { C2Config::PROFILE_AC4_1_0, AC4Profile10 },
+    { C2Config::PROFILE_AC4_1_1, AC4Profile11 },
+    { C2Config::PROFILE_AC4_2_1, AC4Profile21 },
+    { C2Config::PROFILE_AC4_2_2, AC4Profile22 },
+};
+
+ALookup<C2Config::level_t, int32_t> sAc4Levels = {
+    { C2Config::LEVEL_AC4_0, AC4Level0 },
+    { C2Config::LEVEL_AC4_1, AC4Level1 },
+    { C2Config::LEVEL_AC4_2, AC4Level2 },
+    { C2Config::LEVEL_AC4_3, AC4Level3 },
+    { C2Config::LEVEL_AC4_4, AC4Level4 },
+};
+
 /**
  * A helper that passes through vendor extension profile and level values.
  */
@@ -831,6 +847,21 @@
     bool mIsHdr10Plus;
 };
 
+struct Ac4ProfileLevelMapper : ProfileLevelMapperHelper {
+    virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+        return sAc4Levels.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+        return sAc4Levels.map(from, to);
+    }
+    virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+        return sAc4Profiles.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+        return sAc4Profiles.map(from, to);
+    }
+};
+
 } // namespace
 
 // the default mapper is used for media types that do not support HDR
@@ -866,6 +897,8 @@
         return std::make_shared<Av1ProfileLevelMapper>();
     } else if (mediaType == MIMETYPE_VIDEO_APV) {
         return std::make_shared<ApvProfileLevelMapper>();
+    } else if (mediaType == MIMETYPE_AUDIO_AC4) {
+        return std::make_shared<Ac4ProfileLevelMapper>();
     }
     return nullptr;
 }
diff --git a/media/codec2/tests/aidl/GraphicsTracker_test.cpp b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
index da79277..ec3e3d1 100644
--- a/media/codec2/tests/aidl/GraphicsTracker_test.cpp
+++ b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
@@ -76,15 +76,15 @@
     }
 };
 
-struct DummyConsumerListener : public android::BnConsumerListener {
+struct DummyConsumerListener : public android::IConsumerListener {
     void onFrameAvailable(const BufferItem& /* item */) override {}
     void onBuffersReleased() override {}
     void onSidebandStreamChanged() override {}
 };
 
-struct TestConsumerListener : public android::BnConsumerListener {
-    TestConsumerListener(const sp<IGraphicBufferConsumer> &consumer)
-            : BnConsumerListener(), mConsumer(consumer) {}
+struct TestConsumerListener : public android::IConsumerListener {
+    TestConsumerListener(const sp<IGraphicBufferConsumer>& consumer)
+        : IConsumerListener(), mConsumer(consumer) {}
     void onFrameAvailable(const BufferItem&) override {
         constexpr static int kRenderDelayUs = 1000000/30; // 30fps
         BufferItem buffer;
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9d1cbff..623b843 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -174,14 +174,12 @@
         "libui",
         "libdl",
         "libvndksupport",
-        "libprocessgroup",
     ],
 }
 
 // public dependency for implementing Codec 2 components
 cc_defaults {
     name: "libcodec2-impl-defaults",
-    cpp_std: "gnu++17",
 
     defaults: [
         "libcodec2_hal_selection",
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 8cfa1d7..31d95f6 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -17,6 +17,8 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "C2AllocatorBlob"
 
+#include <memory>
+#include <mutex>
 #include <set>
 
 #include <C2AllocatorBlob.h>
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 971b5a5..3670008 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -20,12 +20,17 @@
 
 #include <mutex>
 
+#include <android_media_codec.h>
+
+#include <aidl/android/hardware/graphics/common/Cta861_3.h>
 #include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
+#include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android/hardware/graphics/common/1.2/types.h>
 #include <cutils/native_handle.h>
 #include <drm/drm_fourcc.h>
 #include <gralloctypes/Gralloc4.h>
 #include <hardware/gralloc.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
 #include <ui/Rect.h>
@@ -969,6 +974,203 @@
     return C2HandleGralloc::MigrateNativeHandle(handle, generation, igbp_id, igbp_slot);
 }
 
+using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Smpte2086;
+
+namespace {
+
+class GrallocBuffer {
+public:
+    GrallocBuffer(const C2Handle *const handle) : mBuffer(nullptr) {
+        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
+
+        // Unwrap raw buffer handle from the C2Handle
+        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+        if (!nh) {
+            ALOGE("handle is not compatible to any gralloc C2Handle types");
+            return;
+        }
+        // Import the raw handle so IMapper can use the buffer. The imported
+        // handle must be freed when the client is done with the buffer.
+        status_t status = mapper.importBufferNoValidate(
+                nh,
+                &mBuffer);
+
+        if (status != OK) {
+            ALOGE("Failed to import buffer. Status: %d.", status);
+            return;
+        }
+
+        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+        //         does not clone the fds. Thus we need to delete the handle
+        //         without closing it.
+        native_handle_delete(nh);
+    }
+
+    ~GrallocBuffer() {
+        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
+        if (mBuffer) {
+            // Free the imported buffer handle. This does not release the
+            // underlying buffer itself.
+            mapper.freeBuffer(mBuffer);
+        }
+    }
+
+    buffer_handle_t get() const { return mBuffer; }
+    operator bool() const { return (mBuffer != nullptr); }
+private:
+    buffer_handle_t mBuffer;
+};
+
+}  // namespace
+
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
+    c2_status_t err = C2_OK;
+    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
+    GrallocBuffer buffer(handle);
+    if (!buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    if (staticInfo) {
+        ALOGV("Grabbing static HDR info from gralloc metadata");
+        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
+        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
+        (*staticInfo)->maxCll = 0;
+        (*staticInfo)->maxFall = 0;
+
+        std::optional<Smpte2086> smpte2086;
+        status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
+        if (status != OK || !smpte2086) {
+            err = C2_CORRUPTED;
+        } else {
+            if (smpte2086) {
+                  (*staticInfo)->mastering.red.x    = smpte2086->primaryRed.x;
+                  (*staticInfo)->mastering.red.y    = smpte2086->primaryRed.y;
+                  (*staticInfo)->mastering.green.x  = smpte2086->primaryGreen.x;
+                  (*staticInfo)->mastering.green.y  = smpte2086->primaryGreen.y;
+                  (*staticInfo)->mastering.blue.x   = smpte2086->primaryBlue.x;
+                  (*staticInfo)->mastering.blue.y   = smpte2086->primaryBlue.y;
+                  (*staticInfo)->mastering.white.x  = smpte2086->whitePoint.x;
+                  (*staticInfo)->mastering.white.y  = smpte2086->whitePoint.y;
+
+                  (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
+                  (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
+            }
+        }
+
+        std::optional<Cta861_3> cta861_3;
+        status = mapper.getCta861_3(buffer.get(), &cta861_3);
+        if (status != OK || !cta861_3) {
+            err = C2_CORRUPTED;
+        } else {
+            if (cta861_3) {
+                  (*staticInfo)->maxCll   = cta861_3->maxContentLightLevel;
+                  (*staticInfo)->maxFall  = cta861_3->maxFrameAverageLightLevel;
+            }
+        }
+    }
+
+    if (err != C2_OK) {
+        staticInfo->reset();
+    }
+
+    if (dynamicInfo) {
+        ALOGV("Grabbing dynamic HDR info from gralloc metadata");
+        dynamicInfo->reset();
+        std::optional<std::vector<uint8_t>> vec;
+        status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
+        if (status != OK || !vec) {
+            dynamicInfo->reset();
+            err = C2_CORRUPTED;
+        } else {
+            if (vec) {
+                *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
+                      vec->size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+                memcpy((*dynamicInfo)->m.data, vec->data(), vec->size());
+            }
+        }
+    }
+
+    return err;
+}
+
+c2_status_t SetMetadataToGralloc4Handle(
+        android_dataspace_t dataSpace,
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle) {
+    c2_status_t err = C2_OK;
+    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
+    GrallocBuffer buffer(handle);
+    if (!buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    // Use V0 dataspaces for Gralloc4+
+    if (android::media::codec::provider_->dataspace_v0_partial()) {
+        ColorUtils::convertDataSpaceToV0(dataSpace);
+    }
+    status_t status = mapper.setDataspace(buffer.get(), static_cast<ui::Dataspace>(dataSpace));
+    if (status != OK) {
+       err = C2_CORRUPTED;
+    }
+    if (staticInfo && *staticInfo) {
+        ALOGV("Setting static HDR info as gralloc metadata");
+        std::optional<Smpte2086> smpte2086 = Smpte2086{
+            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
+            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
+            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
+            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
+            staticInfo->mastering.maxLuminance,
+            staticInfo->mastering.minLuminance,
+        };
+        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance) {
+            status = mapper.setSmpte2086(buffer.get(), smpte2086);
+            if (status != OK) {
+                err = C2_CORRUPTED;
+            }
+        }
+        std::optional<Cta861_3> cta861_3 = Cta861_3{
+            staticInfo->maxCll,
+            staticInfo->maxFall,
+        };
+        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel) {
+            status = mapper.setCta861_3(buffer.get(), cta861_3);
+            if (status != OK) {
+                err = C2_CORRUPTED;
+            }
+        }
+    }
+    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+        ALOGV("Setting dynamic HDR info as gralloc metadata");
+        if (dynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
+            std::optional<std::vector<uint8_t>> smpte2094_40 = std::vector<uint8_t>();
+            smpte2094_40->resize(dynamicInfo->flexCount());
+            memcpy(smpte2094_40->data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+
+            status = mapper.setSmpte2094_40(buffer.get(), smpte2094_40);
+            if (status != OK) {
+                err = C2_CORRUPTED;
+            }
+        } else {
+            err = C2_BAD_VALUE;
+        }
+    }
+
+    return err;
+}
 
 
 class C2AllocationGralloc : public C2GraphicAllocation {
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index e9223fb..fa90c5a 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -150,6 +150,11 @@
         { "mpegh-high", C2Config::PROFILE_MPEGH_HIGH },
         { "mpegh-lc", C2Config::PROFILE_MPEGH_LC },
         { "mpegh-baseline", C2Config::PROFILE_MPEGH_BASELINE },
+        { "ac4-00-00", C2Config::PROFILE_AC4_0_0 },
+        { "ac4-01-00", C2Config::PROFILE_AC4_1_0 },
+        { "ac4-01-01", C2Config::PROFILE_AC4_1_1 },
+        { "ac4-02-01", C2Config::PROFILE_AC4_2_1 },
+        { "ac4-02-02", C2Config::PROFILE_AC4_2_2 },
 }))
 
 DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2Config::level_t, ({
@@ -261,6 +266,11 @@
         { "mpegh-3", C2Config::LEVEL_MPEGH_3 },
         { "mpegh-4", C2Config::LEVEL_MPEGH_4 },
         { "mpegh-5", C2Config::LEVEL_MPEGH_5 },
+        { "ac4-00", C2Config::LEVEL_AC4_0 },
+        { "ac4-01", C2Config::LEVEL_AC4_1 },
+        { "ac4-02", C2Config::LEVEL_AC4_2 },
+        { "ac4-03", C2Config::LEVEL_AC4_3 },
+        { "ac4-04", C2Config::LEVEL_AC4_4 },
 }))
 
 DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2BufferData::type_t, ({
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index d28f926..fe8a9eb 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -21,6 +21,7 @@
 #include <android-base/unique_fd.h>
 #include <cutils/native_handle.h>
 #include <utils/Log.h>
+#include <utils/SystemClock.h>
 #include <ui/Fence.h>
 
 #include <C2FenceFactory.h>
@@ -631,50 +632,66 @@
  */
 class _C2FenceFactory::PipeFenceImpl: public C2Fence::Impl {
 private:
-    bool waitEvent(c2_nsecs_t timeoutNs, bool *hangUp, bool *event) const {
+    // Wait for an event using ::ppoll() and handle any interruptions by signals
+    // (EINTR) by retrying and accounting for time already waited.
+    // Note: while ppoll in theory supports blocking signals, Linux NPTL library does
+    // not allow blocking 2 realtime signals (see man nptl), so we do need to handle
+    // signal interruptions.
+    bool waitEvent(c2_nsecs_t timeoutNs, bool *hangUp) const {
         if (!mValid) {
             *hangUp = true;
-            return true;
+            return false;
         }
 
-        struct pollfd pfd;
-        pfd.fd = mPipeFd.get();
-        pfd.events = POLLIN;
-        pfd.revents = 0;
-        struct timespec ts;
+        int64_t waitTimeNs = kPipeFenceWaitLimitSecs * 1000000000LL;
         if (timeoutNs >= 0) {
-            ts.tv_sec = int(timeoutNs / 1000000000);
-            ts.tv_nsec = timeoutNs % 1000000000;
+            waitTimeNs = timeoutNs;
         } else {
             ALOGD("polling for indefinite duration requested, but changed to wait for %d sec",
                   kPipeFenceWaitLimitSecs);
-            ts.tv_sec = kPipeFenceWaitLimitSecs;
-            ts.tv_nsec = 0;
         }
-        int ret = ::ppoll(&pfd, 1, &ts, nullptr);
-        if (ret >= 0) {
-            if (pfd.revents) {
-                if (pfd.revents & ~POLLIN) {
-                    // Mostly this means the writing end fd was closed.
-                    *hangUp = true;
-                    mValid = false;
-                    ALOGD("PipeFenceImpl: pipe fd hangup or err event returned");
+
+        int64_t startTsNs = android::elapsedRealtimeNano();
+        int64_t elapsedTsNs = 0;
+        int tryNum = 0;
+        int noEvent = 0;
+        do {
+            struct pollfd pfd;
+            pfd.fd = mPipeFd.get();
+            pfd.events = POLLIN;
+            pfd.revents = 0;
+            struct timespec ts;
+
+            ts.tv_sec = int((waitTimeNs - elapsedTsNs) / 1000000000);
+            ts.tv_nsec = (waitTimeNs - elapsedTsNs) % 1000000000;
+            ++tryNum;
+            int ret = ::ppoll(&pfd, 1, &ts, nullptr);
+            if (ret >= 0) {
+                if (pfd.revents) {
+                    if (pfd.revents & ~POLLIN) {
+                        // Mostly this means the writing end fd was closed.
+                        *hangUp = true;
+                        mValid = false;
+                        ALOGD("PipeFenceImpl: pipe fd hangup or err event returned");
+                        return false;
+                    }
+                    return true;
                 }
-                *event = true;
-                return true;
+                ++noEvent;
+                // retry if the deadline has not been reached yet.
+            } else if (errno != EINTR) {
+                ALOGE("PipeFenceImpl: poll() error %d", errno);
+                *hangUp = true;
+                mValid = false;
+                return false;
             }
-            // event not ready yet.
-            return true;
-        }
-        if (errno == EINTR) {
-            // poll() was cancelled by signal or inner kernel status.
-            return false;
-        }
-        // Since poll error happened here, treat the error is irrecoverable.
-        ALOGE("PipeFenceImpl: poll() error %d", errno);
-        *hangUp = true;
-        mValid = false;
-        return true;
+            elapsedTsNs = android::elapsedRealtimeNano() - startTsNs;
+        } while (elapsedTsNs < waitTimeNs);
+        // Interrupted by EINTR until the deadline expired;
+        // treat this as the event not being ready yet.
+        ALOGV("PipeFenceImpl: tried %d times NoEvent %d times, spent %lld nanosecs",
+              tryNum, noEvent, (long long)elapsedTsNs);
+        return false;
     }
 
 public:
@@ -683,18 +700,14 @@
             return C2_BAD_STATE;
         }
         bool hangUp = false;
-        bool event = false;
-        if (waitEvent(timeoutNs, &hangUp, &event)) {
-            if (hangUp) {
-                return C2_BAD_STATE;
-            }
-            if (event) {
-                return C2_OK;
-            }
-            return C2_TIMED_OUT;
-        } else {
-            return C2_CANCELED;
+        bool event = waitEvent(timeoutNs, &hangUp);
+        if (hangUp) {
+            return C2_BAD_STATE;
         }
+        if (event) {
+            return C2_OK;
+        }
+        return C2_TIMED_OUT;
     }
 
     virtual bool valid() const {
@@ -702,13 +715,8 @@
             return false;
         }
         bool hangUp = false;
-        bool event = false;
-        if (waitEvent(0, &hangUp, &event)) {
-            if (hangUp) {
-                return false;
-            }
-        }
-        return true;
+        (void)waitEvent(0, &hangUp);
+        return !hangUp;
     }
 
     virtual bool ready() const {
@@ -716,13 +724,8 @@
             return false;
         }
         bool hangUp = false;
-        bool event = false;
-        if (waitEvent(0, &hangUp, &event)) {
-            if (event) {
-                return true;
-            }
-        }
-        return false;
+        bool event = waitEvent(0, &hangUp);
+        return event;
     }
 
     virtual int fd() const {
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 6ec9d6b..7ca86c1 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -576,11 +576,13 @@
                 res = allocatorStore->fetchAllocator(
                         C2PlatformAllocatorStore::IGBA, &allocator);
                 if (res == C2_OK) {
+                    bool blockFence =
+                            (components.size() == 1 && allocatorParam.blockFenceSupport);
                     std::shared_ptr<C2BlockPool> ptr(
                             new C2IgbaBlockPool(allocator,
                                                 allocatorParam.igba,
                                                 std::move(allocatorParam.waitableFd),
-                                                poolId), deleter);
+                                                blockFence, poolId), deleter);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index 53b6262..1018583 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -20,6 +20,7 @@
 #include <functional>
 
 #include <C2Buffer.h>
+#include <C2Config.h>
 
 namespace android {
 // VNDK
@@ -92,6 +93,38 @@
         uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
         uint64_t origId);
 
+/**
+ * Get HDR metadata from Gralloc4 handle.
+ *
+ * \param[in]   handle      handle of the allocation
+ * \param[out]  staticInfo  HDR static info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \param[out]  dynamicInfo HDR dynamic info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \return C2_OK if successful
+ */
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
+
+/**
+ * Set metadata to Gralloc4 handle.
+ *
+ * \param[in]   dataSpace   Dataspace to set.
+ * \param[in]   staticInfo  HDR static info to set. Ignored if null or invalid.
+ * \param[in]   dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
+ * \param[out]  handle      handle of the allocation.
+ * \return C2_OK if successful
+ */
+c2_status_t SetMetadataToGralloc4Handle(
+        const android_dataspace_t dataSpace,
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle);
+
 class C2AllocatorGralloc : public C2Allocator {
 public:
     virtual id_t getId() const override;
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 806932c..6c08935 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -22,7 +22,9 @@
 #include <C2Buffer.h>
 #include <C2BlockInternal.h>
 
+#include <atomic>
 #include <functional>
+#include <mutex>
 
 namespace android {
 class GraphicBuffer;
diff --git a/media/codec2/vndk/include/C2IgbaBufferPriv.h b/media/codec2/vndk/include/C2IgbaBufferPriv.h
index 5879263..4692981 100644
--- a/media/codec2/vndk/include/C2IgbaBufferPriv.h
+++ b/media/codec2/vndk/include/C2IgbaBufferPriv.h
@@ -37,6 +37,7 @@
             const std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator>
                     &igba,
             ::android::base::unique_fd &&ufd,
+            const bool blockFence,
             const local_id_t localId);
 
     virtual ~C2IgbaBlockPool() = default;
@@ -75,12 +76,14 @@
         uint32_t format,
         C2MemoryUsage usage,
         c2_nsecs_t timeoutNs,
+        bool blockFence,
         uint64_t *origId /* nonnull */,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
         C2Fence *fence /* nonnull */);
 
     const std::shared_ptr<C2Allocator> mAllocator;
     const std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> mIgba;
+    const bool mBlockFence;
     const local_id_t mLocalId;
     std::atomic<bool> mValid;
     C2Fence mWaitFence;
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index 6fa155a..d434379 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -180,6 +180,10 @@
     std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> igba;
     ::android::base::unique_fd waitableFd; // This will be passed and moved to C2Fence
                                            // implementation.
+    bool blockFenceSupport;
+
+    C2PlatformAllocatorDesc()
+            : allocatorId(C2AllocatorStore::DEFAULT_LINEAR), blockFenceSupport(false) {}
 };
 
 /**
diff --git a/media/codec2/vndk/platform/C2IgbaBuffer.cpp b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
index 3622d5e..0ae15ab 100644
--- a/media/codec2/vndk/platform/C2IgbaBuffer.cpp
+++ b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
@@ -167,7 +167,10 @@
         const std::shared_ptr<C2Allocator> &allocator,
         const std::shared_ptr<C2IGBA> &igba,
         ::android::base::unique_fd &&ufd,
-        const local_id_t localId) : mAllocator(allocator), mIgba(igba), mLocalId(localId) {
+        const bool blockFence,
+        const local_id_t localId) :
+                mAllocator(allocator), mIgba(igba),
+                mBlockFence(blockFence), mLocalId(localId) {
     if (!mIgba) {
         mValid = false;
         return;
@@ -190,7 +193,8 @@
     uint64_t origId;
     C2Fence fence;
     c2_status_t res = _fetchGraphicBlock(
-            width, height, format, usage, kBlockingFetchTimeoutNs, &origId, block, &fence);
+            width, height, format, usage, kBlockingFetchTimeoutNs, false,
+            &origId, block, &fence);
 
     if (res == C2_TIMED_OUT) {
         // SyncFence waiting timeout.
@@ -205,7 +209,8 @@
         uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block, C2Fence *fence) {
     uint64_t origId;
-    c2_status_t res = _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+    c2_status_t res = _fetchGraphicBlock(
+            width, height, format, usage, 0LL, mBlockFence, &origId, block, fence);
     if (res == C2_TIMED_OUT) {
         *fence = C2Fence();
         return C2_BLOCKING;
@@ -216,6 +221,7 @@
 c2_status_t C2IgbaBlockPool::_fetchGraphicBlock(
         uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
         c2_nsecs_t timeoutNs,
+        bool blockFence,
         uint64_t *origId,
         std::shared_ptr<C2GraphicBlock> *block,
         C2Fence *fence) {
@@ -263,32 +269,36 @@
         C2Fence syncFence  = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
         AHardwareBuffer *ahwb = allocation.buffer.release(); // This is acquired.
         CHECK(AHardwareBuffer_getId(ahwb, origId) == ::android::OK);
+        bool syncFenceSignaled = false;
 
-        // We are waiting for SyncFence here for backward compatibility.
-        // H/W based Sync Fence could be returned to improve pipeline latency.
-        //
-        // TODO: Add a component configuration for returning sync fence
-        // from fetchGraphicBlock() as the C2Fence output param(b/322283520).
-        // In the case C2_OK along with GraphicBlock must be returned together.
-        c2_status_t res = syncFence.wait(kSyncFenceWaitNs);
-        if (res != C2_OK) {
-            AHardwareBuffer_release(ahwb);
-            bool aidlRet = true;
-            ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
-            ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
-                  res, status.isOk(), aidlRet);
-            return C2_TIMED_OUT;
+        if (!blockFence) {
+            // If a sync fence is not supposed to be returned along with the block,
+            // we wait for the SyncFence here for backward compatibility.
+            c2_status_t res = syncFence.wait(kSyncFenceWaitNs);
+            if (res != C2_OK) {
+                AHardwareBuffer_release(ahwb);
+                bool aidlRet = true;
+                ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
+                ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
+                      res, status.isOk(), aidlRet);
+                return C2_TIMED_OUT;
+            }
+            syncFenceSignaled = true;
         }
 
-        res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
+        c2_status_t res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
         AHardwareBuffer_release(ahwb);
         if (res != C2_OK) {
             bool aidlRet = true;
             ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
             ALOGE("We got AHWB via AIDL but failed to created C2GraphicBlock err(%d) aidl(%d, %d)",
                   res, status.isOk(), aidlRet);
+            return res;
         }
-        return res;
+        if (!syncFenceSignaled) {
+            *fence = syncFence;
+        }
+        return C2_OK;
     } else {
         return C2_OMITTED;
     }
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index add28e0..e56e7f4 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -44,3 +44,15 @@
     export_shared_lib_headers: ["aaudio-aidl-cpp"],
     shared_libs: ["aaudio-aidl-cpp"],
 }
+
+cc_library_headers {
+    // used in mediametrics's audio analytics
+    // just for the defines and typedefs, so no libraries like in libaaudio_headers
+    name: "libaaudio_headers-module",
+    export_include_dirs: ["include"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+}
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index e670642..30a411b 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -424,6 +424,13 @@
         printf("          2 = PCM_FLOAT\n");
         printf("          3 = PCM_I24_PACKED\n");
         printf("          4 = PCM_I32\n");
+        printf("          6 = MP3\n");
+        printf("          7 = AAC_LC\n");
+        printf("          8 = AAC_HE_V1\n");
+        printf("          9 = AAC_HE_V2\n");
+        printf("          10 = AAC_ELD\n");
+        printf("          12 = AAC_XHE\n");
+        printf("          13 = OPUS\n");
         printf("      -i{inputPreset} eg. 5 for AAUDIO_INPUT_PRESET_CAMCORDER\n");
         printf("      -m{0|1|2|3} set MMAP policy\n");
         printf("          0 = _UNSPECIFIED, use aaudio.mmap_policy system property, default\n");
@@ -435,6 +442,7 @@
         printf("          n for _NONE\n");
         printf("          l for _LATENCY\n");
         printf("          p for _POWER_SAVING;\n");
+        printf("          o for _POWER_SAVING_OFFLOADED;\n");
         printf("      -r{sampleRate} for example 44100\n");
         printf("      -s{duration} in seconds, default is %d\n", DEFAULT_DURATION_SECONDS);
         printf("      -u{usage} eg. 14 for AAUDIO_USAGE_GAME\n");
@@ -477,6 +485,9 @@
             case 'p':
                 mode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
                 break;
+            case 'o':
+                mode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED;
+                break;
             default:
                 printf("ERROR: invalid performance mode %c\n", c);
                 break;
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 956b3cd..5b8ab59 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -71,7 +71,7 @@
 class AAudioSimplePlayer {
 public:
     AAudioSimplePlayer() {}
-    ~AAudioSimplePlayer() {
+    virtual ~AAudioSimplePlayer() {
         close();
     };
 
@@ -119,7 +119,8 @@
     aaudio_result_t open(const AAudioParameters &parameters,
                          AAudioStream_dataCallback dataCallback = nullptr,
                          AAudioStream_errorCallback errorCallback = nullptr,
-                         void *userContext = nullptr) {
+                         void *userContext = nullptr,
+                         AAudioStream_presentationEndCallback presentationEndCallback = nullptr) {
         aaudio_result_t result = AAUDIO_OK;
 
         // Use an AAudioStreamBuilder to contain requested parameters.
@@ -137,6 +138,10 @@
         if (errorCallback != nullptr) {
             AAudioStreamBuilder_setErrorCallback(builder, errorCallback, userContext);
         }
+        if (presentationEndCallback != nullptr) {
+            AAudioStreamBuilder_setPresentationEndCallback(
+                    builder, presentationEndCallback, userContext);
+        }
         //AAudioStreamBuilder_setFramesPerDataCallback(builder, CALLBACK_SIZE_FRAMES);
         //AAudioStreamBuilder_setBufferCapacityInFrames(builder, 48 * 8);
 
@@ -271,6 +276,24 @@
         return result;
     }
 
+    aaudio_result_t setOffloadDelayPadding(int delay, int padding) {
+        aaudio_result_t result = AAudioStream_setOffloadDelayPadding(mStream, delay, padding);
+        if (result != AAUDIO_OK) {
+            printf("WARNING - AAudioStream_setOffloadDelayPadding(%d, %d) returned %d %s\n",
+                   delay, padding, result, AAudio_convertResultToText(result));
+        }
+        return result;
+    }
+
+    aaudio_result_t setOffloadEndOfStream() {
+        aaudio_result_t result = AAudioStream_setOffloadEndOfStream(mStream);
+        if (result != AAUDIO_OK) {
+            printf("ERROR - AAudioStream_setOffloadEndOfStream() returned %d %s\n",
+                   result, AAudio_convertResultToText(result));
+        }
+        return result;
+    }
+
     AAudioStream *getStream() const {
         return mStream;
     }
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 07fed18..9014c3b 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -323,7 +323,7 @@
             [&]() {
                 char** tags = nullptr;
                 (void)AAudioStream_obtainTags(mAaudioStream, &tags);
-                AAudioStream_releaseTags(mAaudioStream, tags);
+                AAudioStream_destroyTags(mAaudioStream, tags);
             },
             [&]() {
                 (void)AAudioStream_isMMapUsed(mAaudioStream);
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
index 4c2d291..ae139d9 100644
--- a/media/libaaudio/include/system/aaudio/AAudio.h
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -65,7 +65,7 @@
 /**
  * Allocate and read the audio attributes' tags for the stream into a buffer.
  * The client is responsible to free the memory for tags by calling
- * {@link #AAudioStream_releaseTags} unless the number of tags is 0.
+ * {@link #AAudioStream_destroyTags} unless the number of tags is 0.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param tags a pointer to a variable that will be set to a pointer to an array of char* pointers
@@ -82,7 +82,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param tags reference provided by AAudioStream_obtainTags()
  */
-void AAudioStream_releaseTags(AAudioStream* _Nonnull stream, char* _Nonnull * _Nullable tags);
+void AAudioStream_destroyTags(AAudioStream* _Nonnull stream, char* _Nonnull * _Nullable tags);
 
 #ifdef __cplusplus
 }
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 0758170..5aa4964 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -281,5 +281,12 @@
         java: {
             sdk_version: "module_current",
         },
+        ndk: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
     },
 }
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index e59f0ec..33f152c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -134,12 +134,7 @@
     request.getConfiguration().setInputPreset(getInputPreset());
     request.getConfiguration().setPrivacySensitive(isPrivacySensitive());
 
-    // When sample rate conversion is needed, we use the device sample rate instead of the
-    // requested sample rate to scale the capacity in configureDataInformation().
-    // Thus, we should scale the capacity here to cancel out the (sampleRate / deviceSampleRate)
-    // scaling there.
-    request.getConfiguration().setBufferCapacity(builder.getBufferCapacity()
-            * 48000 / getSampleRate());
+    request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
     if (getServiceHandle() < 0
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 0427777..a7ac12e 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -108,6 +108,10 @@
     mFlowGraph.reset();
     // Prevent stale data from being played.
     mAudioEndpoint->eraseDataMemory();
+    // All data has been erased. To keep the mixer for a shared stream from using stale
+    // counters, which could make the service side think the stream started flowing before
+    // the client actually wrote data, advance the client to match the server position.
+    advanceClientToMatchServerPosition(0 /*serverMargin*/);
 }
 
 void AudioStreamInternalPlay::prepareBuffersForStop() {
@@ -117,7 +121,8 @@
         return;
     }
     // Sleep until the DSP has read all of the data written.
-    int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+    int64_t validFramesInBuffer =
+            mAudioEndpoint->getDataWriteCounter() - mAudioEndpoint->getDataReadCounter();
     if (validFramesInBuffer >= 0) {
         int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
 
@@ -131,7 +136,8 @@
         // Sleep until we are confident the DSP has consumed all of the valid data.
         // Sleep for one extra burst as a safety margin because the IsochronousClockModel
         // is not perfectly accurate.
-        int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+        // The ClockModel uses the server frame position so do not use getFramesWritten().
+        int64_t positionInEmptyMemory = mAudioEndpoint->getDataWriteCounter() + getFramesPerBurst();
         int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
         int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
         // Prevent sleeping for too long.
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index 430ba83..94b342b 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -269,9 +269,8 @@
     int64_t framesDelta = nextBurstPosition - mMarkerFramePosition;
     int64_t nanosDelta = convertDeltaPositionToTime(framesDelta);
     int64_t time = mMarkerNanoTime + nanosDelta;
-//    ALOGD("convertPositionToTime: pos = %llu --> time = %llu",
-//         (unsigned long long)framePosition,
-//         (unsigned long long)time);
+//    ALOGD("%s(): pos = %" PRId64 " --> time = %" PRId64, __func__,
+//            framePosition, time);
     return time;
 }
 
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 64f115c..ecffcbd 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -629,7 +629,7 @@
     return aaTags.size();
 }
 
-AAUDIO_API void AAudioStream_releaseTags(AAudioStream* stream, char** tags) {
+AAUDIO_API void AAudioStream_destroyTags(AAudioStream* stream, char** tags) {
     if (tags == nullptr) {
         return;
     }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index fdda3b7..2f65faf 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -206,6 +206,7 @@
 
     aaudio_result_t result = requestStart_l();
     if (result == AAUDIO_OK) {
+        mPlayerBase->baseUpdateDeviceIds(getDeviceIds());
         // We only call this for logging in "dumpsys audio". So ignore return code.
         (void) mPlayerBase->startWithStatus(getDeviceIds());
     }
@@ -480,6 +481,10 @@
         // Run callback loop. This may take a very long time.
         procResult = mThreadProc(mThreadArg);
         mThreadRegistrationResult = unregisterThread();
+    } else {
+        // If we cannot register the thread then it has probably become disconnected.
+        // The only way to inform the app from this thread is with an error callback.
+        maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
     }
     return procResult;
 }
diff --git a/media/libaaudio/src/flowgraph/RampLinear.cpp b/media/libaaudio/src/flowgraph/RampLinear.cpp
index 80ac72a..32ad260 100644
--- a/media/libaaudio/src/flowgraph/RampLinear.cpp
+++ b/media/libaaudio/src/flowgraph/RampLinear.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "RampLinear"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
 #include <algorithm>
 #include <unistd.h>
 #include "FlowGraphNode.h"
@@ -31,6 +35,10 @@
 }
 
 void RampLinear::setTarget(float target) {
+    if (std::isnan(target)) {
+        ALOGE("%s rejected to set target as nan", __func__);
+        return;
+    }
     mTarget.store(target);
     // If the ramp has not been used then start immediately at this level.
     if (mLastCallCount == kInitialCallCount) {
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
index e47ee8e..e806449 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <cassert>
 #include <math.h>
 #include "IntegerRatio.h"
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
index a14ee47..c46f7aa 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <cassert>
 #include <math.h>
 #include "SincResampler.h"
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
index d459abf..679d159 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <cassert>
 #include <math.h>
 
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dfb9a01..cdd004c 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -105,26 +105,32 @@
         // If the caller specified an exact size then use a block size adapter.
         if (mBlockAdapter != nullptr) {
             int32_t byteCount = buffer.getFrameCount() * getBytesPerDeviceFrame();
-            callbackResult = mBlockAdapter->processVariableBlock(
+            std::tie(callbackResult, written) = mBlockAdapter->processVariableBlock(
                     buffer.data(), byteCount);
         } else {
             // Call using the AAudio callback interface.
             callbackResult = callDataCallbackFrames(buffer.data(),
                                                     buffer.getFrameCount());
+            written = callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE ?
+                    buffer.getFrameCount() * getBytesPerDeviceFrame() : 0;
         }
-        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            written = buffer.getFrameCount() * getBytesPerDeviceFrame();
-        } else {
+
+        if (callbackResult != AAUDIO_CALLBACK_RESULT_CONTINUE) {
             if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
                 ALOGD("%s() callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
             } else {
                 ALOGW("%s() callback returned invalid result = %d",
                       __func__, callbackResult);
             }
-            written = 0;
-            systemStopInternal();
-            // Disable the callback just in case the system keeps trying to call us.
-            mCallbackEnabled.store(false);
+            if (callbackResult != AAUDIO_CALLBACK_RESULT_STOP || shouldStopStream()) {
+                // If the callback result is STOP, stop the stream if it should be stopped.
+                // Currently, the framework will not call stop if the client is doing offload
+                // playback and waiting for stream end. The client will already be in the
+                // STOPPING state while waiting for stream end.
+                systemStopInternal();
+                // Disable the callback just in case the system keeps trying to call us.
+                mCallbackEnabled.store(false);
+            }
         }
 
         if (processCommands() != AAUDIO_OK) {
@@ -170,23 +176,23 @@
         // If the caller specified an exact size then use a block size adapter.
         if (mBlockAdapter != nullptr) {
             int32_t byteCount = buffer.getFrameCount() * getBytesPerDeviceFrame();
-            callbackResult = mBlockAdapter->processVariableBlock(
+            std::tie(callbackResult, written) = mBlockAdapter->processVariableBlock(
                     buffer.data(), byteCount);
         } else {
             // Call using the AAudio callback interface.
             callbackResult = callDataCallbackFrames(buffer.data(),
                                                     buffer.getFrameCount());
+            written = callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE ?
+                    buffer.getFrameCount() * getBytesPerDeviceFrame() : 0;
         }
-        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            written = buffer.getFrameCount() * getBytesPerDeviceFrame();
-        } else {
+        if (callbackResult != AAUDIO_CALLBACK_RESULT_CONTINUE) {
             if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
                 ALOGD("%s() callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
             } else {
                 ALOGW("%s() callback returned invalid result = %d",
                       __func__, callbackResult);
             }
-            written = 0;
+            // Always stop the recording case if callback result is not CONTINUE.
             systemStopInternal();
             // Disable the callback just in case the system keeps trying to call us.
             mCallbackEnabled.store(false);
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index a729161..5cecc75 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -126,6 +126,8 @@
      */
     virtual int32_t getBufferCapacityFromDevice() const = 0;
 
+    virtual bool shouldStopStream() const { return true; }
+
     // This is used for exact matching by MediaMetrics. So do not change it.
     // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_AAUDIO
     static constexpr char     kCallerName[] = "aaudio";
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index f0a25b5..0cb2328 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -131,6 +131,9 @@
             // that is some multiple of the burst size.
             notificationFrames = 0 - DEFAULT_BURSTS_PER_BUFFER_CAPACITY;
         }
+    } else if (getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+        streamTransferType = AudioTrack::transfer_type::TRANSFER_SYNC_NOTIF_CALLBACK;
+        callback = wp<AudioTrack::IAudioTrackCallback>::fromExisting(this);
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
@@ -470,10 +473,9 @@
     case AAUDIO_STREAM_STATE_STOPPING:
         if (mAudioTrack->stopped()) {
             if (getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
-                std::lock_guard<std::mutex> lock(mStreamLock);
-                if (!mOffloadEosPending) {
-                    break;
-                }
+                // For offload mode, the state will be updated to `STOPPED` by the
+                // stream end callback.
+                break;
             }
             setState(AAUDIO_STREAM_STATE_STOPPED);
         }
@@ -684,6 +686,7 @@
         return result;
     }
     mOffloadEosPending = true;
+    setState(AAUDIO_STREAM_STATE_STOPPING);
     return AAUDIO_OK;
 }
 
@@ -703,6 +706,8 @@
         std::lock_guard<std::mutex> lock(mStreamLock);
         if (mOffloadEosPending) {
             requestStart_l();
+        } else {
+            setState(AAUDIO_STREAM_STATE_STOPPED);
         }
         mOffloadEosPending = false;
     }
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 82ba772..764fd0b 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -112,6 +112,8 @@
     }
 
     void maybeCallPresentationEndCallback();
+
+    bool shouldStopStream() const final { return !mOffloadEosPending; }
     // Offload end ----------------------------------------
 
 #if AAUDIO_USE_VOLUME_SHAPER
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 2425ae4..0de0546 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -82,7 +82,7 @@
     AAudioStreamBuilder_addTag; # systemapi
     AAudioStreamBuilder_clearTags; # systemapi
     AAudioStream_obtainTags; # systemapi
-    AAudioStream_releaseTags; #systemapi
+    AAudioStream_destroyTags; #systemapi
   local:
     *;
 };
diff --git a/media/libaaudio/src/utility/FixedBlockAdapter.h b/media/libaaudio/src/utility/FixedBlockAdapter.h
index 290e473..516e67f 100644
--- a/media/libaaudio/src/utility/FixedBlockAdapter.h
+++ b/media/libaaudio/src/utility/FixedBlockAdapter.h
@@ -19,6 +19,7 @@
 
 #include <memory>
 #include <stdio.h>
+#include <utility>
 
 /**
  * Interface for a class that needs fixed-size blocks.
@@ -29,6 +30,10 @@
     virtual int32_t onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) = 0;
 };
 
+// The first value is the processing result code, where 0 means OK.
+// The second value is the actual processed size in bytes.
+using AdapterProcessResult = std::pair<int32_t, int32_t>;
+
 /**
  * Base class for a variable-to-fixed-size block adapter.
  */
@@ -53,9 +58,9 @@
      *
      * @param buffer
      * @param numBytes
-     * @return zero if OK or a non-zero code
+     * @return
      */
-    virtual int32_t processVariableBlock(uint8_t *buffer, int32_t numBytes) = 0;
+    virtual AdapterProcessResult processVariableBlock(uint8_t *buffer, int32_t numBytes) = 0;
 
     /**
      * Free internal resources.
diff --git a/media/libaaudio/src/utility/FixedBlockReader.cpp b/media/libaaudio/src/utility/FixedBlockReader.cpp
index 7931fa0..2d3174d 100644
--- a/media/libaaudio/src/utility/FixedBlockReader.cpp
+++ b/media/libaaudio/src/utility/FixedBlockReader.cpp
@@ -44,26 +44,32 @@
     return bytesToRead;
 }
 
-int32_t FixedBlockReader::processVariableBlock(uint8_t *buffer, int32_t numBytes) {
+AdapterProcessResult FixedBlockReader::processVariableBlock(uint8_t *buffer, int32_t numBytes) {
     int32_t result = 0;
     int32_t bytesLeft = numBytes;
+    int32_t bytesProcessed = 0;
     while(bytesLeft > 0 && result == 0) {
         if (mPosition < mSize) {
             // Use up bytes currently in storage.
             int32_t bytesRead = readFromStorage(buffer, bytesLeft);
             buffer += bytesRead;
             bytesLeft -= bytesRead;
+            bytesProcessed += bytesRead;
         } else if (bytesLeft >= mSize) {
             // Read through if enough for a complete block.
             result = mFixedBlockProcessor.onProcessFixedBlock(buffer, mSize);
+            if (result != 0) {
+                break;
+            }
             buffer += mSize;
             bytesLeft -= mSize;
+            bytesProcessed += mSize;
         } else {
             // Just need a partial block so we have to use storage.
             result = mFixedBlockProcessor.onProcessFixedBlock(mStorage.get(), mSize);
             mPosition = 0;
         }
     }
-    return result;
+    return {result, bytesProcessed};
 }
 
diff --git a/media/libaaudio/src/utility/FixedBlockReader.h b/media/libaaudio/src/utility/FixedBlockReader.h
index dc82416..a8a037f 100644
--- a/media/libaaudio/src/utility/FixedBlockReader.h
+++ b/media/libaaudio/src/utility/FixedBlockReader.h
@@ -41,7 +41,7 @@
     /**
      * Read into a variable sized block.
      */
-    int32_t processVariableBlock(uint8_t *buffer, int32_t numBytes) override;
+    AdapterProcessResult processVariableBlock(uint8_t *buffer, int32_t numBytes) override;
 };
 
 
diff --git a/media/libaaudio/src/utility/FixedBlockWriter.cpp b/media/libaaudio/src/utility/FixedBlockWriter.cpp
index afb83c1..ff6ef8a 100644
--- a/media/libaaudio/src/utility/FixedBlockWriter.cpp
+++ b/media/libaaudio/src/utility/FixedBlockWriter.cpp
@@ -24,7 +24,7 @@
         : FixedBlockAdapter(fixedBlockProcessor) {}
 
 
-int32_t FixedBlockWriter::writeToStorage(uint8_t *buffer, int32_t numBytes) {
+int32_t FixedBlockWriter::writeToStorage(const uint8_t *buffer, int32_t numBytes) {
     int32_t bytesToStore = numBytes;
     int32_t roomAvailable = mSize - mPosition;
     if (bytesToStore > roomAvailable) {
@@ -35,15 +35,17 @@
     return bytesToStore;
 }
 
-int32_t FixedBlockWriter::processVariableBlock(uint8_t *buffer, int32_t numBytes) {
+AdapterProcessResult FixedBlockWriter::processVariableBlock(uint8_t *buffer, int32_t numBytes) {
     int32_t result = 0;
     int32_t bytesLeft = numBytes;
+    int32_t bytesProcessed = 0;
 
     // If we already have data in storage then add to it.
     if (mPosition > 0) {
         int32_t bytesWritten = writeToStorage(buffer, bytesLeft);
         buffer += bytesWritten;
         bytesLeft -= bytesWritten;
+        bytesProcessed += bytesWritten;
         // If storage full then flush it out
         if (mPosition == mSize) {
             result = mFixedBlockProcessor.onProcessFixedBlock(mStorage.get(), mSize);
@@ -54,14 +56,19 @@
     // Write through if enough for a complete block.
     while(bytesLeft > mSize && result == 0) {
         result = mFixedBlockProcessor.onProcessFixedBlock(buffer, mSize);
+        if (result != 0) {
+            break;
+        }
         buffer += mSize;
         bytesLeft -= mSize;
+        bytesProcessed += mSize;
     }
 
     // Save any remaining partial block for next time.
     if (bytesLeft > 0) {
         writeToStorage(buffer, bytesLeft);
+        bytesProcessed += bytesLeft;
     }
 
-    return result;
+    return {result, bytesProcessed};
 }
diff --git a/media/libaaudio/src/utility/FixedBlockWriter.h b/media/libaaudio/src/utility/FixedBlockWriter.h
index 3e89b5d..8010124 100644
--- a/media/libaaudio/src/utility/FixedBlockWriter.h
+++ b/media/libaaudio/src/utility/FixedBlockWriter.h
@@ -32,12 +32,12 @@
 
     virtual ~FixedBlockWriter() = default;
 
-    int32_t writeToStorage(uint8_t *buffer, int32_t numBytes);
+    int32_t writeToStorage(const uint8_t *buffer, int32_t numBytes);
 
     /**
      * Write from a variable sized block.
      */
-    int32_t processVariableBlock(uint8_t *buffer, int32_t numBytes) override;
+    AdapterProcessResult processVariableBlock(uint8_t *buffer, int32_t numBytes) override;
 };
 
 #endif /* AAUDIO_FIXED_BLOCK_WRITER_H */
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 6aa04a8..1ff75be 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -276,3 +276,19 @@
         ],
     },
 }
+
+cc_binary {
+    name: "test_pcm_offload",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_pcm_offload.cpp"],
+    header_libs: ["libaaudio_example_utils"],
+    shared_libs: ["libaaudio"],
+}
+
+cc_binary {
+    name: "test_compress_offload",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_compress_offload.cpp"],
+    header_libs: ["libaaudio_example_utils"],
+    shared_libs: ["libaaudio"],
+}
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index fcb083d..52c17cf 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -132,7 +132,7 @@
         readTagsSet.insert(readTags[i]);
     }
     EXPECT_EQ(addedTags, readTagsSet);
-    AAudioStream_releaseTags(aaudioStream, readTags);
+    AAudioStream_destroyTags(aaudioStream, readTags);
 
     aaudio_input_preset_t expectedPreset =
             (preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
diff --git a/media/libaaudio/tests/test_block_adapter.cpp b/media/libaaudio/tests/test_block_adapter.cpp
index a22abb9..1023b2d 100644
--- a/media/libaaudio/tests/test_block_adapter.cpp
+++ b/media/libaaudio/tests/test_block_adapter.cpp
@@ -77,7 +77,7 @@
     }
 
     // Simulate audio input from a variable sized callback.
-    int32_t testInputWrite(int32_t variableCount) {
+    AdapterProcessResult testInputWrite(int32_t variableCount) {
         fillSequence(mTestBuffer, variableCount);
         int32_t sizeBytes = variableCount * sizeof(int32_t);
         return mFixedBlockWriter.processVariableBlock((uint8_t *) mTestBuffer, sizeBytes);
@@ -105,13 +105,14 @@
     }
 
     // Simulate audio output from a variable sized callback.
-    int32_t testOutputRead(int32_t variableCount) {
+    AdapterProcessResult testOutputRead(int32_t variableCount) {
         int32_t sizeBytes = variableCount * sizeof(int32_t);
-        int32_t result = mFixedBlockReader.processVariableBlock((uint8_t *) mTestBuffer, sizeBytes);
+        auto [result, dataProcessedBytes] =
+                mFixedBlockReader.processVariableBlock((uint8_t *) mTestBuffer, sizeBytes);
         if (result >= 0) {
             result = checkSequence((int32_t *)mTestBuffer, variableCount);
         }
-        return result;
+        return {result, dataProcessedBytes};
     }
 
 private:
@@ -122,6 +123,7 @@
 TEST(test_block_adapter, block_adapter_write) {
     TestBlockWriter tester;
     int result = 0;
+    int bytesProcessed = 0;
     const int numLoops = 1000;
 
     for (int i = 0; i<numLoops && result == 0; i++) {
@@ -129,7 +131,8 @@
         int32_t size = (r % TEST_BUFFER_SIZE);
         ASSERT_LE(size, TEST_BUFFER_SIZE);
         ASSERT_GE(size, 0);
-        result = tester.testInputWrite(size);
+        std::tie(result, bytesProcessed) = tester.testInputWrite(size);
+        ASSERT_GE(bytesProcessed, 0);
     }
     ASSERT_EQ(0, result);
 }
@@ -137,6 +140,7 @@
 TEST(test_block_adapter, block_adapter_read) {
     TestBlockReader tester;
     int result = 0;
+    int bytesProcessed = 0;
     const int numLoops = 1000;
 
     for (int i = 0; i < numLoops && result == 0; i++) {
@@ -144,7 +148,8 @@
         int32_t size = (r % TEST_BUFFER_SIZE);
         ASSERT_LE(size, TEST_BUFFER_SIZE);
         ASSERT_GE(size, 0);
-        result = tester.testOutputRead(size);
+        std::tie(result, bytesProcessed) = tester.testOutputRead(size);
+        ASSERT_GE(bytesProcessed, 0);
     }
     ASSERT_EQ(0, result);
 };
diff --git a/media/libaaudio/tests/test_compress_offload.cpp b/media/libaaudio/tests/test_compress_offload.cpp
new file mode 100644
index 0000000..5670d1d
--- /dev/null
+++ b/media/libaaudio/tests/test_compress_offload.cpp
@@ -0,0 +1,279 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Compress offload
+
+#include <atomic>
+#include <condition_variable>
+#include <fstream>
+#include <memory>
+#include <mutex>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string>
+#include <thread>
+#include <vector>
+#include <utility>
+
+#include <aaudio/AAudio.h>
+#include <android-base/thread_annotations.h>
+
+#include "AAudioArgsParser.h"
+#include "AAudioSimplePlayer.h"
+#include "SineGenerator.h"
+
+const static int DEFAULT_TIME_TO_RUN_IN_SECOND = 60;
+
+aaudio_data_callback_result_t MyDatacallback(AAudioStream* stream,
+                                             void* userData,
+                                             void* audioData,
+                                             int32_t numFrames);
+
+void MyErrorCallback(AAudioStream* /*stream*/, void* /*userData*/, aaudio_result_t error);
+
+void MyPresentationEndCallback(AAudioStream* /*stream*/, void* userData);
+
+class FileDataProvider {
+public:
+    bool loadData(const std::string& filePath) {
+        mPosition = 0;
+        std::ifstream is(filePath, std::ios::in | std::ios::binary);
+        if (!is.good()) {
+            printf("Failed to open file %s\n", filePath.c_str());
+            return false;
+        }
+        is.seekg(0, is.end);
+        mData.reserve(mData.size() + is.tellg());
+        is.seekg(0, is.beg);
+        mData.insert(mData.end(), std::istreambuf_iterator<char>(is),
+                     std::istreambuf_iterator<char>());
+        if (is.fail()) {
+            printf("Failed to read from file %s\n", filePath.c_str());
+            return false;
+        }
+        return true;
+    }
+
+    std::pair<bool, int> copyData(void* audioData, int32_t numFrames) {
+        bool endOfFile = false;
+        int dataToCopy = std::min((int)mData.size() - mPosition, numFrames);
+        std::copy(mData.begin() + mPosition, mData.begin() + mPosition + dataToCopy,
+                  static_cast<uint8_t*>(audioData));
+        mPosition += dataToCopy;
+        if (mPosition >= mData.size()) {
+            endOfFile = true;
+            mPosition = 0;
+        }
+        return {endOfFile, dataToCopy};
+    }
+
+private:
+    std::vector<uint8_t> mData;
+    int mPosition = 0;
+};
+
+class CompressOffloadPlayer : public AAudioSimplePlayer {
+public:
+    CompressOffloadPlayer(AAudioArgsParser& argParser, int delay, int padding,
+                          bool useDataCallback, const std::string& filePath)
+            : mArgParser(argParser), mDelay(delay), mPadding(padding),
+              mUseDataCallback(useDataCallback), mFilePath(filePath) {
+    }
+
+    aaudio_result_t open() {
+        if (!mDataProvider.loadData(mFilePath)) {
+            return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+        }
+        return AAudioSimplePlayer::open(
+                mArgParser,
+                mUseDataCallback ? &MyDatacallback : nullptr,
+                &MyErrorCallback,
+                this,
+                &MyPresentationEndCallback);
+    }
+
+    aaudio_data_callback_result_t renderAudio(AAudioStream* /*stream*/,
+                                              void* audioData,
+                                              int32_t numFrames) {
+        {
+            std::lock_guard lk(mWaitForExitingLock);
+            mReadyToExit = false;
+        }
+        auto [endOfFile, dataCopied] = mDataProvider.copyData(audioData, numFrames);
+        if (endOfFile) {
+            printf("%s(%d): endOfFile=%d, dataCopied=%d\n", __func__, numFrames, endOfFile,
+                   dataCopied);
+            setOffloadEndOfStream();
+            return AAUDIO_CALLBACK_RESULT_STOP;
+        }
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+    void presentationEnd() {
+        printf("Presentation end\n");
+        {
+            std::lock_guard lk(mWaitForExitingLock);
+            mReadyToExit = true;
+        }
+        mCV.notify_one();
+        setOffloadDelayPadding(mDelay, mPadding);
+        if (!mUseDataCallback) {
+            std::thread(&CompressOffloadPlayer::writeAllStreamData, this).detach();
+        }
+    }
+
+    void writeData() {
+        writeAllStreamData();
+    }
+
+    void waitForExiting() {
+        printf("%s\n", __func__);
+        std::unique_lock lk(mWaitForExitingLock);
+        mCV.wait(lk, [this]{ return mReadyToExit; });
+    }
+
+private:
+    void writeAllStreamData() {
+        int dataSize = mArgParser.getSampleRate();
+        uint8_t data[dataSize];
+        static constexpr int64_t kTimeOutNanos = 1e9;
+        while (true) {
+            auto [endOfFile, dataCopied] = mDataProvider.copyData(data, dataSize);
+            auto result = AAudioStream_write(getStream(), data, dataCopied, kTimeOutNanos);
+            if (result < AAUDIO_OK) {
+                printf("Failed to write data, error=%d\n", result);
+                break;
+            }
+            if (endOfFile) {
+                printf("All data from the file is written, set offload end of stream\n");
+                setOffloadEndOfStream();
+                break;
+            }
+        }
+    }
+
+    const AAudioArgsParser mArgParser;
+    const int mDelay;
+    const int mPadding;
+    const bool mUseDataCallback;
+    const std::string mFilePath;
+
+    FileDataProvider mDataProvider;
+    std::mutex mWaitForExitingLock;
+    std::condition_variable mCV;
+    bool mReadyToExit GUARDED_BY(mWaitForExitingLock) = false;
+};
+
+aaudio_data_callback_result_t MyDatacallback(AAudioStream* stream,
+                                             void* userData,
+                                             void* audioData,
+                                             int32_t numFrames) {
+    CompressOffloadPlayer* player = static_cast<CompressOffloadPlayer*>(userData);
+    return player->renderAudio(stream, audioData, numFrames);
+}
+
+void MyErrorCallback(AAudioStream* /*stream*/, void* /*userData*/, aaudio_result_t error) {
+    printf("Error callback, error=%d\n", error);
+}
+
+void MyPresentationEndCallback(AAudioStream* /*stream*/, void* userData) {
+    CompressOffloadPlayer* player = static_cast<CompressOffloadPlayer*>(userData);
+    return player->presentationEnd();
+}
+
+static void usage() {
+    AAudioArgsParser::usage();
+    printf("      -D{delay} offload delay in frames\n");
+    printf("      -P{padding} offload padding in frames\n");
+    printf("      -T{seconds} time to run the test\n");
+    printf("      -F{filePath} file path for the compressed data\n");
+    printf("      -B use blocking write instead of data callback\n");
+}
+
+int main(int argc, char **argv) {
+    AAudioArgsParser argParser;
+    int delay = 0;
+    int padding = 0;
+    int timeToRun = DEFAULT_TIME_TO_RUN_IN_SECOND;
+    bool useDataCallback = true;
+    std::string filePath;
+    for (int i = 1; i < argc; ++i) {
+        const char *arg = argv[i];
+        if (argParser.parseArg(arg)) {
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'D':
+                        delay = atoi(&arg[2]);
+                        break;
+                    case 'P':
+                        padding = atoi(&arg[2]);
+                        break;
+                    case 'T':
+                        timeToRun = atoi(&arg[2]);
+                        break;
+                    case 'B':
+                        useDataCallback = false;
+                        break;
+                    case 'F':
+                        filePath = &arg[2];
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+            }
+        }
+    }
+
+    if (filePath.empty()) {
+        printf("A file path must be specified\n");
+        usage();
+        exit(EXIT_FAILURE);
+    }
+
+    // Force to use offload mode
+    argParser.setPerformanceMode(AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED);
+
+    CompressOffloadPlayer player(
+            argParser, delay, padding, useDataCallback, filePath);
+    if (auto result = player.open(); result != AAUDIO_OK) {
+        printf("Failed to open stream, error=%d\n", result);
+        exit(EXIT_FAILURE);
+    }
+
+    // Failure to set the offload delay and padding affects the gapless transition
+    // between tracks but doesn't affect playback.
+    (void) player.setOffloadDelayPadding(delay, padding);
+
+    if (auto result = player.start(); result != AAUDIO_OK) {
+        printf("Failed to start stream, error=%d\n", result);
+        exit(EXIT_FAILURE);
+    } else if (!useDataCallback) {
+        player.writeData();
+    }
+
+    sleep(timeToRun);
+
+    player.stop();
+
+    player.waitForExiting();
+
+    return EXIT_SUCCESS;
+}
diff --git a/media/libaaudio/tests/test_pcm_offload.cpp b/media/libaaudio/tests/test_pcm_offload.cpp
new file mode 100644
index 0000000..c01c77a
--- /dev/null
+++ b/media/libaaudio/tests/test_pcm_offload.cpp
@@ -0,0 +1,253 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// PCM offload
+
+#include <memory>
+#include <stdio.h>
+#include <stdlib.h>
+#include <vector>
+
+#include <aaudio/AAudio.h>
+
+#include "AAudioArgsParser.h"
+#include "AAudioSimplePlayer.h"
+#include "SineGenerator.h"
+
+const static int DEFAULT_TIME_TO_RUN_IN_SECOND = 5;
+
+aaudio_data_callback_result_t MyDatacallback(AAudioStream* stream,
+                                             void* userData,
+                                             void* audioData,
+                                             int32_t numFrames);
+
+void MyErrorCallback(AAudioStream* /*stream*/, void* /*userData*/, aaudio_result_t error);
+
+void MyPresentationEndCallback(AAudioStream* /*stream*/, void* userData);
+
+class OffloadPlayer : public AAudioSimplePlayer {
+public:
+    OffloadPlayer(AAudioArgsParser& argParser, int delay, int padding, int streamFrames,
+                  bool useDataCallback)
+            : mArgParser(argParser), mDelay(delay), mPadding(padding), mStreamFrames(streamFrames),
+              mUseDataCallback(useDataCallback) {
+    }
+
+    aaudio_result_t open() {
+        aaudio_result_t result = AAudioSimplePlayer::open(
+                mArgParser,
+                mUseDataCallback ? &MyDatacallback : nullptr,
+                &MyErrorCallback,
+                this,
+                &MyPresentationEndCallback);
+        if (result != AAUDIO_OK) {
+            return result;
+        }
+        mChannelCount = getChannelCount();
+        for (int i = 0; i < mChannelCount; ++i) {
+            SineGenerator sine;
+            sine.setup(440.0, 48000.0);
+            mSines.push_back(sine);
+        }
+        return result;
+    }
+
+    aaudio_data_callback_result_t renderAudio(AAudioStream* stream,
+                                              void* audioData,
+                                              int32_t numFrames) {
+        // Just handle PCM_16 and PCM_FLOAT for testing
+        if (!fillData(stream, audioData, numFrames)) {
+            return AAUDIO_CALLBACK_RESULT_STOP;
+        }
+        mFramesWritten += numFrames;
+        if (mStreamFrames > 0 && mFramesWritten >= mStreamFrames) {
+            if (auto result = setOffloadEndOfStream(); result != AAUDIO_OK) {
+                printf("Failed to set offload end of stream, stopping the stream now\n");
+                return AAUDIO_CALLBACK_RESULT_STOP;
+            }
+            (void) setOffloadDelayPadding(mDelay, mPadding);
+            mFramesWritten = 0;
+        }
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+    void presentationEnd(AAudioStream* stream) {
+        printf("Presentation end\n");
+        if (!mUseDataCallback) {
+            writeAllStreamData(stream);
+        }
+    }
+
+    void writeData() {
+        writeAllStreamData(getStream());
+    }
+
+private:
+    void writeAllStreamData(AAudioStream* stream) {
+        int bytesPerFrame = mChannelCount;
+        std::shared_ptr<uint8_t[]> data;
+        switch (AAudioStream_getFormat(stream)) {
+            case AAUDIO_FORMAT_PCM_I16: {
+                bytesPerFrame *= 2;
+            } break;
+            case AAUDIO_FORMAT_PCM_FLOAT: {
+                bytesPerFrame *= 4;
+            } break;
+            default:
+                printf("Unsupported format %d\n", AAudioStream_getFormat(stream));
+                return;
+        }
+        data = std::make_shared<uint8_t[]>(bytesPerFrame * mStreamFrames);
+        fillData(stream, static_cast<void*>(data.get()), mStreamFrames);
+        int bytesWritten = 0;
+        int framesLeft = mStreamFrames;
+        while (framesLeft > 0) {
+            auto framesWritten = AAudioStream_write(
+                    stream, static_cast<void *>(&data[bytesWritten]),
+                    framesLeft, NANOS_PER_SECOND);
+            if (framesWritten < 0) {
+                printf("Failed to write data %d\n", framesWritten);
+                return;
+            }
+            printf("Write data succeed, frames=%d\n", framesWritten);
+            framesLeft -= framesWritten;
+            bytesWritten += framesWritten * bytesPerFrame;
+        }
+        if (auto result = setOffloadEndOfStream(); result != AAUDIO_OK) {
+            printf("Failed to set offload end of stream, result=%d\n", result);
+        }
+    }
+
+    bool fillData(AAudioStream* stream, void* data, int numFrames) {
+        switch (AAudioStream_getFormat(stream)) {
+            case AAUDIO_FORMAT_PCM_I16: {
+                int16_t *audioBuffer = static_cast<int16_t *>(data);
+                for (int i = 0; i < mChannelCount; ++i) {
+                    mSines[i].render(&audioBuffer[i], mChannelCount, numFrames);
+                }
+            } break;
+            case AAUDIO_FORMAT_PCM_FLOAT: {
+                float *audioBuffer = static_cast<float *>(data);
+                for (int i = 0; i < mChannelCount; ++i) {
+                    mSines[i].render(&audioBuffer[i], mChannelCount, numFrames);
+                }
+            } break;
+            default:
+                return false;
+        }
+        return true;
+    }
+
+    const AAudioArgsParser mArgParser;
+    const int mDelay;
+    const int mPadding;
+    const int mStreamFrames;
+    const bool mUseDataCallback;
+
+    int mChannelCount = 0;
+    std::vector<SineGenerator> mSines;
+    int mFramesWritten = 0;
+};
+
+aaudio_data_callback_result_t MyDatacallback(AAudioStream* stream,
+                                             void* userData,
+                                             void* audioData,
+                                             int32_t numFrames) {
+    OffloadPlayer* player = static_cast<OffloadPlayer*>(userData);
+    return player->renderAudio(stream, audioData, numFrames);
+}
+
+void MyErrorCallback(AAudioStream* /*stream*/, void* /*userData*/, aaudio_result_t error) {
+    printf("Error callback, error=%d\n", error);
+}
+
+void MyPresentationEndCallback(AAudioStream* stream, void* userData) {
+    OffloadPlayer* player = static_cast<OffloadPlayer*>(userData);
+    return player->presentationEnd(stream);
+}
+
+static void usage() {
+    AAudioArgsParser::usage();
+    printf("      -D{delay} offload delay in frames\n");
+    printf("      -P{padding} offload padding in frames\n");
+    printf("      -E{frames} frames to notify end of stream\n");
+    printf("      -T{seconds} time to run the test\n");
+    printf("      -B use blocking write instead of data callback\n");
+}
+
+int main(int argc, char **argv) {
+    AAudioArgsParser argParser;
+    int delay = 0;
+    int padding = 0;
+    int streamFrames = 0;
+    int timeToRun = DEFAULT_TIME_TO_RUN_IN_SECOND;
+    bool useDataCallback = true;
+    for (int i = 1; i < argc; ++i) {
+        const char *arg = argv[i];
+        if (argParser.parseArg(arg)) {
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'D':
+                        delay = atoi(&arg[2]);
+                        break;
+                    case 'P':
+                        padding = atoi(&arg[2]);
+                        break;
+                    case 'E':
+                        streamFrames = atoi(&arg[2]);
+                        break;
+                    case 'T':
+                        timeToRun = atoi(&arg[2]);
+                        break;
+                    case 'B':
+                        useDataCallback = false;
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+            }
+        }
+    }
+
+    // Force to use offload mode
+    argParser.setPerformanceMode(AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED);
+
+    OffloadPlayer player(argParser, delay, padding, streamFrames, useDataCallback);
+    if (auto result = player.open(); result != AAUDIO_OK) {
+        printf("Failed to open stream, error=%d\n", result);
+        exit(EXIT_FAILURE);
+    }
+
+    // Failure to set the offload delay and padding affects the gapless transition
+    // between tracks but doesn't affect playback.
+    (void) player.setOffloadDelayPadding(delay, padding);
+
+    if (auto result = player.start(); result != AAUDIO_OK) {
+        printf("Failed to start stream, error=%d\n", result);
+        exit(EXIT_FAILURE);
+    } else if (!useDataCallback) {
+        player.writeData();
+    }
+
+    sleep(timeToRun);
+
+    return EXIT_SUCCESS;
+}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 6dfb327..8b4e012 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -32,12 +32,14 @@
         "audiopolicy-aidl-cpp",
         "av-types-aidl-cpp",
         "spatializer-aidl-cpp",
+        "volumegroupcallback-aidl-cpp",
     ],
     export_static_lib_headers: [
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "av-types-aidl-cpp",
         "spatializer-aidl-cpp",
+        "volumegroupcallback-aidl-cpp",
     ],
     target: {
         darwin: {
@@ -151,6 +153,7 @@
         "libutils",
         "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
+        "volumegroupcallback-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioflinger-aidl-cpp",
@@ -160,6 +163,7 @@
         "libmediametrics",
         "libmediautils",
         "spatializer-aidl-cpp",
+        "volumegroupcallback-aidl-cpp",
     ],
 
     include_dirs: [
@@ -336,6 +340,13 @@
         java: {
             sdk_version: "module_current",
         },
+        ndk: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
     },
 }
 
@@ -465,6 +476,7 @@
         "capture_state_listener-aidl",
         "framework-permission-aidl",
         "spatializer-aidl",
+        "volumegroupcallback-aidl",
     ],
 
     double_loadable: true,
@@ -532,3 +544,30 @@
         },
     },
 }
+
+aidl_interface {
+    name: "volumegroupcallback-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/INativeAudioVolumeGroupCallback.aidl",
+    ],
+    double_loadable: true,
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+        java: {
+            sdk_version: "module_current",
+        },
+    },
+}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 9a4b45d..af390c1 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -286,7 +286,7 @@
     if (binder != nullptr) {
         // Barrier to ensure runtime permission update propagates to audioflinger
         // Must be client-side
-        interface_cast<IAudioManager>(binder)->permissionUpdateBarrier();
+        interface_cast<IAudioManager>(binder)->getNativeInterface()->permissionUpdateBarrier();
     }
 
     mSelectedDeviceId = selectedDeviceId;
@@ -1246,6 +1246,21 @@
     return mInput;
 }
 
+status_t AudioRecord::setParameters(const String8& keyValuePairs) {
+    AutoMutex lock(mLock);
+    if (mInput == AUDIO_IO_HANDLE_NONE || mAudioRecord == nullptr) {
+        return NO_INIT;
+    }
+    return statusTFromBinderStatus(mAudioRecord->setParameters(keyValuePairs.c_str()));
+}
+
+String8 AudioRecord::getParameters(const String8& keys) {
+    AutoMutex lock(mLock);
+    return mInput != AUDIO_IO_HANDLE_NONE
+               ? AudioSystem::getParameters(mInput, keys)
+               : String8();
+}
+
 // -------------------------------------------------------------------------
 
 ssize_t AudioRecord::read(void* buffer, size_t userSize, bool blocking)
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 1430913..3ef9225 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -40,6 +40,7 @@
 #include <system/audio.h>
 #include <android/media/GetInputForAttrResponse.h>
 #include <android/media/AudioMixerAttributesInternal.h>
+#include <android/media/audio/common/AudioVolumeGroupChangeEvent.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -66,6 +67,7 @@
 using media::audio::common::AudioSource;
 using media::audio::common::AudioStreamType;
 using media::audio::common::AudioUsage;
+using media::audio::common::AudioVolumeGroupChangeEvent;
 using media::audio::common::Int;
 
 std::mutex AudioSystem::gMutex;
@@ -155,8 +157,8 @@
                 return;
             }
             if (!mService || mService->getDelegate() != service) {
-                ALOGW("%s: %s unmatched service death pointers, ignoring",
-                        __func__, getServiceName());
+                ALOGW("%s: %s unmatched service death pointers, previous %p, ignoring",
+                        __func__, getServiceName(), mService.get());
                 return;
             }
             mValid = false;
@@ -195,6 +197,7 @@
         }
         if (mValid) return mService;
         if (waitMs.count() < 0) waitMs = mWaitMs;
+        auto timepointLimit = std::chrono::steady_clock::now() + waitMs;
         ul.unlock();
 
         // mediautils::getService() installs a persistent new service notification.
@@ -205,6 +208,7 @@
         ul.lock();
         // return the IAudioFlinger interface which is adapted
         // from the media::IAudioFlingerService.
+        mCv.wait_until(ul, timepointLimit, isServiceValid_l);
         return mService;
     }
 
@@ -238,9 +242,12 @@
             ALOGW_IF(old != mService,
                     "%s: service changed during callback, continuing.", __func__);
         }
-        mService = af;
-        ul.unlock();
-        if (af) onNewServiceWithAdapter(af);
+        if (af) {
+            ul.unlock();
+            onNewServiceWithAdapter(af);
+        } else {
+            mService = nullptr;
+        }
         return OK;
     }
 
@@ -270,6 +277,11 @@
         bool reportNoError = false;
         {
             std::lock_guard l(mMutex);
+            if (mService == service ||
+                    (mService && service && mService->getDelegate() == service->getDelegate())) {
+                ALOGW("%s: %s  same service, ignoring", __func__, getServiceName());
+                return;
+            }
             ALOGW_IF(mValid, "%s: %s service already valid, continuing with initialization",
                     __func__, getServiceName());
             if (mClient == nullptr) {
@@ -281,6 +293,7 @@
             mService = service;
             client = mClient;
             mValid = true;
+            mCv.notify_all();
         }
         // TODO(b/375280520) consider registerClient() within mMutex lock.
         const int64_t token = IPCThreadState::self()->clearCallingIdentity();
@@ -295,7 +308,12 @@
         return sp<AudioFlingerClientAdapter>::make(af);
     }
 
+    static bool isServiceValid_l() REQUIRES(mMutex) {
+        return mValid;
+    }
+
     static inline constinit std::mutex mMutex;
+    static inline constinit std::condition_variable mCv;
     static inline constinit sp<AudioSystem::AudioFlingerClient> mClient GUARDED_BY(mMutex);
     static inline constinit sp<IAudioFlinger> mService GUARDED_BY(mMutex);
     static inline constinit std::chrono::milliseconds mWaitMs
@@ -1002,6 +1020,10 @@
         sp<AudioSystem::AudioPolicyServiceClient> client;
         {
             std::lock_guard l(mMutex);
+            if (aps == mService) {
+                ALOGW("%s: %s same service, ignoring", __func__, getServiceName());
+                return;
+            }
             ALOGW_IF(mValid, "%s: %s service already valid, continuing with initialization",
                     __func__, getServiceName());
             if (mClient == nullptr) {
@@ -1010,6 +1032,7 @@
             client = mClient;
             mService = aps;
             mValid = true;
+            mCv.notify_all();
         }
         // TODO(b/375280520) consider registerClient() within mMutex lock.
         const int64_t token = IPCThreadState::self()->clearCallingIdentity();
@@ -1025,12 +1048,13 @@
         {
             std::lock_guard l(mMutex);
             if (!mValid) {
-                ALOGW("%s: %s service already invalidated, ignoring", __func__, getServiceName());
+                ALOGW("%s: %s service already invalidated, previous %p, ignoring",
+                        __func__, getServiceName(), mService.get());
                 return;
             }
             if (mService != service) {
-                ALOGW("%s: %s unmatched service death pointers, ignoring",
-                        __func__, getServiceName());
+                ALOGW("%s: %s unmatched service death pointers, previous %p, ignoring",
+                        __func__, getServiceName(), mService.get());
                 return;
             }
             mValid = false;
@@ -1069,6 +1093,7 @@
         }
         if (mValid) return mService;
         if (waitMs.count() < 0) waitMs = mWaitMs;
+        auto timepointLimit = std::chrono::steady_clock::now() + waitMs;
         ul.unlock();
 
         auto service = mediautils::getService<
@@ -1079,6 +1104,7 @@
         // (whereupon mService contained the actual local service pointer to use).
         // we should always return mService.
         ul.lock();
+        mCv.wait_until(ul, timepointLimit, isServiceValid_l);
         return mService;
     }
 
@@ -1108,9 +1134,12 @@
                 return OK;
             }
         }
-        mService = aps;
-        ul.unlock();
-        if (aps) onNewService(aps);
+        if (aps) {
+            ul.unlock();
+            onNewService(aps);
+        } else {
+            mService = nullptr;
+        }
         return OK;
     }
 
@@ -1124,7 +1153,12 @@
     }
 private:
 
+    static bool isServiceValid_l() REQUIRES(mMutex) {
+        return mValid;
+    }
+
     static inline constinit std::mutex mMutex;
+    static inline constinit std::condition_variable mCv;
     static inline constinit sp<AudioSystem::AudioPolicyServiceClient> mClient GUARDED_BY(mMutex);
     static inline constinit sp<IAudioPolicyService> mService GUARDED_BY(mMutex);
     static inline constinit bool mValid GUARDED_BY(mMutex) = false;
@@ -1161,7 +1195,7 @@
 
 status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
                                                const android::media::audio::common::AudioPort& port,
-                                               audio_format_t encodedFormat) {
+                                               audio_format_t encodedFormat, bool deviceSwitch) {
     const sp<IAudioPolicyService> aps = get_audio_policy_service();
 
     if (aps == nullptr) return AudioPolicyServiceTraits::getError();
@@ -1172,7 +1206,8 @@
                             legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(state)),
                     port,
                     VALUE_OR_RETURN_STATUS(
-                            legacy2aidl_audio_format_t_AudioFormatDescription(encodedFormat))));
+                            legacy2aidl_audio_format_t_AudioFormatDescription(encodedFormat)),
+                    deviceSwitch));
 }
 
 audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
@@ -1948,7 +1983,8 @@
     return (ret < 0) ? INVALID_OPERATION : NO_ERROR;
 }
 
-status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
+status_t AudioSystem::addAudioVolumeGroupCallback(
+        const sp<media::INativeAudioVolumeGroupCallback>& callback) {
     const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return AudioPolicyServiceTraits::getError();
     const auto apc = AudioSystem::getAudioPolicyClient();
@@ -1962,7 +1998,8 @@
     return (ret < 0) ? INVALID_OPERATION : NO_ERROR;
 }
 
-status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
+status_t AudioSystem::removeAudioVolumeGroupCallback(
+        const sp<media::INativeAudioVolumeGroupCallback>& callback) {
     const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return AudioPolicyServiceTraits::getError();
     const auto apc = AudioSystem::getAudioPolicyClient();
@@ -2980,14 +3017,14 @@
 
 // ----------------------------------------------------------------------------
 int AudioSystem::AudioPolicyServiceClient::addAudioVolumeGroupCallback(
-        const sp<AudioVolumeGroupCallback>& callback) {
+        const sp<media::INativeAudioVolumeGroupCallback>& callback) {
     std::lock_guard _l(mMutex);
     return mAudioVolumeGroupCallbacks.insert(callback).second
             ? mAudioVolumeGroupCallbacks.size() : -1;
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioVolumeGroupCallback(
-        const sp<AudioVolumeGroupCallback>& callback) {
+        const sp<media::INativeAudioVolumeGroupCallback>& callback) {
     std::lock_guard _l(mMutex);
     return mAudioVolumeGroupCallbacks.erase(callback) > 0
             ? mAudioVolumeGroupCallbacks.size() : -1;
@@ -2995,13 +3032,12 @@
 
 Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
                                                                         int32_t flags) {
-    volume_group_t groupLegacy = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_volume_group_t(group));
-    int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
-
+    AudioVolumeGroupChangeEvent aidlEvent;
+    aidlEvent.groupId = group;
+    aidlEvent.flags = flags;
     std::lock_guard _l(mMutex);
     for (const auto& callback : mAudioVolumeGroupCallbacks) {
-        callback->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
+        callback->onAudioVolumeGroupChanged(aidlEvent);
     }
     return Status::ok();
 }
@@ -3097,9 +3133,6 @@
     for (const auto& callback : mAudioPortCallbacks) {
         callback->onServiceDied();
     }
-    for (const auto& callback : mAudioVolumeGroupCallbacks) {
-        callback->onServiceDied();
-    }
 }
 
 ConversionResult<record_client_info_t>
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 2202539..0506645 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -271,46 +271,6 @@
         doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
-namespace {
-    class LegacyCallbackWrapper : public AudioTrack::IAudioTrackCallback {
-      const AudioTrack::legacy_callback_t mCallback;
-      void * const mData;
-      public:
-        LegacyCallbackWrapper(AudioTrack::legacy_callback_t callback, void* user)
-            : mCallback(callback), mData(user) {}
-        size_t onMoreData(const AudioTrack::Buffer & buffer) override {
-          AudioTrack::Buffer copy = buffer;
-          mCallback(AudioTrack::EVENT_MORE_DATA, mData, static_cast<void*>(&copy));
-          return copy.size();
-        }
-        void onUnderrun() override {
-            mCallback(AudioTrack::EVENT_UNDERRUN, mData, nullptr);
-        }
-        void onLoopEnd(int32_t loopsRemaining) override {
-            mCallback(AudioTrack::EVENT_LOOP_END, mData, &loopsRemaining);
-        }
-        void onMarker(uint32_t markerPosition) override {
-            mCallback(AudioTrack::EVENT_MARKER, mData, &markerPosition);
-        }
-        void onNewPos(uint32_t newPos) override {
-            mCallback(AudioTrack::EVENT_NEW_POS, mData, &newPos);
-        }
-        void onBufferEnd() override {
-            mCallback(AudioTrack::EVENT_BUFFER_END, mData, nullptr);
-        }
-        void onNewIAudioTrack() override {
-            mCallback(AudioTrack::EVENT_NEW_IAUDIOTRACK, mData, nullptr);
-        }
-        void onStreamEnd() override {
-            mCallback(AudioTrack::EVENT_STREAM_END, mData, nullptr);
-        }
-        size_t onCanWriteMoreData(const AudioTrack::Buffer & buffer) override {
-          AudioTrack::Buffer copy = buffer;
-          mCallback(AudioTrack::EVENT_CAN_WRITE_MORE_DATA, mData, static_cast<void*>(&copy));
-          return copy.size();
-        }
-    };
-}
 AudioTrack::AudioTrack(
         audio_stream_type_t streamType,
         uint32_t sampleRate,
@@ -602,6 +562,8 @@
         mOffloadInfoCopy.sample_rate = sampleRate;
         mOffloadInfoCopy.channel_mask = channelMask;
         mOffloadInfoCopy.stream_type = streamType;
+        mOffloadInfoCopy.usage = mAttributes.usage;
+        mOffloadInfoCopy.bit_width = audio_bytes_per_sample(format) * 8;
     }
 
     mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -716,58 +678,6 @@
     return logIfErrorAndReturnStatus(status, "");
 }
 
-
-status_t AudioTrack::set(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        uint32_t channelMask,
-        size_t frameCount,
-        audio_output_flags_t flags,
-        legacy_callback_t callback,
-        void* user,
-        int32_t notificationFrames,
-        const sp<IMemory>& sharedBuffer,
-        bool threadCanCallJava,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
-        const audio_attributes_t* pAttributes,
-        bool doNotReconnect,
-        float maxRequiredSpeed,
-        audio_port_handle_t selectedDeviceId)
-{
-    AttributionSourceState attributionSource;
-    auto attributionSourceUid = legacy2aidl_uid_t_int32_t(uid);
-    if (!attributionSourceUid.ok()) {
-        return logIfErrorAndReturnStatus(
-                BAD_VALUE,
-                StringPrintf("%s: received invalid attribution source uid, uid: %d, session id: %d",
-                             __func__, uid, sessionId));
-    }
-    attributionSource.uid = attributionSourceUid.value();
-    auto attributionSourcePid = legacy2aidl_pid_t_int32_t(pid);
-    if (!attributionSourcePid.ok()) {
-        return logIfErrorAndReturnStatus(
-                BAD_VALUE,
-                StringPrintf("%s: received invalid attribution source pid, pid: %d, sessionId: %d",
-                             __func__, pid, sessionId));
-    }
-    attributionSource.pid = attributionSourcePid.value();
-    attributionSource.token = sp<BBinder>::make();
-    if (callback) {
-        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
-    } else if (user) {
-        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
-    }
-    return set(streamType, sampleRate, format, static_cast<audio_channel_mask_t>(channelMask),
-               frameCount, flags, mLegacyCallbackWrapper, notificationFrames, sharedBuffer,
-               threadCanCallJava, sessionId, transferType, offloadInfo, attributionSource,
-               pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
-}
-
 // -------------------------------------------------------------------------
 
 status_t AudioTrack::start()
@@ -2909,7 +2819,8 @@
     // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
     // causes a lot of churn on the service side, and it can reject starting
     // playback of a previously created track. May also apply to other cases.
-    const int INITIAL_RETRIES = 3;
+    const int INITIAL_RETRIES = 10;
+    const uint32_t RETRY_DELAY_US = 150000;
     int retries = INITIAL_RETRIES;
 retry:
     mFlags = mOrigFlags;
@@ -2986,7 +2897,7 @@
         ALOGW("%s(%d): failed status %d, retries %d", __func__, mPortId, result, retries);
         if (--retries > 0) {
             // leave time for an eventual race condition to clear before retrying
-            usleep(500000);
+            usleep(RETRY_DELAY_US);
             goto retry;
         }
         // if no retries left, set invalid bit to force restoring at next occasion
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index fab2d95..5906791 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -72,7 +72,8 @@
 
     void setDeviceConnectionState(in AudioPolicyDeviceState state,
                                   in android.media.audio.common.AudioPort port,
-                                  in AudioFormatDescription encodedFormat);
+                                  in AudioFormatDescription encodedFormat,
+                                  boolean deviceSwitch);
 
     AudioPolicyDeviceState getDeviceConnectionState(in AudioDevice device);
 
@@ -498,4 +499,9 @@
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
+
+    /**
+     * Enable hardening independent of flag or exemption state
+     */
+     void setEnableHardening(boolean shouldEnable);
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index 1ea4156..a375202 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -50,4 +50,7 @@
   void setPreferredMicrophoneFieldDimension(float zoom);
 
   void shareAudioHistory(@utf8InCpp String sharedAudioPackageName, long sharedAudioStartMs);
+
+  /** Send parameters to the audio hardware. */
+  void setParameters(@utf8InCpp String keyValuePairs);
 }
diff --git a/media/libaudioclient/aidl/android/media/INativeAudioVolumeGroupCallback.aidl b/media/libaudioclient/aidl/android/media/INativeAudioVolumeGroupCallback.aidl
new file mode 100644
index 0000000..43c6a65
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/INativeAudioVolumeGroupCallback.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioVolumeGroupChangeEvent;
+
+/**
+ * The INativeAudioVolumeGroupCallback interface is a callback associated to the
+ * setVolumeGroupVolumeIndex API. The callback is used by the AudioPolicyManager
+ * implementation in native audio server to communicate volume changes.
+ * {@hide}
+ */
+oneway interface INativeAudioVolumeGroupCallback {
+    /**
+     * Called when the index applied by the AudioPolicyManager changes
+     */
+    void onAudioVolumeGroupChanged(in AudioVolumeGroupChangeEvent volumeChangeEvent);
+}
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 05db9e5..234bb7a 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -22,8 +22,8 @@
     name: "libaudioclient_aidl_fuzzer_defaults",
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
-        "libaudiopermission",
         "libaudiomockhal",
+        "libaudiopermission",
         "libfakeservicemanager",
         "libjsoncpp",
         "libmediametricsservice",
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index c7a04da..eec0a16 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -73,6 +73,8 @@
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    // TODO(b/183141167): need to rewrite 'dump' to avoid SIGPIPE.
+    signal(SIGPIPE, SIG_IGN);
     FuzzedDataProvider fdp(data, size);
 
     for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 80a756e..9c2a568 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -589,6 +589,12 @@
     /* Get the flags */
             audio_input_flags_t getFlags() const { AutoMutex _l(mLock); return mFlags; }
 
+    /* Set parameters */
+            status_t    setParameters(const String8& keyValuePairs);
+
+    /* Get parameters */
+            String8     getParameters(const String8& keys);
+
     /* Get active microphones. A empty vector of MicrophoneInfoFw will be passed as a parameter,
      * the data will be filled when querying the hal.
      */
@@ -692,7 +698,6 @@
     // for client callback handler
 
     wp<IAudioRecordCallback> mCallback;
-    sp<IAudioRecordCallback> mLegacyCallbackWrapper;
 
     bool                    mInitialized = false;   // Protect against double set
     // for notification APIs
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 1c171ab..16c3a7f 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -30,6 +30,7 @@
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
 #include <android/media/EffectDescriptor.h>
+#include <android/media/INativeAudioVolumeGroupCallback.h>
 #include <android/media/INativeSpatializerCallback.h>
 #include <android/media/ISoundDose.h>
 #include <android/media/ISoundDoseCallback.h>
@@ -304,7 +305,8 @@
     static void onNewAudioModulesAvailable();
     static status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                                              const android::media::audio::common::AudioPort& port,
-                                             audio_format_t encodedFormat);
+                                             audio_format_t encodedFormat,
+                                             bool deviceSwitch);
     static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                 const char *device_address);
     static status_t handleDeviceConfigChange(audio_devices_t device,
@@ -738,12 +740,12 @@
         virtual ~AudioVolumeGroupCallback() {}
 
         virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags) = 0;
-        virtual void onServiceDied() = 0;
-
     };
 
-    static status_t addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
-    static status_t removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
+    static status_t addAudioVolumeGroupCallback(
+            const sp<media::INativeAudioVolumeGroupCallback>& callback);
+    static status_t removeAudioVolumeGroupCallback(
+            const sp<media::INativeAudioVolumeGroupCallback>& callback);
 
     class AudioPortCallback : public virtual RefBase
     {
@@ -880,10 +882,10 @@
         }
 
         int addAudioVolumeGroupCallback(
-                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
+                const sp<media::INativeAudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
 
         int removeAudioVolumeGroupCallback(
-                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
+                const sp<media::INativeAudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
 
         bool isAudioVolumeGroupCbEnabled() const EXCLUDES(mMutex) {
             std::lock_guard _l(mMutex);
@@ -913,7 +915,8 @@
     private:
         mutable std::mutex mMutex;
         std::set<sp<AudioPortCallback>> mAudioPortCallbacks GUARDED_BY(mMutex);
-        std::set<sp<AudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks GUARDED_BY(mMutex);
+        std::set<sp<media::INativeAudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks
+                GUARDED_BY(mMutex);
     };
 
     private:
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 330b5ee..a2f29b7 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -453,28 +453,6 @@
                         }
             void       onFirstRef() override;
         public:
-            typedef void (*legacy_callback_t)(int event, void* user, void* info);
-            // FIXME(b/169889714): Vendor code depends on the old method signature at link time
-            status_t    set(audio_stream_type_t streamType,
-                            uint32_t sampleRate,
-                            audio_format_t format,
-                            uint32_t channelMask,
-                            size_t frameCount   = 0,
-                            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                            legacy_callback_t cbf = nullptr,
-                            void* user          = nullptr,
-                            int32_t notificationFrames = 0,
-                            const sp<IMemory>& sharedBuffer = 0,
-                            bool threadCanCallJava = false,
-                            audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
-                            transfer_type transferType = TRANSFER_DEFAULT,
-                            const audio_offload_info_t *offloadInfo = nullptr,
-                            uid_t uid = AUDIO_UID_INVALID,
-                            pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = nullptr,
-                            bool doNotReconnect = false,
-                            float maxRequiredSpeed = 1.0f,
-                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Result of constructing the AudioTrack. This must be checked for successful initialization
      * before using any AudioTrack API (except for set()), because using
@@ -1348,7 +1326,6 @@
 
     // for client callback handler
     wp<IAudioTrackCallback> mCallback;                   // callback handler for events, or NULL
-    sp<IAudioTrackCallback> mLegacyCallbackWrapper;      // wrapper for legacy callback interface
     // for notification APIs
     std::unique_ptr<SetParams> mSetParams;          // Temporary copy of ctor params to allow for
                                                     // deferred set after first reference.
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 3941280..0494028 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -136,7 +136,10 @@
     cflags: [
         "-Wthread-safety",
     ],
-    data: ["bbb*.raw"],
+    data: [
+        "bbb*.raw",
+        "sine960hz_48000_3s.ape",
+    ],
     srcs: [
         "audio_test_utils.cpp",
         "test_execution_tracer.cpp",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 2cb5f09..61dfd40 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -188,6 +188,11 @@
                                        AudioDeviceDescription::CONNECTION_USB());
 }
 
+AudioDeviceDescription make_ADD_MultichannelGroup() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_MULTICHANNEL_GROUP,
+                                       AudioDeviceDescription::CONNECTION_VIRTUAL());
+}
+
 AudioDevice make_AudioDevice(const AudioDeviceDescription& type,
                              const AudioDeviceAddress& address) {
     AudioDevice result;
@@ -481,7 +486,9 @@
                                          std::vector<uint8_t>{192, 168, 0, 1})),
                 make_AudioDevice(make_ADD_UsbHeadset(),
                                  AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
-                                         std::vector<int32_t>{1, 2}))));
+                                         std::vector<int32_t>{1, 2})),
+                make_AudioDevice(make_ADD_MultichannelGroup(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("id"))));
 
 TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
     const std::vector<uint8_t> sAnonymizedAidlAddress {0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
diff --git a/media/libaudioclient/tests/audio_test_template.xml b/media/libaudioclient/tests/audio_test_template.xml
index ed0cb21..6faa7dc 100644
--- a/media/libaudioclient/tests/audio_test_template.xml
+++ b/media/libaudioclient/tests/audio_test_template.xml
@@ -23,6 +23,7 @@
         <!-- Files used for audio testing -->
         <option name="push-file" key="bbb_1ch_8kHz_s16le.raw" value="/data/local/tmp/bbb_1ch_8kHz_s16le.raw" />
         <option name="push-file" key="bbb_2ch_24kHz_s16le.raw" value="/data/local/tmp/bbb_2ch_24kHz_s16le.raw" />
+        <option name="push-file" key="sine960hz_48000_3s.ape" value="/data/local/tmp/sine960hz_48000_3s.ape" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 7d13939..b186aaa 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -14,18 +14,26 @@
  * limitations under the License.
  */
 
+#include <thread>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AudioTestUtils"
 
 #include <android-base/file.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/IServiceManager.h>
 #include <system/audio_config.h>
 #include <utils/Log.h>
+#include <utils/SystemClock.h>
 
 #include "audio_test_utils.h"
 
 #define WAIT_PERIOD_MS 10  // from AudioTrack.cpp
 #define MAX_WAIT_TIME_MS 5000
 
+static constexpr auto kShortCallbackTimeout = std::chrono::milliseconds(500);
+static constexpr auto kLongCallbackTimeout = std::chrono::seconds(10);
+
 void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                                       const DeviceIdVector& deviceIds) {
     ALOGI("%s: audioIo=%d deviceIds=%s", __func__, audioIo, toString(deviceIds).c_str());
@@ -39,7 +47,7 @@
 
 status_t OnAudioDeviceUpdateNotifier::waitForAudioDeviceCb(audio_port_handle_t expDeviceId) {
     std::unique_lock lock(mMutex);
-    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    base::ScopedLockAssertion lock_assertion(mMutex);
     if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
         (expDeviceId != AUDIO_PORT_HANDLE_NONE &&
          std::find(mDeviceIds.begin(), mDeviceIds.end(), expDeviceId) == mDeviceIds.end())) {
@@ -71,14 +79,7 @@
       mSessionId(sessionId),
       mTransferType(transferType),
       mAttributes(attributes),
-      mOffloadInfo(info) {
-    mStopPlaying = false;
-    mBytesUsedSoFar = 0;
-    mState = PLAY_NO_INIT;
-    mMemCapacity = 0;
-    mMemoryDealer = nullptr;
-    mMemory = nullptr;
-}
+      mOffloadInfo(info) {}
 
 AudioPlayback::~AudioPlayback() {
     stop();
@@ -93,15 +94,16 @@
     attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
     attributionSource.token = sp<BBinder>::make();
     if (mTransferType == AudioTrack::TRANSFER_OBTAIN) {
-        mTrack = new AudioTrack(attributionSource);
+        mTrack = sp<TestAudioTrack>::make(attributionSource);
         mTrack->set(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, 0 /* frameCount */,
-                    mFlags, nullptr /* callback */, 0 /* notificationFrames */,
-                    nullptr /* sharedBuffer */, false /*canCallJava */, mSessionId, mTransferType,
-                    mOffloadInfo, attributionSource, mAttributes);
+                    mFlags, wp<AudioTrack::IAudioTrackCallback>::fromExisting(this),
+                    0 /* notificationFrames */, nullptr /* sharedBuffer */, false /*canCallJava */,
+                    mSessionId, mTransferType, mOffloadInfo, attributionSource, mAttributes);
     } else if (mTransferType == AudioTrack::TRANSFER_SHARED) {
-        mTrack = new AudioTrack(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, mMemory,
-                                mFlags, wp<AudioTrack::IAudioTrackCallback>::fromExisting(this), 0,
-                                mSessionId, mTransferType, nullptr, attributionSource, mAttributes);
+        mTrack = sp<TestAudioTrack>::make(
+                AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, mMemory, mFlags,
+                wp<AudioTrack::IAudioTrackCallback>::fromExisting(this), 0, mSessionId,
+                mTransferType, nullptr, attributionSource, mAttributes);
     } else {
         ALOGE("Test application is not handling transfer type %s",
               AudioTrack::convertTransferToText(mTransferType));
@@ -154,6 +156,8 @@
         if (OK == status) {
             mState = PLAY_STARTED;
             LOG_FATAL_IF(false != mTrack->stopped());
+            std::lock_guard l(mMutex);
+            mStreamEndReceived = false;
         }
     }
     return status;
@@ -164,6 +168,15 @@
     mStopPlaying = true;
 }
 
+void AudioPlayback::onStreamEnd() {
+    ALOGD("%s", __func__);
+    {
+        std::lock_guard lock(mMutex);
+        mStreamEndReceived = true;
+    }
+    mCondition.notify_all();
+}
+
 status_t AudioPlayback::fillBuffer() {
     if (PLAY_STARTED != mState) return INVALID_OPERATION;
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
@@ -190,6 +203,7 @@
             counter++;
         }
     }
+    mBytesUsedSoFar = 0;
     return OK;
 }
 
@@ -247,6 +261,14 @@
         return INVALID_OPERATION;
 }
 
+void AudioPlayback::pause() {
+    mTrack->pause();
+}
+
+void AudioPlayback::resume() {
+    mTrack->start();
+}
+
 void AudioPlayback::stop() {
     {
         std::lock_guard lock(mMutex);
@@ -255,7 +277,7 @@
     if (mState != PLAY_STOPPED && mState != PLAY_NO_INIT) {
         int32_t msec = 0;
         (void)mTrack->pendingDuration(&msec);
-        mTrack->stopAndJoinCallbacks();
+        mTrack->stop();  // Do not join the callback thread, drain may be ongoing.
         LOG_FATAL_IF(true != mTrack->stopped());
         mState = PLAY_STOPPED;
         if (msec > 0) {
@@ -265,6 +287,23 @@
     }
 }
 
+bool AudioPlayback::waitForStreamEnd() {
+    ALOGD("%s", __func__);
+    const int64_t endMs = uptimeMillis() + std::chrono::milliseconds(kLongCallbackTimeout).count();
+    while (uptimeMillis() < endMs) {
+        // Wake up the AudioPlaybackThread to get notifications.
+        mTrack->wakeCallbackThread();
+        std::unique_lock lock(mMutex);
+        base::ScopedLockAssertion lock_assertion(mMutex);
+        mCondition.wait_for(lock, kShortCallbackTimeout, [this]() {
+            base::ScopedLockAssertion lock_assertion(mMutex);
+            return mStreamEndReceived;
+        });
+        if (mStreamEndReceived) return true;
+    }
+    return false;
+}
+
 // hold pcm data sent by AudioRecord
 RawBuffer::RawBuffer(int64_t ptsPipeline, int64_t ptsManual, int32_t capacity)
     : mData(capacity > 0 ? new uint8_t[capacity] : nullptr),
@@ -566,7 +605,7 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     std::unique_lock lock(mMutex);
-    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    base::ScopedLockAssertion lock_assertion(mMutex);
     while (mBuffersReceived.empty() && !mStopRecording && counter < maxTries) {
         mCondition.wait_for(lock, std::chrono::milliseconds(WAIT_PERIOD_MS));
         counter++;
@@ -626,9 +665,9 @@
 
 uint32_t AudioCapture::waitAndGetReceivedCbMarkerAtPosition() const {
     std::unique_lock lock(mMutex);
-    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    base::ScopedLockAssertion lock_assertion(mMutex);
     mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
-        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        base::ScopedLockAssertion lock_assertion(mMutex);
         return mReceivedCbMarkerAtPosition.has_value();
     });
     return mReceivedCbMarkerAtPosition.value_or(~0);
@@ -636,14 +675,36 @@
 
 uint32_t AudioCapture::waitAndGetReceivedCbMarkerCount() const {
     std::unique_lock lock(mMutex);
-    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    base::ScopedLockAssertion lock_assertion(mMutex);
     mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
-        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        base::ScopedLockAssertion lock_assertion(mMutex);
         return mReceivedCbMarkerCount.has_value();
     });
     return mReceivedCbMarkerCount.value_or(0);
 }
 
+status_t isAutomotivePlatform(bool* isAutomotive) {
+    const sp<IServiceManager> sm = defaultServiceManager();
+    if (sm == nullptr) {
+        ALOGE("%s: failed to retrieve defaultServiceManager", __func__);
+        return NO_INIT;
+    }
+    sp<IBinder> binder = sm->checkService(String16{"package_native"});
+    if (binder == nullptr) {
+        ALOGE("%s: failed to retrieve native package manager", __func__);
+        return NO_INIT;
+    }
+    *isAutomotive = false;
+    const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+    if (pm != nullptr) {
+        const auto status =
+                pm->hasSystemFeature(String16("android.hardware.type.automotive"), 0, isAutomotive);
+        return status.isOk() ? OK : status.transactionError();
+    }
+    ALOGE("%s: failed to cast to IPackageManagerNative", __func__);
+    return NO_INIT;
+}
+
 status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
     int attempts = 5;
     status_t status;
@@ -673,34 +734,43 @@
     return status;
 }
 
-status_t getPortById(const audio_port_handle_t portId, audio_port_v7& port) {
+namespace {
+
+using PortPredicate = std::function<bool(const struct audio_port_v7& port)>;
+status_t getPort(PortPredicate pred, audio_port_v7& port) {
     std::vector<struct audio_port_v7> ports;
     status_t status = listAudioPorts(ports);
     if (status != OK) return status;
-    for (auto i = 0; i < ports.size(); i++) {
-        if (ports[i].id == portId) {
-            port = ports[i];
+    for (const auto& p : ports) {
+        if (pred(p)) {
+            port = p;
             return OK;
         }
     }
     return BAD_VALUE;
 }
 
+}  // namespace
+
+status_t getAnyPort(audio_port_role_t role, audio_port_type_t type, audio_port_v7& port) {
+    return getPort([&](const struct audio_port_v7& p) { return p.role == role && p.type == type; },
+                   port);
+}
+
+status_t getPortById(const audio_port_handle_t portId, audio_port_v7& port) {
+    return getPort([&](const struct audio_port_v7& p) { return p.id == portId; }, port);
+}
+
 status_t getPortByAttributes(audio_port_role_t role, audio_port_type_t type,
                              audio_devices_t deviceType, const std::string& address,
                              audio_port_v7& port) {
-    std::vector<struct audio_port_v7> ports;
-    status_t status = listAudioPorts(ports);
-    if (status != OK) return status;
-    for (auto i = 0; i < ports.size(); i++) {
-        if (ports[i].role == role && ports[i].type == type &&
-            ports[i].ext.device.type == deviceType &&
-            !strncmp(ports[i].ext.device.address, address.c_str(), AUDIO_DEVICE_MAX_ADDRESS_LEN)) {
-            port = ports[i];
-            return OK;
-        }
-    }
-    return BAD_VALUE;
+    return getPort(
+            [&](const struct audio_port_v7& p) {
+                return p.role == role && p.type == type && p.ext.device.type == deviceType &&
+                       !strncmp(p.ext.device.address, address.c_str(),
+                                AUDIO_DEVICE_MAX_ADDRESS_LEN);
+            },
+            port);
 }
 
 status_t listAudioPatches(std::vector<struct audio_patch>& patchesVec) {
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 9ccc7da..0ff3e47 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -22,7 +22,6 @@
 #include <deque>
 #include <memory>
 #include <mutex>
-#include <thread>
 #include <utility>
 
 #include <android-base/thread_annotations.h>
@@ -45,8 +44,10 @@
     std::string sink;
 };
 
+status_t isAutomotivePlatform(bool* isAutomotive);
 status_t listAudioPorts(std::vector<audio_port_v7>& portsVec);
 status_t listAudioPatches(std::vector<struct audio_patch>& patchesVec);
+status_t getAnyPort(audio_port_role_t role, audio_port_type_t type, audio_port_v7& port);
 status_t getPortByAttributes(audio_port_role_t role, audio_port_type_t type,
                              audio_devices_t deviceType, const std::string& address,
                              audio_port_v7& port);
@@ -73,6 +74,39 @@
     std::condition_variable mCondition;
 };
 
+namespace {
+
+class TestAudioTrack : public AudioTrack {
+  public:
+    explicit TestAudioTrack(const AttributionSourceState& attributionSourceState = {})
+        : AudioTrack(attributionSourceState) {}
+    TestAudioTrack(audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format,
+                   audio_channel_mask_t channelMask, const sp<IMemory>& sharedBuffer,
+                   audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                   const wp<IAudioTrackCallback>& callback = nullptr,
+                   int32_t notificationFrames = 0,
+                   audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
+                   transfer_type transferType = TRANSFER_DEFAULT,
+                   const audio_offload_info_t* offloadInfo = nullptr,
+                   const AttributionSourceState& attributionSource = AttributionSourceState(),
+                   const audio_attributes_t* pAttributes = nullptr, bool doNotReconnect = false,
+                   float maxRequiredSpeed = 1.0f)
+        : AudioTrack(streamType, sampleRate, format, channelMask, sharedBuffer, flags, callback,
+                     notificationFrames, sessionId, transferType, offloadInfo, attributionSource,
+                     pAttributes, doNotReconnect, maxRequiredSpeed) {}
+    // The callback thread is normally used for TRANSFER_SYNC_NOTIF_CALLBACK
+    // in order to deliver "more data" callback. However, for offload we are
+    // interested in the "stream end" event which is also served via the same
+    // callback interface.
+    void wakeCallbackThread() {
+        if (sp<AudioTrackThread> t = mAudioTrackThread; t != nullptr) {
+            t->wake();
+        }
+    }
+};
+
+}  // namespace
+
 // Simple AudioPlayback class.
 class AudioPlayback : public AudioTrack::IAudioTrackCallback {
     friend sp<AudioPlayback>;
@@ -90,11 +124,16 @@
     status_t waitForConsumption(bool testSeek = false) EXCLUDES(mMutex);
     status_t fillBuffer();
     status_t onProcess(bool testSeek = false);
-    void onBufferEnd() override EXCLUDES(mMutex);
+    void pause();
+    void resume();
     void stop() EXCLUDES(mMutex);
+    bool waitForStreamEnd();
 
-    bool mStopPlaying GUARDED_BY(mMutex);
-    mutable std::mutex mMutex;
+    // IAudioTrackCallback
+    void onBufferEnd() override EXCLUDES(mMutex);
+    void onStreamEnd() override EXCLUDES(mMutex);
+
+    bool mStopPlaying GUARDED_BY(mMutex) = false;
 
     enum State {
         PLAY_NO_INIT,
@@ -114,13 +153,15 @@
     const audio_attributes_t* mAttributes;
     const audio_offload_info_t* mOffloadInfo;
 
-    size_t mBytesUsedSoFar;
-    State mState;
-    size_t mMemCapacity;
+    size_t mBytesUsedSoFar = 0;
+    State mState = PLAY_NO_INIT;
+    size_t mMemCapacity = 0;
     sp<MemoryDealer> mMemoryDealer;
     sp<IMemory> mMemory;
-
-    sp<AudioTrack> mTrack;
+    sp<TestAudioTrack> mTrack;
+    mutable std::mutex mMutex;
+    bool mStreamEndReceived GUARDED_BY(mMutex) = false;
+    std::condition_variable mCondition;
 };
 
 // hold pcm data sent by AudioRecord
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 3df5fd8..a412b9b 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -17,6 +17,7 @@
 #include <fstream>
 #include <iostream>
 #include <string>
+#include <thread>
 #include <tuple>
 #include <vector>
 
@@ -217,13 +218,13 @@
     std::ifstream fin(argsR.mDumpFileName, std::ios::in | std::ios::binary);
     fin.read((char*)output.data(), totalFrameCount * sizeof(output[0]));
     fin.close();
-    PFFFT_Setup* handle = pffft_new_setup(nPointFft, PFFFT_REAL);
+    pffft::detail::PFFFT_Setup* handle = pffft_new_setup(nPointFft, pffft::detail::PFFFT_REAL);
     // ignore first few samples. This is to not analyse until audio track is re-routed to remote
     // submix source, also for the effect filter response to reach steady-state (priming / pruning
     // samples).
     int rerouteOffset = kPrimeDurationInSec * kSamplingFrequency;
     pffft_transform_ordered(handle, output.data() + rerouteOffset, fftOutput.data(), nullptr,
-                            PFFFT_FORWARD);
+                            pffft::detail::PFFFT_FORWARD);
     pffft_destroy_setup(handle);
     for (auto i = 0; i < binOffsets.size(); i++) {
         auto k = binOffsets[i];
@@ -296,8 +297,9 @@
     generateMultiTone(centerFrequencies, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
                       input.data(), totalFrameCount);
     auto fftInput = pffft::AlignedVector<float>(kNPointFFT);
-    PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, PFFFT_REAL);
-    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr, PFFFT_FORWARD);
+    pffft::detail::PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, pffft::detail::PFFFT_REAL);
+    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr,
+                            pffft::detail::PFFFT_FORWARD);
     pffft_destroy_setup(handle);
     float inputMag[numBands];
     for (auto i = 0; i < numBands; i++) {
@@ -400,8 +402,9 @@
     generateMultiTone(testFrequencies, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
                       input.data(), totalFrameCount);
     auto fftInput = pffft::AlignedVector<float>(kNPointFFT);
-    PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, PFFFT_REAL);
-    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr, PFFFT_FORWARD);
+    pffft::detail::PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, pffft::detail::PFFFT_REAL);
+    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr,
+                            pffft::detail::PFFFT_FORWARD);
     pffft_destroy_setup(handle);
     float inputMag[testFrequencies.size()];
     for (auto i = 0; i < testFrequencies.size(); i++) {
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 7957c10..a55de4d 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -98,6 +98,11 @@
 };
 
 TEST_P(AudioTrackTest, DefaultRoutingTest) {
+    bool isAutomotive;
+    ASSERT_EQ(OK, isAutomotivePlatform(&isAutomotive));
+    if (isAutomotive) {
+        GTEST_SKIP() << "auto uses its own policy for routing";
+    }
     audio_port_v7 port;
     if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
                                   AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
@@ -154,6 +159,11 @@
 class AudioRoutingTest : public ::testing::Test {
   public:
     void SetUp() override {
+        bool isAutomotive;
+        ASSERT_EQ(OK, isAutomotivePlatform(&isAutomotive));
+        if (isAutomotive) {
+            GTEST_SKIP() << "auto uses its own policy for routing";
+        }
         audio_port_v7 port;
         if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
                                       AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 31cab78..bc3bb8d 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -412,6 +412,9 @@
                 outputDevices.push_back(outputDevice);
             }
         }
+        if (outputDevices.empty()) {
+            GTEST_SKIP() << "No speaker device found";
+        }
         EXPECT_EQ(OK, AudioSystem::setDevicesRoleForStrategy(mediaStrategy.getId(),
                                                              DEVICE_ROLE_PREFERRED, outputDevices));
         EXPECT_EQ(OK, AudioSystem::getDevicesForRoleAndStrategy(mediaStrategy.getId(),
@@ -425,8 +428,13 @@
 }
 
 TEST_F(AudioSystemTest, VolumeIndexForAttributes) {
+    std::optional<audio_port_v7> speakerPort = audio_port_v7{};
+    if (getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, AUDIO_DEVICE_OUT_SPEAKER,
+                            "", *speakerPort) != OK) {
+        speakerPort.reset();
+    }
     AudioVolumeGroupVector groups;
-    EXPECT_EQ(OK, AudioSystem::listAudioVolumeGroups(groups));
+    ASSERT_EQ(OK, AudioSystem::listAudioVolumeGroups(groups));
     for (const auto& group : groups) {
         if (group.getAudioAttributes().empty()) continue;
         const audio_attributes_t attr = group.getAudioAttributes()[0];
@@ -438,14 +446,15 @@
         EXPECT_EQ(OK, AudioSystem::getVolumeGroupFromAudioAttributes(attr, vg));
         EXPECT_EQ(group.getId(), vg);
 
-        int index;
-        EXPECT_EQ(OK,
-                  AudioSystem::getVolumeIndexForAttributes(attr, index, AUDIO_DEVICE_OUT_SPEAKER));
-
-        int indexTest;
-        EXPECT_EQ(OK, AudioSystem::getStreamVolumeIndex(streamType, &indexTest,
-                                                        AUDIO_DEVICE_OUT_SPEAKER));
-        EXPECT_EQ(index, indexTest);
+        if (speakerPort.has_value()) {
+            int index;
+            EXPECT_EQ(OK, AudioSystem::getVolumeIndexForAttributes(attr, index,
+                                                                   speakerPort->ext.device.type));
+            int indexTest;
+            EXPECT_EQ(OK, AudioSystem::getStreamVolumeIndex(streamType, &indexTest,
+                                                            speakerPort->ext.device.type));
+            EXPECT_EQ(index, indexTest);
+        }
     }
 }
 
@@ -562,11 +571,14 @@
     AudioDeviceTypeAddrVector inputDevices = {inputDevice};
     EXPECT_EQ(BAD_VALUE, AudioSystem::setUidDeviceAffinities(uid, inputDevices));
 
-    // Test valid device for example audio_is_output_device
-    AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
-    AudioDeviceTypeAddrVector outputDevices = {outputDevice};
-    EXPECT_EQ(NO_ERROR, AudioSystem::setUidDeviceAffinities(uid, outputDevices));
-    EXPECT_EQ(NO_ERROR, AudioSystem::removeUidDeviceAffinities(uid));
+    audio_port_v7 port;
+    if (OK == getAnyPort(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, port)) {
+        // Test valid device for example audio_is_output_device
+        AudioDeviceTypeAddr outputDevice(port.ext.device.type, port.ext.device.address);
+        AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+        EXPECT_EQ(NO_ERROR, AudioSystem::setUidDeviceAffinities(uid, outputDevices));
+        EXPECT_EQ(NO_ERROR, AudioSystem::removeUidDeviceAffinities(uid));
+    }
 }
 
 TEST_F(AudioSystemTest, UserIdDeviceAffinities) {
@@ -577,11 +589,14 @@
     AudioDeviceTypeAddrVector inputDevices = {inputDevice};
     EXPECT_EQ(BAD_VALUE, AudioSystem::setUserIdDeviceAffinities(userId, inputDevices));
 
-    // Test valid device for ezample audio_is_output_device
-    AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
-    AudioDeviceTypeAddrVector outputDevices = {outputDevice};
-    EXPECT_EQ(NO_ERROR, AudioSystem::setUserIdDeviceAffinities(userId, outputDevices));
-    EXPECT_EQ(NO_ERROR, AudioSystem::removeUserIdDeviceAffinities(userId));
+    audio_port_v7 port;
+    if (OK == getAnyPort(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, port)) {
+        // Test valid device for example audio_is_output_device
+        AudioDeviceTypeAddr outputDevice(port.ext.device.type, port.ext.device.address);
+        AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+        EXPECT_EQ(NO_ERROR, AudioSystem::setUserIdDeviceAffinities(userId, outputDevices));
+        EXPECT_EQ(NO_ERROR, AudioSystem::removeUserIdDeviceAffinities(userId));
+    }
 }
 
 namespace {
@@ -683,13 +698,13 @@
         // !!! Instead of the default format, use each format from 'ext.encodedFormats'
         // !!! if they are not empty
         status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT, false);
         EXPECT_EQ(OK, status);
         if (status != OK) continue;
         deviceState = AudioSystem::getDeviceConnectionState(type, address);
         EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE, deviceState);
         status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT, false);
         EXPECT_EQ(OK, status);
         deviceState = AudioSystem::getDeviceConnectionState(type, address);
         EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index d283c6c..bf60752 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -14,9 +14,12 @@
  * limitations under the License.
  */
 
+#include <thread>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AudioTrackTests"
 
+#include <android-base/logging.h>
 #include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 
@@ -178,6 +181,108 @@
     ap->stop();
 }
 
+class AudioTrackOffloadTest : public ::testing::Test {
+  protected:
+    void TearDown() override {
+        if (!IsSkipped()) {
+            // Let the offload AF stream to exit to avoid interfering with other tests.
+            std::this_thread::sleep_for(std::chrono::milliseconds(750));
+        }
+    }
+    bool halSupportsClipTransition() const;
+    void testPlayback(bool testDrainPause, sp<AudioPlayback>* outPlayback = nullptr);
+};
+
+bool AudioTrackOffloadTest::halSupportsClipTransition() const {
+    // TODO: Check for the HAL type (HIDL/AIDL) and version. HIDL and AIDL V4 should also
+    //       support this.
+    AudioParameter param;
+    param.addKey(String8(AudioParameter::keyClipTransitionSupport));
+    String8 values = AudioSystem::getParameters(AUDIO_IO_HANDLE_NONE, param.keysToString());
+    LOG(DEBUG) << __func__ << ": values \"" << values << "\"";
+    return !values.empty();
+}
+
+void AudioTrackOffloadTest::testPlayback(bool testDrainPause, sp<AudioPlayback>* outPlayback) {
+    audio_offload_info_t info = AUDIO_INFO_INITIALIZER;
+    info.sample_rate = 48000;
+    info.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    info.format = AUDIO_FORMAT_APE;
+    info.stream_type = AUDIO_STREAM_MUSIC;
+    info.bit_rate = 236256;
+    info.duration_us = 120 * 1000000;  // 120 sec to ensure the offloading choice
+
+    if (AUDIO_OFFLOAD_NOT_SUPPORTED == AudioSystem::getOffloadSupport(info)) {
+        GTEST_SKIP() << "offload playback is not supported for "
+                     << audio_format_to_string(info.format);
+    }
+    auto ap = sp<AudioPlayback>::make(info.sample_rate, info.format, info.channel_mask,
+                                      AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD, AUDIO_SESSION_NONE,
+                                      AudioTrack::TRANSFER_OBTAIN, nullptr, &info);
+    ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/sine960hz_48000_3s.ape"))
+            << "unable to open the media file";
+    ASSERT_EQ(OK, ap->create()) << "track creation failed";
+    ASSERT_EQ(OK, ap->start()) << "audio track start failed";
+    LOG(INFO) << __func__ << ": Started track";
+    EXPECT_EQ(OK, ap->onProcess());
+    LOG(INFO) << __func__ << ": onProcess done";
+    if (!outPlayback) {
+        ap->stop();
+        LOG(INFO) << __func__ << ": Stopped track";
+    }
+    if (testDrainPause) {
+        // Wait for draining to start, no event for this.
+        std::this_thread::sleep_for(std::chrono::milliseconds(200));
+        LOG(INFO) << __func__ << ": Pausing drain";
+        ap->pause();
+        LOG(INFO) << __func__ << ": Resuming drain";
+        ap->resume();
+    }
+    if (!outPlayback) {
+        LOG(INFO) << __func__ << ": Waiting for stream end";
+        EXPECT_TRUE(ap->waitForStreamEnd()) << "Did not receive onStreamEnd";
+    } else {
+        *outPlayback = std::move(ap);
+    }
+}
+
+TEST_F(AudioTrackOffloadTest, Completion) {
+    testPlayback(false /*testDrainPause*/);
+}
+
+TEST_F(AudioTrackOffloadTest, DrainPause) {
+    if (!halSupportsClipTransition()) {
+        // TODO: In theory, this should also work w/o having the proper clip transition
+        //       support, but as a fact it was not. Need to figure out why.
+        GTEST_SKIP() << "Proper indication of clip transition is not supported";
+    }
+    testPlayback(true /*testDrainPause*/);
+}
+
+// Similar to AudioTrackOffloadTest.testMultipleAudioTrackOffloadPreemption
+TEST_F(AudioTrackOffloadTest, ClipPreemption) {
+    if (!halSupportsClipTransition()) {
+        GTEST_SKIP() << "Proper indication of clip transition is not supported";
+    }
+    sp<AudioPlayback> trackOne, trackTwo;
+    {
+        SCOPED_TRACE("track 1");
+        LOG(INFO) << __func__ << ": Creating and starting track 1";
+        ASSERT_NO_FATAL_FAILURE(testPlayback(false /*testDrainPause*/, &trackOne));
+    }
+    {
+        SCOPED_TRACE("track 2");
+        // Wait for track 1 to start playing, no event for this.
+        std::this_thread::sleep_for(std::chrono::milliseconds(300));
+        LOG(INFO) << __func__ << ": Creating and starting track 2";
+        ASSERT_NO_FATAL_FAILURE(testPlayback(false /*testDrainPause*/, &trackTwo));
+        std::this_thread::sleep_for(std::chrono::milliseconds(200));
+        trackTwo->stop();
+    }
+    LOG(INFO) << __func__ << ": Waiting for stream end on track 2";
+    EXPECT_TRUE(trackTwo->waitForStreamEnd()) << "Did not receive onStreamEnd on track 2";
+}
+
 class AudioTrackCreateTest
     : public ::testing::TestWithParam<std::tuple<uint32_t, audio_format_t, audio_channel_mask_t,
                                                  audio_output_flags_t, audio_session_t>> {
diff --git a/media/libaudioclient/tests/sine960hz_48000_3s.ape b/media/libaudioclient/tests/sine960hz_48000_3s.ape
new file mode 100644
index 0000000..149c42a
--- /dev/null
+++ b/media/libaudioclient/tests/sine960hz_48000_3s.ape
Binary files differ
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index f3d295b..6f12f95 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -84,6 +84,13 @@
     return audioDeviceOutLeAudioUnicastSet;
 }
 
+const DeviceTypeSet& getAudioDeviceOutPickForVolumeSet() {
+    static const DeviceTypeSet audioDevicePickForVolumeSet = DeviceTypeSet(
+            std::begin(AUDIO_DEVICE_OUT_PICK_FOR_VOLUME_ARRAY),
+            std::end(AUDIO_DEVICE_OUT_PICK_FOR_VOLUME_ARRAY));
+    return audioDevicePickForVolumeSet;
+}
+
 std::string deviceTypesToString(const DeviceTypeSet &deviceTypes) {
     if (deviceTypes.empty()) {
         return "Empty device types";
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index b6c0444..3863392 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -48,6 +48,7 @@
 const DeviceTypeSet& getAudioDeviceOutAllBleSet();
 const DeviceTypeSet& getAudioDeviceOutLeAudioUnicastSet();
 const DeviceTypeSet& getAudioDeviceOutLeAudioBroadcastSet();
+const DeviceTypeSet& getAudioDeviceOutPickForVolumeSet();
 
 template<typename T>
 static std::vector<T> Intersection(const std::set<T>& a, const std::set<T>& b) {
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 74a64bf..609db07 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -31,6 +31,7 @@
 
     shared_libs: [
         "audioclient-types-aidl-cpp",
+        "libaudiofoundation",
         "libbinder_ndk",
         "libdl",
         "libhidlbase",
@@ -75,6 +76,7 @@
     name: "libaudiohal_headers",
 
     header_libs: [
+        "libaudiofoundation_headers",
         "libeffectsconfig_headers",
     ],
 
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 0a131fa..36a40cd 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -94,6 +94,10 @@
 
 namespace {
 
+static constexpr int32_t kAidlVersion1 = 1;
+static constexpr int32_t kAidlVersion2 = 2;
+static constexpr int32_t kAidlVersion3 = 3;
+
 // Note: these converters are for types defined in different AIDL files. Although these
 // AIDL files are copies of each other, however formally these are different types
 // thus we don't use a conversion via a parcelable.
@@ -175,6 +179,17 @@
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     std::lock_guard l(mLock);
+    int32_t aidlVersion = 0;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getInterfaceVersion(&aidlVersion)));
+    if (aidlVersion > kAidlVersion3) {
+        mHasClipTransitionSupport = true;
+    } else {
+        AudioParameter parameterKeys;
+        parameterKeys.addKey(String8(AudioParameter::keyClipTransitionSupport));
+        String8 values;
+        auto status = parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, &values);
+        mHasClipTransitionSupport = status == OK && !values.empty();
+    }
     return mMapper.initialize();
 }
 
@@ -545,7 +560,7 @@
         std::lock_guard l(mLock);
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
     }
-    StreamContextAidl context(ret.desc, isOffload, aidlHandle);
+    StreamContextAidl context(ret.desc, isOffload, aidlHandle, mHasClipTransitionSupport);
     if (!context.isValid()) {
         AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
                     ret.desc.toString().c_str());
@@ -627,7 +642,8 @@
         std::lock_guard l(mLock);
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
     }
-    StreamContextAidl context(ret.desc, false /*isAsynchronous*/, aidlHandle);
+    StreamContextAidl context(
+            ret.desc, false /*isAsynchronous*/, aidlHandle, mHasClipTransitionSupport);
     if (!context.isValid()) {
         AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
                     ret.desc.toString().c_str());
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index af8b423..173d16f 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -265,6 +265,7 @@
     Hal2AidlMapper mMapper GUARDED_BY(mLock);
     LockedAccessor<Hal2AidlMapper> mMapperAccessor;
     Microphones mMicrophones GUARDED_BY(mLock);
+    bool mHasClipTransitionSupport = false;
 };
 
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 658fc18b..090ac8a 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "EffectHalAidl"
 //#define LOG_NDEBUG 0
 
+#include <algorithm>
 #include <memory>
 
 #include <audio_utils/primitives.h>
@@ -63,7 +64,9 @@
 using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
 using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
 using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
+using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
 
 namespace android {
 namespace effect {
@@ -388,5 +391,24 @@
     return mEffect->dump(fd, nullptr, 0);
 }
 
+status_t EffectHalAidl::setDevices(const AudioDeviceTypeAddrVector& deviceTypes) {
+    TIME_CHECK();
+
+    // TODO: b/397236443 - add this as part of effect dumpsys
+    ALOGD("%s %s", __func__,
+          dumpAudioDeviceTypeAddrVector(deviceTypes, false /*includeSensitiveInfo*/).c_str());
+
+    std::vector<AudioDeviceDescription> deviceDescs;
+    for (const AudioDeviceTypeAddr& deviceType : deviceTypes) {
+        AudioDeviceDescription deviceDesc = VALUE_OR_RETURN_STATUS(
+                ::aidl::android::legacy2aidl_audio_devices_t_AudioDeviceDescription(
+                        deviceType.mType));
+        deviceDescs.emplace_back(std::move(deviceDesc));
+    }
+
+    return statusTFromBinderStatus(
+            mEffect->setParameter(Parameter::make<Parameter::deviceDescription>(deviceDescs)));
+}
+
 } // namespace effect
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index a775337..c420eed 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -62,6 +62,9 @@
         return mEffect;
     }
 
+    // Set devices in AIDL type
+    status_t setDevices(const AudioDeviceTypeAddrVector& deviceTypes);
+
     // for TIME_CHECK
     const std::string getClassName() const { return "EffectHalAidl"; }
 
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index dda21ed..a70df4a 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -63,6 +63,9 @@
 
     uint64_t effectId() const { return mEffectId; }
 
+    // Not implemented in HIDL effect HAL
+    status_t setDevices(const AudioDeviceTypeAddrVector&) { return INVALID_OPERATION; };
+
   private:
     friend class EffectsFactoryHalHidl;
     typedef MessageQueue<Result, hardware::kSynchronizedReadWrite> StatusMQ;
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index ac69b26..8923f48 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -49,7 +49,6 @@
 using ::aidl::android::media::audio::common::AudioSource;
 using ::aidl::android::media::audio::common::AudioStreamType;
 using ::aidl::android::media::audio::common::AudioUuid;
-using ::android::audio::utils::toString;
 using ::android::base::unexpected;
 using ::android::detail::AudioHalVersionInfo;
 
@@ -200,7 +199,8 @@
                 statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
     }
     if (aidlEffect == nullptr) {
-        ALOGE("%s failed to create effect with UUID: %s", __func__, toString(aidlUuid).c_str());
+        ALOGE("%s failed to create effect with UUID: %s", __func__,
+              ::android::audio::utils::toString(aidlUuid).c_str());
         return NAME_NOT_FOUND;
     }
     Descriptor desc;
@@ -239,7 +239,8 @@
     auto matchIt = std::find_if(list.begin(), list.end(),
                                 [&](const auto& desc) { return desc.common.id.uuid == uuid; });
     if (matchIt == list.end()) {
-        ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
+        ALOGE("%s UUID not found in HAL and proxy list %s", __func__,
+              ::android::audio::utils::toString(uuid).c_str());
         return NAME_NOT_FOUND;
     }
 
@@ -260,7 +261,8 @@
     std::copy_if(mProxyDescList.begin(), mProxyDescList.end(), std::back_inserter(result),
                  [&](auto& desc) { return desc.common.id.type == type; });
     if (result.empty()) {
-        ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, toString(type).c_str());
+        ALOGW("%s UUID type not found in HAL and proxy list %s", __func__,
+              ::android::audio::utils::toString(type).c_str());
         return BAD_VALUE;
     }
 
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 0cdf0f2..5642b6e 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -1176,7 +1176,8 @@
 }
 
 void Hal2AidlMapper::updateDynamicMixPorts() {
-    for (int32_t portId : mDynamicMixPortIds) {
+    const auto dynamicMixPortIds = mDynamicMixPortIds;
+    for (int32_t portId : dynamicMixPortIds) {
         if (auto it = mPorts.find(portId); it != mPorts.end()) {
             updateAudioPort(portId, &it->second);
         } else {
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index e138cea..89dbee3 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -46,13 +46,43 @@
 using ::aidl::android::hardware::audio::core::IStreamOut;
 using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
 using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
 using ::aidl::android::media::audio::IHalAdapterVendorExtension;
 
+/**
+ * Notes on the position handling implementation. First, please consult
+ * "On position reporting" comment in StreamHalInterface.h for the context.
+ *
+ * The adaptation layer for AIDL HALs needs to emulate the HIDL HAL behavior
+ * (that's until some future release when the framework stops supporting HIDL
+ * HALs and it will be possible to remove the code in the framework which
+ * translates resetting positions into continuous) by resetting the reported
+ * position after certain events, depending on the kind of the audio data
+ * stream. Unlike the AIDL interface, the interface between the HAL adaptation
+ * layer and the framework uses separate method calls for controlling the stream
+ * state and retrieving the position. Because of that, the code which implements
+ * position reporting (methods 'getRenderPosition' and 'getObservablePosition')
+ * needs to use stored stream positions which it had at certain state changing
+ * events, like flush or drain. These are stored in the field called
+ * 'mStatePositions'. This field is updated in the code which changes the stream
+ * state. There are two places for that: the 'sendCommand' method, which is used
+ * for all streams, and handlers of asynchronous stream events called
+ * 'onAsync...'.
+ */
+
 namespace android {
 
 using HalCommand = StreamDescriptor::Command;
+
 namespace {
+
+static constexpr int32_t kAidlVersion1 = 1;
+static constexpr int32_t kAidlVersion2 = 2;
+static constexpr int32_t kAidlVersion3 = 3;
+
+static constexpr const char* kCreateMmapBuffer = "aosp.createMmapBuffer";
+
 template<HalCommand::Tag cmd> HalCommand makeHalCommand() {
     return HalCommand::make<cmd>(::aidl::android::media::audio::common::Void{});
 }
@@ -114,22 +144,34 @@
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 
-    if (mStream != nullptr) {
-        mContext.getCommandMQ()->setErrorHandler(
-                fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
-        mContext.getReplyMQ()->setErrorHandler(
-                fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
-        if (mContext.getDataMQ() != nullptr) {
-            mContext.getDataMQ()->setErrorHandler(
-                    fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+    if (mStream == nullptr) return;
+
+    mContext.getCommandMQ()->setErrorHandler(
+            fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
+    mContext.getReplyMQ()->setErrorHandler(
+            fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
+    if (mContext.getDataMQ() != nullptr) {
+        mContext.getDataMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+    }
+
+    if (auto status = mStream->getInterfaceVersion(&mAidlInterfaceVersion); status.isOk()) {
+        if (mAidlInterfaceVersion > kAidlVersion3) {
+            mSupportsCreateMmapBuffer = true;
+        } else {
+            VendorParameter createMmapBuffer{.id = kCreateMmapBuffer};
+            mSupportsCreateMmapBuffer =
+                    mStream->setVendorParameters({createMmapBuffer}, false).isOk();
         }
+    } else {
+        AUGMENT_LOG(E, "failed to retrieve stream interface version: %s", status.getMessage());
     }
 }
 
 StreamHalAidl::~StreamHalAidl() {
     AUGMENT_LOG(D);
     if (mStream != nullptr) {
-        ndk::ScopedAStatus status = mStream->close();
+        ndk::ScopedAStatus status = serializeCall(mStream, &Stream::close);
         AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
     }
 }
@@ -166,9 +208,9 @@
     AUGMENT_LOG(D, "parameters: %s", parameters.toString().c_str());
 
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyStreamHwAvSync),
-            [&](int hwAvSyncId) {
-                return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
+            parameters, String8(AudioParameter::keyStreamHwAvSync), [&](int hwAvSyncId) {
+                return statusTFromBinderStatus(
+                        serializeCall(mStream, &Stream::updateHwAvSyncId, hwAvSyncId));
             }));
     return parseAndSetVendorParameters(mVendorExt, mStream, parameters);
 }
@@ -205,7 +247,8 @@
         return BAD_VALUE;
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
-    return statusTFromBinderStatus(mStream->addEffect(aidlEffect->getIEffect()));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::addEffect, aidlEffect->getIEffect()));
 }
 
 status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
@@ -216,7 +259,8 @@
         return BAD_VALUE;
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
-    return statusTFromBinderStatus(mStream->removeEffect(aidlEffect->getIEffect()));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::removeEffect, aidlEffect->getIEffect()));
 }
 
 status_t StreamHalAidl::standby() {
@@ -269,15 +313,10 @@
     }
 }
 
-status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
+status_t StreamHalAidl::dump(int fd, const Vector<String16>& args __unused) {
     AUGMENT_LOG(D);
-    TIME_CHECK();
-    if (!mStream) return NO_INIT;
-    Vector<String16> newArgs = args;
-    newArgs.push(String16(kDumpFromAudioServerArgument));
-    status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
     mStreamPowerLog.dump(fd);
-    return status;
+    return OK;
 }
 
 status_t StreamHalAidl::start() {
@@ -382,12 +421,17 @@
     AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, &statePositions));
     if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
         reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        AUGMENT_LOG(W, "No position was reported by the HAL");
         return INVALID_OPERATION;
     }
-    *frames = reply.hardware.frames;
+    int64_t mostRecentResetPoint = std::max(statePositions.hardware.framesAtStandby,
+                                            statePositions.hardware.framesAtFlushOrDrain);
+    int64_t aidlFrames = reply.hardware.frames;
+    *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     *timestamp = reply.hardware.timeNs;
     return OK;
 }
@@ -445,6 +489,10 @@
             AUGMENT_LOG(E, "failed to read %zu bytes to data MQ", toRead);
             return NOT_ENOUGH_DATA;
         }
+    } else if (*transferred > bytes) {
+        ALOGW("%s: HAL module wrote %zu bytes, which exceeds requested count %zu",
+                __func__, *transferred, bytes);
+        *transferred = bytes;
     }
     mStreamPowerLog.log(buffer, *transferred);
     return OK;
@@ -475,7 +523,7 @@
             AUGMENT_LOG(D,
                         "HAL failed to handle the 'pause' command, but stream state is in one of"
                         " the PAUSED kind of states, current state: %s",
-                        toString(state).c_str());
+                        toString(innerReply->state).c_str());
             return OK;
         }
         return status;
@@ -553,7 +601,7 @@
     AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->prepareToClose());
+    return statusTFromBinderStatus(serializeCall(mStream, &Stream::prepareToClose));
 }
 
 void StreamHalAidl::onAsyncTransferReady() {
@@ -564,7 +612,17 @@
         std::lock_guard l(mCommandReplyLock);
         state = getState();
     }
+    bool isCallbackExpected = false;
     if (state == StreamDescriptor::State::TRANSFERRING) {
+        isCallbackExpected = true;
+    } else if (mContext.hasClipTransitionSupport() && state == StreamDescriptor::State::DRAINING) {
+        std::lock_guard l(mLock);
+        isCallbackExpected = mStatePositions.drainState == StatePositions::DrainState::EN_RECEIVED;
+        if (!isCallbackExpected) {
+            AUGMENT_LOG(W, "drainState %d", static_cast<int>(mStatePositions.drainState));
+        }
+    }
+    if (isCallbackExpected) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
@@ -582,17 +640,27 @@
         std::lock_guard l(mCommandReplyLock);
         state = getState();
     }
-    if (state == StreamDescriptor::State::DRAINING) {
+    if (state == StreamDescriptor::State::DRAINING ||
+            (mContext.hasClipTransitionSupport() &&
+                    (state == StreamDescriptor::State::TRANSFERRING ||
+                            state == StreamDescriptor::State::IDLE))) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
                     true /*safeFromNonWorkerThread */);
         // For compatibility with HIDL behavior, apply a "soft" position reset
-        // after receiving the "drain ready" callback.
+        // after receiving the "drain ready" callback for the clip end.
         std::lock_guard l(mLock);
-        if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN) {
-            mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+        if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN &&
+                (!mContext.hasClipTransitionSupport() ||
+                        (mStatePositions.drainState == StatePositions::DrainState::EN_RECEIVED
+                                || mStatePositions.drainState == StatePositions::DrainState::ALL))) {
+            AUGMENT_LOG(D, "setting position %lld as clip end",
+                    (long long)mLastReply.observable.frames);
+            mStatePositions.observable.framesAtFlushOrDrain = mLastReply.observable.frames;
         }
+        mStatePositions.drainState = mStatePositions.drainState == StatePositions::DrainState::EN ?
+                StatePositions::DrainState::EN_RECEIVED : StatePositions::DrainState::NONE;
     } else {
         AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
     }
@@ -612,12 +680,25 @@
     if (!mContext.isMmapped()) {
         return BAD_VALUE;
     }
+    if (mSupportsCreateMmapBuffer && (mAidlInterfaceVersion <= kAidlVersion3)) {
+        std::vector<VendorParameter> parameters;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mStream->getVendorParameters({kCreateMmapBuffer}, &parameters)));
+        if (parameters.size() == 1) {
+            std::optional<MmapBufferDescriptor> result;
+            RETURN_STATUS_IF_ERROR(parameters[0].ext.getParcelable(&result));
+            mContext.updateMmapBufferDescriptor(std::move(*result));
+        } else {
+            AUGMENT_LOG(E, "invalid output from 'createMmapBuffer' via 'getVendorParameters': %s",
+                        internal::ToString(parameters).c_str());
+            return INVALID_OPERATION;
+        }
+    }
     const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
     info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
     info->buffer_size_frames = mContext.getBufferSizeFrames();
     info->burst_size_frames = bufferDescriptor.burstSizeFrames;
     info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
-
     return OK;
 }
 
@@ -655,6 +736,13 @@
         const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
         bool safeFromNonWorkerThread, StatePositions* statePositions) {
+
+    // Add timeCheck only for start command (pause, flush checked at caller).
+    std::unique_ptr<mediautils::TimeCheck> timeCheck;
+    if (command.getTag() == StreamDescriptor::Command::start) {
+        timeCheck = mediautils::makeTimeCheckStatsForClassMethodUniquePtr(
+                getClassName(), "sendCommand_start");
+    }
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -685,20 +773,31 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
-            if (!mIsInput && reply->status == STATUS_OK &&
-                    reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
-                if (command.getTag() == StreamDescriptor::Command::standby &&
-                        reply->state == StreamDescriptor::State::STANDBY) {
-                    mStatePositions.framesAtStandby = reply->observable.frames;
-                } else if (command.getTag() == StreamDescriptor::Command::flush &&
-                           reply->state == StreamDescriptor::State::IDLE) {
-                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
-                } else if (!mContext.isAsynchronous() &&
-                        command.getTag() == StreamDescriptor::Command::drain &&
-                        (reply->state == StreamDescriptor::State::IDLE ||
-                                reply->state == StreamDescriptor::State::DRAINING)) {
-                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
-                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
+                    if (command.getTag() == StreamDescriptor::Command::standby &&
+                            reply->state == StreamDescriptor::State::STANDBY) {
+                        mStatePositions.observable.framesAtStandby = reply->observable.frames;
+                        mStatePositions.hardware.framesAtStandby = reply->hardware.frames;
+                    } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                            reply->state == StreamDescriptor::State::IDLE) {
+                        mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.hardware.framesAtFlushOrDrain = reply->observable.frames;
+                    } else if (!mContext.isAsynchronous() &&
+                            command.getTag() == StreamDescriptor::Command::drain &&
+                            (reply->state == StreamDescriptor::State::IDLE ||
+                                    reply->state == StreamDescriptor::State::DRAINING)) {
+                        mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.hardware.framesAtFlushOrDrain = reply->observable.frames;
+                    } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+                }
+                if (mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain) {
+                    mStatePositions.drainState =
+                            command.get<StreamDescriptor::Command::drain>() ==
+                            StreamDescriptor::DrainMode::DRAIN_ALL ?
+                            StatePositions::DrainState::ALL : StatePositions::DrainState::EN;
+                }
             }
             if (statePositions != nullptr) {
                 *statePositions = mStatePositions;
@@ -721,15 +820,19 @@
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
         StatePositions* statePositions) {
     bool doUpdate = false;
+    HalCommand cmd;
     {
         std::lock_guard l(mLock);
         doUpdate = uptimeNanos() > mLastReplyExpirationNs;
+        cmd = mContext.isMmapped() && mSupportsCreateMmapBuffer
+                && mLastReply.state == StreamDescriptor::State::ACTIVE
+                ? makeHalCommand<HalCommand::Tag::burst>(0)
+                : makeHalCommand<HalCommand::Tag::getStatus>();
     }
     if (doUpdate) {
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
-        return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */, statePositions);
+        return sendCommand(cmd, reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
@@ -806,13 +909,14 @@
             volumes[i] = (left + right) / 2;
         }
     }
-    return statusTFromBinderStatus(mStream->setHwVolume(volumes));
+    return statusTFromBinderStatus(serializeCall(mStream, &Stream::setHwVolume, volumes));
 }
 
 status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->selectPresentation(presentationId, programId));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::selectPresentation, presentationId, programId));
 }
 
 status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *written) {
@@ -835,10 +939,10 @@
     // See the table at the start of 'StreamHalInterface' on when it needs to reset.
     int64_t mostRecentResetPoint;
     if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
-        mostRecentResetPoint = statePositions.framesAtStandby;
+        mostRecentResetPoint = statePositions.observable.framesAtStandby;
     } else {
-        mostRecentResetPoint =
-                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
+                statePositions.observable.framesAtFlushOrDrain);
     }
     *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
@@ -887,9 +991,12 @@
 status_t StreamOutHalAidl::drain(bool earlyNotify) {
     if (!mStream) return NO_INIT;
 
-    if (const auto state = getState(); isInDrainedState(state)) {
+    if (const auto state = getState();
+            state == StreamDescriptor::State::DRAINING || isInDrainedState(state)) {
         AUGMENT_LOG(D, "stream already in %s state", toString(state).c_str());
-        if (mContext.isAsynchronous()) onDrainReady();
+        if (mContext.isAsynchronous() && isInDrainedState(state)) {
+            onDrainReady();
+        }
         return OK;
     }
 
@@ -911,8 +1018,8 @@
     if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
         *frames = aidlFrames;
     } else {
-        const int64_t mostRecentResetPoint =
-                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        const int64_t mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
+                statePositions.observable.framesAtFlushOrDrain);
         *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
@@ -931,7 +1038,8 @@
     if (!mStream) return NO_INIT;
     ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
               VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
-    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::updateMetadata, aidlMetadata));
 }
 
 status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
@@ -941,7 +1049,8 @@
         return BAD_VALUE;
     }
     ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getDualMonoMode(&aidlMode)));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::getDualMonoMode, &aidlMode)));
     *mode = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(aidlMode));
     return OK;
@@ -952,7 +1061,8 @@
     if (!mStream) return NO_INIT;
     ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
-    return statusTFromBinderStatus(mStream->setDualMonoMode(aidlMode));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::setDualMonoMode, aidlMode));
 }
 
 status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB) {
@@ -961,13 +1071,15 @@
     if (leveldB == nullptr) {
         return BAD_VALUE;
     }
-    return statusTFromBinderStatus(mStream->getAudioDescriptionMixLevel(leveldB));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::getAudioDescriptionMixLevel, leveldB));
 }
 
 status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->setAudioDescriptionMixLevel(leveldB));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::setAudioDescriptionMixLevel, leveldB));
 }
 
 status_t StreamOutHalAidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
@@ -977,7 +1089,8 @@
         return BAD_VALUE;
     }
     ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getPlaybackRateParameters(&aidlRate)));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::getPlaybackRateParameters, &aidlRate)));
     *playbackRate = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(aidlRate));
     return OK;
@@ -988,7 +1101,8 @@
     if (!mStream) return NO_INIT;
     ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate));
-    return statusTFromBinderStatus(mStream->setPlaybackRateParameters(aidlRate));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::setPlaybackRateParameters, aidlRate));
 }
 
 status_t StreamOutHalAidl::setEventCallback(
@@ -1006,7 +1120,7 @@
     if (!mStream) return NO_INIT;
     ::aidl::android::media::audio::common::AudioLatencyMode aidlMode = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
-    return statusTFromBinderStatus(mStream->setLatencyMode(aidlMode));
+    return statusTFromBinderStatus(serializeCall(mStream, &Stream::setLatencyMode, aidlMode));
 };
 
 status_t StreamOutHalAidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
@@ -1016,8 +1130,8 @@
         return BAD_VALUE;
     }
     std::vector<::aidl::android::media::audio::common::AudioLatencyMode> aidlModes;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mStream->getRecommendedLatencyModes(&aidlModes)));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::getRecommendedLatencyModes, &aidlModes)));
     *modes = VALUE_OR_RETURN_STATUS(
             ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
                     aidlModes,
@@ -1114,7 +1228,8 @@
     if (updateMetadata) {
         AUGMENT_LOG(D, "set offload metadata %s", mOffloadMetadata.toString().c_str());
         if (status_t status = statusTFromBinderStatus(
-                        mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
+                    serializeCall(mStream, &Stream::updateOffloadMetadata, mOffloadMetadata));
+            status != OK) {
             AUGMENT_LOG(E, "updateOffloadMetadata failed %d", status);
             return status;
         }
@@ -1122,6 +1237,18 @@
     return OK;
 }
 
+status_t StreamOutHalAidl::dump(int fd, const Vector<String16>& args) {
+    AUGMENT_LOG(D);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    Vector<String16> newArgs = args;
+    newArgs.push(String16(kDumpFromAudioServerArgument));
+    // Do not serialize the dump call with mCallLock
+    status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
+    StreamHalAidl::dump(fd, args);
+    return status;
+}
+
 // static
 ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
 StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
@@ -1147,7 +1274,7 @@
     if (!mStream) return NO_INIT;
     const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
     std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
-    return statusTFromBinderStatus(mStream->setHwGain(gains));
+    return statusTFromBinderStatus(serializeCall(mStream, &Stream::setHwGain, gains));
 }
 
 status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
@@ -1185,7 +1312,8 @@
     auto staticInfo = micInfoProvider->getMicrophoneInfo();
     if (!staticInfo) return INVALID_OPERATION;
     std::vector<MicrophoneDynamicInfo> dynamicInfo;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getActiveMicrophones(&dynamicInfo)));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::getActiveMicrophones, &dynamicInfo)));
     std::vector<media::MicrophoneInfoFw> result;
     result.reserve(dynamicInfo.size());
     for (const auto& d : dynamicInfo) {
@@ -1215,7 +1343,8 @@
     if (!mStream) return NO_INIT;
     ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
               VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
-    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::updateMetadata, aidlMetadata));
 }
 
 status_t StreamInHalAidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
@@ -1225,13 +1354,27 @@
               VALUE_OR_RETURN_STATUS(
                       ::aidl::android::legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(
                               direction));
-    return statusTFromBinderStatus(mStream->setMicrophoneDirection(aidlDirection));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::setMicrophoneDirection, aidlDirection));
 }
 
 status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->setMicrophoneFieldDimension(zoom));
+    return statusTFromBinderStatus(
+            serializeCall(mStream, &Stream::setMicrophoneFieldDimension, zoom));
+}
+
+status_t StreamInHalAidl::dump(int fd, const Vector<String16>& args) {
+    AUGMENT_LOG(D);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    Vector<String16> newArgs = args;
+    newArgs.push(String16(kDumpFromAudioServerArgument));
+    // Do not serialize the dump call with mCallLock
+    status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
+    StreamHalAidl::dump(fd, args);
+    return status;
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index a1cdac4..a026f52 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -37,9 +37,6 @@
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
 
-using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
-using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
-
 namespace android {
 
 class StreamContextAidl {
@@ -51,9 +48,8 @@
     typedef AidlMessageQueue<int8_t,
             ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
 
-    StreamContextAidl(
-            ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
-            bool isAsynchronous, int ioHandle)
+    StreamContextAidl(::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+                      bool isAsynchronous, int ioHandle, bool hasClipTransitionSupport)
         : mFrameSizeBytes(descriptor.frameSizeBytes),
           mCommandMQ(new CommandMQ(descriptor.command)),
           mReplyMQ(new ReplyMQ(descriptor.reply)),
@@ -62,29 +58,10 @@
           mIsAsynchronous(isAsynchronous),
           mIsMmapped(isMmapped(descriptor)),
           mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)),
-          mIoHandle(ioHandle) {}
-    StreamContextAidl(StreamContextAidl&& other) :
-            mFrameSizeBytes(other.mFrameSizeBytes),
-            mCommandMQ(std::move(other.mCommandMQ)),
-            mReplyMQ(std::move(other.mReplyMQ)),
-            mBufferSizeFrames(other.mBufferSizeFrames),
-            mDataMQ(std::move(other.mDataMQ)),
-            mIsAsynchronous(other.mIsAsynchronous),
-            mIsMmapped(other.mIsMmapped),
-            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)),
-            mIoHandle(other.mIoHandle) {}
-    StreamContextAidl& operator=(StreamContextAidl&& other) {
-        mFrameSizeBytes = other.mFrameSizeBytes;
-        mCommandMQ = std::move(other.mCommandMQ);
-        mReplyMQ = std::move(other.mReplyMQ);
-        mBufferSizeFrames = other.mBufferSizeFrames;
-        mDataMQ = std::move(other.mDataMQ);
-        mIsAsynchronous = other.mIsAsynchronous;
-        mIsMmapped = other.mIsMmapped;
-        mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
-        mIoHandle = other.mIoHandle;
-        return *this;
-    }
+          mIoHandle(ioHandle),
+          mHasClipTransitionSupport(hasClipTransitionSupport) {}
+    StreamContextAidl(StreamContextAidl&&) = default;
+    StreamContextAidl& operator=(StreamContextAidl&&) = default;
     bool isValid() const {
         return mFrameSizeBytes != 0 &&
                 mCommandMQ != nullptr && mCommandMQ->isValid() &&
@@ -107,9 +84,14 @@
     ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
-    const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
+    const ::aidl::android::hardware::audio::core::MmapBufferDescriptor&
+            getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
     size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
     int getIoHandle() const { return mIoHandle; }
+    bool hasClipTransitionSupport() const { return mHasClipTransitionSupport; }
+    void updateMmapBufferDescriptor(
+            ::aidl::android::hardware::audio::core::MmapBufferDescriptor&& desc) {
+        mMmapBufferDescriptor = std::move(desc); }
 
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -125,7 +107,7 @@
         using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
         return descriptor.audio.getTag() == Tag::mmap;
     }
-    static MmapBufferDescriptor maybeGetMmapBuffer(
+    static ::aidl::android::hardware::audio::core::MmapBufferDescriptor maybeGetMmapBuffer(
             ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
         using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
         if (descriptor.audio.getTag() == Tag::mmap) {
@@ -141,8 +123,9 @@
     std::unique_ptr<DataMQ> mDataMQ;
     bool mIsAsynchronous;
     bool mIsMmapped;
-    MmapBufferDescriptor mMmapBufferDescriptor;
+    ::aidl::android::hardware::audio::core::MmapBufferDescriptor mMmapBufferDescriptor;
     int mIoHandle;
+    bool mHasClipTransitionSupport;
 };
 
 class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -201,10 +184,16 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
-    struct StatePositions {
+    struct FrameCounters {
         int64_t framesAtFlushOrDrain;
         int64_t framesAtStandby;
     };
+    struct StatePositions {
+        FrameCounters observable;
+        FrameCounters hardware;
+        enum DrainState : int32_t { NONE, ALL, EN /*early notify*/, EN_RECEIVED };
+        DrainState drainState;
+    };
 
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
@@ -289,13 +278,22 @@
 
     status_t exit();
 
+    template <typename T, typename Callable, typename... Args>
+    auto serializeCall(const std::shared_ptr<T>& obj, Callable&& func, Args&&... args)
+            EXCLUDES(mCallLock) {
+        std::lock_guard lock(mCallLock);
+        return std::invoke(std::forward<Callable&&>(func),
+                           std::forward<const std::shared_ptr<T>&>(obj),
+                           std::forward<Args&&>(args)...);
+    }
+
     void onAsyncTransferReady();
     void onAsyncDrainReady();
     void onAsyncError();
 
     const bool mIsInput;
     const audio_config_base_t mConfig;
-    const StreamContextAidl mContext;
+    StreamContextAidl mContext;
     // This lock is used to make sending of a command and receiving a reply an atomic
     // operation. Otherwise, when two threads are trying to send a command, they may both advance to
     // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
@@ -325,7 +323,17 @@
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
             StatePositions* statePositions = nullptr);
 
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
+    /*
+     * This lock is exclusively intended to serialize binder calls to remote
+     * IStream[Common|Out|In] objects in Audio HAL. Thereby, preventing any race conditions in Audio
+     * HAL. The only exception for above is when calling the IStream[Common|Out|In]::dump API.
+     * Please note that lock doesn't prevent access to IStream[Common|Out|In] class fields. That
+     * explains why there is no 'GUARDED_BY' annotations.
+     */
+    std::mutex mCallLock;
+
+    using Stream = ::aidl::android::hardware::audio::core::IStreamCommon;
+    const std::shared_ptr<Stream> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
     const int64_t mLastReplyLifeTimeNs;
     std::mutex mLock;
@@ -337,6 +345,8 @@
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
+    int32_t mAidlInterfaceVersion = -1;
+    bool mSupportsCreateMmapBuffer = false;
 };
 
 class CallbackBroker;
@@ -430,17 +440,20 @@
     void onDrainReady() override;
     void onError(bool isHardError) override;
 
+    status_t dump(int fd, const Vector<String16>& args) override;
+
   private:
     friend class sp<StreamOutHalAidl>;
 
     static ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
     legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy);
 
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
+    using Stream = ::aidl::android::hardware::audio::core::IStreamOut;
+    const std::shared_ptr<Stream> mStream;
     const wp<CallbackBroker> mCallbackBroker;
     mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
 
-    AudioOffloadMetadata mOffloadMetadata;
+    ::aidl::android::hardware::audio::common::AudioOffloadMetadata mOffloadMetadata;
 
     // Can not be constructed directly by clients.
     StreamOutHalAidl(
@@ -487,13 +500,16 @@
     // Called when the metadata of the stream's sink has been changed.
     status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
 
+    status_t dump(int fd, const Vector<String16>& args) override;
+
   private:
     friend class sp<StreamInHalAidl>;
 
     static ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
     legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy);
 
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn> mStream;
+    using Stream = ::aidl::android::hardware::audio::core::IStreamIn;
+    const std::shared_ptr<Stream> mStream;
     const wp<MicrophoneInfoProvider> mMicInfoProvider;
 
     // Can not be constructed directly by clients.
diff --git a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
index cf8d7f0..9fdcd67 100644
--- a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
 
 #include <media/audiohal/EffectBufferHalInterface.h>
+#include <media/AudioDeviceTypeAddr.h>
 #include <system/audio_effect.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
@@ -54,6 +55,9 @@
 
     virtual status_t dump(int fd) = 0;
 
+    // Only implemented in AIDL effect HAL: set a vector of AudioDeviceTypeAddr
+    virtual status_t setDevices(const AudioDeviceTypeAddrVector& deviceTypes) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     EffectHalInterface() = default;
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 4bd7e3d..6397e99 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -151,9 +151,9 @@
  *               | Mixed      | Direct       | Direct
  *               |            | non-offload  | offload
  * ==============|============|==============|==============
- *  PCM and      | Continuous |              |
- *  encapsulated |            |              |
- *  bitstream    |            |              |
+ *  PCM          | Continuous |              |
+ *               |            |              |
+ *               |            |              |
  * --------------|------------| Continuous†  |
  *  Bitstream    |            |              | Reset on
  *  encapsulated |            |              | flush, drain
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index f6a7eea..e369d8b 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -76,3 +76,14 @@
     header_libs: ["libaudiohalimpl_headers"],
     static_libs: ["libgmock"],
 }
+
+cc_test {
+    name: "EffectHalAidlTest",
+    srcs: [
+        ":audio_effect_hal_aidl_src_files",
+        "EffectHalAidl_test.cpp",
+    ],
+    defaults: ["libaudiohal_aidl_test_default"],
+    header_libs: ["libaudiohalimpl_headers"],
+    static_libs: ["libgmock"],
+}
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 0f5334f..1730bfa 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -61,6 +61,10 @@
 
 class VendorParameterMock {
   public:
+    void clearParameters() {
+        mAsyncParameters.clear();
+        mSyncParameters.clear();
+    }
     const std::vector<std::string>& getRetrievedParameterIds() const { return mGetParameterIds; }
     const std::vector<VendorParameter>& getAsyncParameters() const { return mAsyncParameters; }
     const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
@@ -827,15 +831,18 @@
   public:
     void SetUp() override {
         mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
-        mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
+        mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
+        mDevice = sp<DeviceHalAidl>::make("test", mModule, mVendorExt);
     }
     void TearDown() override {
         mDevice.clear();
+        mVendorExt.reset();
         mModule.reset();
     }
 
   protected:
     std::shared_ptr<ModuleMock> mModule;
+    std::shared_ptr<TestHalAdapterVendorExtension> mVendorExt;
     sp<DeviceHalAidl> mDevice;
 };
 
@@ -988,9 +995,12 @@
         mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
         struct audio_config config = AUDIO_CONFIG_INITIALIZER;
         ::aidl::android::hardware::audio::core::StreamDescriptor descriptor;
-        StreamContextAidl context(descriptor, false /*isAsynchronous*/, 0);
+        StreamContextAidl context(descriptor, false /*isAsynchronous*/, 0,
+                                  false /*hasClipTransitionSupport*/);
         mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
                                           std::move(context), mStreamCommon, mVendorExt);
+        // The stream may check for some properties after creating.
+        mStreamCommon->clearParameters();
     }
     void TearDown() override {
         mStream.clear();
diff --git a/media/libaudiohal/tests/EffectHalAidl_test.cpp b/media/libaudiohal/tests/EffectHalAidl_test.cpp
new file mode 100644
index 0000000..4a0eb31
--- /dev/null
+++ b/media/libaudiohal/tests/EffectHalAidl_test.cpp
@@ -0,0 +1,349 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHalAidlTest"
+
+#include "EffectHalAidl.h"
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+
+#include <aidl/android/hardware/audio/effect/Parameter.h>
+#include <android/binder_status.h>
+#include <media/AudioDeviceTypeAddr.h>
+#include <utils/Log.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <utility>
+#include <vector>
+
+namespace {
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using android::AudioDeviceTypeAddr;
+using android::AudioDeviceTypeAddrVector;
+using android::OK;
+using android::sp;
+using android::effect::EffectHalAidl;
+
+using ::testing::_;
+using ::testing::Return;
+
+class IFactoryMock : public ::aidl::android::hardware::audio::effect::BnFactory {
+  public:
+    IFactoryMock() = default;
+
+    ndk::ScopedAStatus queryEffects(const std::optional<AudioUuid>&,
+                                    const std::optional<AudioUuid>&,
+                                    const std::optional<AudioUuid>&,
+                                    std::vector<Descriptor>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>&,
+                                       std::vector<Processing>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus createEffect(const AudioUuid&, std::shared_ptr<IEffect>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus destroyEffect(const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+class IEffectMock : public ::aidl::android::hardware::audio::effect::BnEffect {
+  public:
+    IEffectMock() = default;
+
+    MOCK_METHOD(ndk::ScopedAStatus, open,
+                (const Parameter::Common& common, const std::optional<Parameter::Specific>& spec,
+                 IEffect::OpenEffectReturn* ret),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, close, (), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getDescriptor, (Descriptor * desc), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, command, (CommandId commandId), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getState, (State * state), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getParameter, (const Parameter::Id& id, Parameter* param),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, reopen, (IEffect::OpenEffectReturn * ret), (override));
+
+    ndk::ScopedAStatus setParameter(const Parameter& param) {
+        if (param == mExpectParam)
+            return ndk::ScopedAStatus::ok();
+        else {
+            ALOGW("%s mismatch, %s vs %s", __func__, param.toString().c_str(),
+                  mExpectParam.toString().c_str());
+            return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+        }
+    }
+
+    void setExpectParameter(const Parameter& param) { mExpectParam = param; }
+
+  private:
+    Parameter mExpectParam;
+};
+
+// Predefined vector of {audio_devices_t, AudioDeviceDescription} pair
+static const std::vector<std::pair<audio_devices_t, AudioDeviceDescription>>& kAudioDevicePairs = {
+        {AUDIO_DEVICE_NONE, AudioDeviceDescription{.type = AudioDeviceType::NONE}},
+        {AUDIO_DEVICE_OUT_EARPIECE,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_SPEAKER_EARPIECE}},
+        {AUDIO_DEVICE_OUT_SPEAKER, AudioDeviceDescription{.type = AudioDeviceType::OUT_SPEAKER}},
+        {AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADPHONE,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_SCO}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_CARKIT,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_SCO}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADPHONE,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_A2DP}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_SPEAKER,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_A2DP}},
+        {AUDIO_DEVICE_OUT_TELEPHONY_TX,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_TELEPHONY_TX}},
+        {AUDIO_DEVICE_OUT_AUX_LINE, AudioDeviceDescription{.type = AudioDeviceType::OUT_LINE_AUX}},
+        {AUDIO_DEVICE_OUT_SPEAKER_SAFE,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_SPEAKER_SAFE}},
+        {AUDIO_DEVICE_OUT_HEARING_AID,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEARING_AID,
+                                .connection = AudioDeviceDescription::CONNECTION_WIRELESS}},
+        {AUDIO_DEVICE_OUT_ECHO_CANCELLER,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_ECHO_CANCELLER}},
+        {AUDIO_DEVICE_OUT_BLE_SPEAKER,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_SPEAKER,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_LE}},
+        {AUDIO_DEVICE_OUT_BLE_BROADCAST,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_BROADCAST,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_LE}},
+        {AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_MULTICHANNEL_GROUP,
+                                .connection = AudioDeviceDescription::CONNECTION_VIRTUAL}},
+
+        {AUDIO_DEVICE_OUT_DEFAULT, AudioDeviceDescription{.type = AudioDeviceType::OUT_DEFAULT}},
+        {AUDIO_DEVICE_OUT_WIRED_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_SCO}},
+        {AUDIO_DEVICE_OUT_HDMI,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI}},
+        {AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DOCK,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DOCK,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_OUT_USB_ACCESSORY,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_ACCESSORY,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_OUT_USB_DEVICE,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_OUT_FM, AudioDeviceDescription{.type = AudioDeviceType::OUT_FM}},
+        {AUDIO_DEVICE_OUT_LINE,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_OUT_SPDIF,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_SPDIF}},
+        {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_A2DP}},
+        {AUDIO_DEVICE_OUT_IP,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_IP_V4}},
+        {AUDIO_DEVICE_OUT_BUS, AudioDeviceDescription{.type = AudioDeviceType::OUT_BUS}},
+        {AUDIO_DEVICE_OUT_PROXY,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_AFE_PROXY,
+                                .connection = AudioDeviceDescription::CONNECTION_VIRTUAL}},
+        {AUDIO_DEVICE_OUT_USB_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_OUT_HDMI_ARC,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI_ARC}},
+        {AUDIO_DEVICE_OUT_HDMI_EARC,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI_EARC}},
+        {AUDIO_DEVICE_OUT_BLE_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_LE}},
+        {AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+         AudioDeviceDescription{.type = AudioDeviceType::OUT_SUBMIX,
+                                .connection = AudioDeviceDescription::CONNECTION_VIRTUAL}},
+        {AUDIO_DEVICE_IN_BUILTIN_MIC,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_MICROPHONE}},
+        {AUDIO_DEVICE_IN_BACK_MIC,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_MICROPHONE_BACK}},
+        {AUDIO_DEVICE_IN_TELEPHONY_RX,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_TELEPHONY_RX}},
+        {AUDIO_DEVICE_IN_TV_TUNER, AudioDeviceDescription{.type = AudioDeviceType::IN_TV_TUNER}},
+        {AUDIO_DEVICE_IN_LOOPBACK, AudioDeviceDescription{.type = AudioDeviceType::IN_LOOPBACK}},
+        {AUDIO_DEVICE_IN_BLUETOOTH_BLE,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_LE}},
+        {AUDIO_DEVICE_IN_ECHO_REFERENCE,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_ECHO_REFERENCE}},
+        {AUDIO_DEVICE_IN_DEFAULT, AudioDeviceDescription{.type = AudioDeviceType::IN_DEFAULT}},
+        {AUDIO_DEVICE_IN_WIRED_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_SCO}},
+        {AUDIO_DEVICE_IN_HDMI,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI}},
+        {AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DOCK,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DOCK,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_IN_USB_ACCESSORY,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_ACCESSORY,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_IN_USB_DEVICE,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_IN_FM_TUNER, AudioDeviceDescription{.type = AudioDeviceType::IN_FM_TUNER}},
+        {AUDIO_DEVICE_IN_LINE,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_ANALOG}},
+        {AUDIO_DEVICE_IN_SPDIF,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_SPDIF}},
+        {AUDIO_DEVICE_IN_BLUETOOTH_A2DP,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_A2DP}},
+        {AUDIO_DEVICE_IN_IP,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_IP_V4}},
+        {AUDIO_DEVICE_IN_BUS, AudioDeviceDescription{.type = AudioDeviceType::IN_BUS}},
+        {AUDIO_DEVICE_IN_PROXY,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_AFE_PROXY,
+                                .connection = AudioDeviceDescription::CONNECTION_VIRTUAL}},
+        {AUDIO_DEVICE_IN_USB_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_USB}},
+        {AUDIO_DEVICE_IN_HDMI_ARC,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI_ARC}},
+        {AUDIO_DEVICE_IN_HDMI_EARC,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_DEVICE,
+                                .connection = AudioDeviceDescription::CONNECTION_HDMI_EARC}},
+        {AUDIO_DEVICE_IN_BLE_HEADSET,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_HEADSET,
+                                .connection = AudioDeviceDescription::CONNECTION_BT_LE}},
+        {AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+         AudioDeviceDescription{.type = AudioDeviceType::IN_SUBMIX,
+                                .connection = AudioDeviceDescription::CONNECTION_VIRTUAL}},
+};
+
+}  // namespace
+
+class EffectHalAidlTest : public testing::Test {
+  public:
+    void SetUp() override {
+        mIEffectMock = ndk::SharedRefBase::make<IEffectMock>();
+        mIFactoryMock = ndk::SharedRefBase::make<IFactoryMock>();
+        mEffect = sp<EffectHalAidl>::make(mIFactoryMock, mIEffectMock, 0 /*session*/, 0 /*ioId*/,
+                                          mDescMock /* descriptor */, false /*isProxyEffect*/);
+    }
+    void TearDown() override {
+        mIEffectMock.reset();
+        mIFactoryMock.reset();
+        mEffect.clear();
+    }
+
+    // Helper function to setDevice with one to multi (window size) device pairs set to effect, and
+    // expect the same from IEffect mocking object.
+    void setDevicesWithWindow(size_t window = 1) {
+        for (size_t i = 0; i + window < kAudioDevicePairs.size(); i += window) {
+            AudioDeviceTypeAddrVector deviceTypes;
+            std::vector<AudioDeviceDescription> deviceDescs;
+            for (size_t w = 0; w < window; w++) {
+                deviceTypes.emplace_back(kAudioDevicePairs[i + w].first /* audio_device_t */, "");
+                deviceDescs.emplace_back(
+                        kAudioDevicePairs[i + w].second /* AudioDeviceDescription */);
+            }
+            const Parameter expect = Parameter::make<Parameter::deviceDescription>(deviceDescs);
+            mIEffectMock->setExpectParameter(expect);
+            EXPECT_EQ(OK, mEffect->setDevices(deviceTypes))
+                    << " setDevices: "
+                    << dumpAudioDeviceTypeAddrVector(deviceTypes, false /*includeSensitiveInfo*/)
+                    << " expect: " << expect.toString();
+        }
+    }
+
+  protected:
+    std::shared_ptr<IEffectMock> mIEffectMock;
+    std::shared_ptr<IFactoryMock> mIFactoryMock;
+    Descriptor mDescMock;
+    sp<EffectHalAidl> mEffect;
+};
+
+TEST_F(EffectHalAidlTest, emptyDeviceSet) {
+    AudioDeviceTypeAddr deviceType(AUDIO_DEVICE_NONE, "");
+    AudioDeviceTypeAddrVector deviceTypes{deviceType};
+    std::vector<AudioDeviceDescription> deviceDescs;
+
+    Parameter expect = Parameter::make<Parameter::deviceDescription>(deviceDescs);
+    mIEffectMock->setExpectParameter(expect);
+    EXPECT_NE(OK, mEffect->setDevices(deviceTypes))
+            << " expecting error with setDevices: "
+            << dumpAudioDeviceTypeAddrVector(deviceTypes, false /*includeSensitiveInfo*/)
+            << " expect: " << expect.toString();
+
+    deviceDescs.emplace_back(AudioDeviceDescription{.type = AudioDeviceType::NONE});
+    expect = Parameter::make<Parameter::deviceDescription>(deviceDescs);
+    mIEffectMock->setExpectParameter(expect);
+    EXPECT_EQ(OK, mEffect->setDevices(deviceTypes))
+            << " setDevices: "
+            << dumpAudioDeviceTypeAddrVector(deviceTypes, false /*includeSensitiveInfo*/)
+            << " expect: " << expect.toString();
+}
+
+// go over the `kAudioDevicePairs` pair, and setDevice for each pair
+TEST_F(EffectHalAidlTest, deviceSinglePairSet) {
+    ASSERT_NO_FATAL_FAILURE(setDevicesWithWindow());
+}
+
+// SetDevice with multiple device pairs from `kAudioDevicePairs`
+TEST_F(EffectHalAidlTest, deviceMultiplePairSet) {
+    for (size_t window = 2; window < kAudioDevicePairs.size(); window++) {
+        ASSERT_NO_FATAL_FAILURE(setDevicesWithWindow(window));
+    }
+}
diff --git a/media/libaudiopermission/Android.bp b/media/libaudiopermission/Android.bp
index 161e5a7..12853a7 100644
--- a/media/libaudiopermission/Android.bp
+++ b/media/libaudiopermission/Android.bp
@@ -13,6 +13,7 @@
     name: "libaudiopermission",
 
     srcs: [
+        "AppOpsSession.cpp",
         "NativePermissionController.cpp",
         "ValidatedAttributionSourceState.cpp",
     ],
@@ -21,10 +22,12 @@
     ],
 
     header_libs: [
+        "libaudio_system_headers",
         "libcutils_headers",
         "liberror_headers",
     ],
     export_header_lib_headers: [
+        "libaudio_system_headers",
         "liberror_headers",
     ],
     static_libs: [
@@ -35,6 +38,7 @@
         "libbase",
         "libbinder",
         "liblog",
+        "libpermission",
         "libutils",
     ],
 
@@ -63,8 +67,6 @@
     tidy_checks: [
         "android-*",
         "bugprone-*",
-        "cert-*",
-        "clang-analyzer-security*",
         "google-*",
         "misc-*",
         "modernize-*",
@@ -73,8 +75,6 @@
     tidy_checks_as_errors: [
         "android-*",
         "bugprone-*",
-        "cert-*",
-        "clang-analyzer-security*",
         "google-*",
         "misc-*",
         "modernize-*",
@@ -98,6 +98,7 @@
         "libbase",
         "libbinder",
         "liblog",
+        "libpermission",
         "libutils",
     ],
     srcs: ["tests/*.cpp"],
diff --git a/media/libaudiopermission/AppOpsSession.cpp b/media/libaudiopermission/AppOpsSession.cpp
new file mode 100644
index 0000000..6fc3190
--- /dev/null
+++ b/media/libaudiopermission/AppOpsSession.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/AppOpsSession.h>
+#include <media/AttrSourceIter.h>
+
+#include <binder/AppOpsManager.h>
+#include <binder/PermissionController.h>
+
+using ::android::content::AttributionSourceState;
+
+namespace android::media::permission {
+
+// Package name param is unreliable (can be empty), but we should only get valid events based on
+// how we register the listener.
+binder::Status DefaultAppOpsFacade::OpMonitor::opChanged(int32_t op, int32_t, const String16&,
+                                                         const String16&) {
+    if (mOps.attributedOp != op && mOps.additionalOp != op) return binder::Status::ok();
+    DefaultAppOpsFacade x{};
+    const auto allowed = x.checkAccess(mAttr, mOps);
+    std::lock_guard l_{mLock};
+    if (mCb != nullptr) {
+        mCb(allowed);
+    }
+    return binder::Status::ok();
+}
+
+bool DefaultAppOpsFacade::startAccess(const ValidatedAttributionSourceState& attr_, Ops ops) {
+    const AttributionSourceState& attr = attr_;
+    // TODO(b/384845037) no support for additional op at the moment
+    if (ops.attributedOp == AppOpsManager::OP_NONE) return true;  // nothing to do
+    // TODO(b/384845037) caching and sync up-call marking
+    AppOpsManager ap{};
+    return ap.startOpNoThrow(
+        /*op=*/ ops.attributedOp,
+        /*uid=*/ attr.uid,
+        /*callingPackage=*/ String16{attr.packageName.value_or("").c_str()},
+        /*startIfModeDefault=*/ false,
+        /*attributionTag=*/ attr.attributionTag.has_value() ?
+            String16{attr.attributionTag.value().c_str()}
+                                            : String16{},
+        /*message=*/ String16{"AppOpsSession start"})
+    == AppOpsManager::MODE_ALLOWED;
+}
+
+void DefaultAppOpsFacade::stopAccess(const ValidatedAttributionSourceState& attr_, Ops ops) {
+    const AttributionSourceState& attr = attr_;
+    // TODO(b/384845037) caching and sync up-call marking
+    AppOpsManager ap{};
+    return ap.finishOp(
+        /*op=*/ ops.attributedOp,
+        /*uid=*/ attr.uid,
+        /*callingPackage=*/ String16{attr.packageName.value_or("").c_str()},
+        /*attributionTag=*/ attr.attributionTag.has_value() ?
+                                            String16{attr.attributionTag.value().c_str()}
+                                            : String16{});
+}
+
+bool DefaultAppOpsFacade::checkAccess(const ValidatedAttributionSourceState& attr, Ops ops) {
+    const auto check = [&](int32_t op) -> bool {
+        if (op == AppOpsManager::OP_NONE) return true;
+        return std::all_of(
+                AttrSourceIter::cbegin(attr), AttrSourceIter::cend(), [&](const auto& x) {
+                    return AppOpsManager{}.checkOp(op, x.uid,
+                                                   String16{x.packageName.value_or("").c_str()}) ==
+                                   AppOpsManager::MODE_ALLOWED;
+                });
+    };
+    return check(ops.attributedOp) && check(ops.additionalOp);
+}
+
+uintptr_t DefaultAppOpsFacade::addChangeCallback(const ValidatedAttributionSourceState& attr,
+                                                 Ops ops, std::function<void(bool)> cb) {
+    const auto listener = sp<OpMonitor>::make(attr, ops, std::move(cb));
+    const auto reg = [&](int32_t op) {
+        std::for_each(AttrSourceIter::cbegin(attr), AttrSourceIter::cend(),
+                      [&listener, op](const auto& x) {
+                          AppOpsManager{}.startWatchingMode(
+                                  op, String16{x.packageName.value_or("").c_str()},
+                                  AppOpsManager::WATCH_FOREGROUND_CHANGES, listener);
+                      });
+    };
+    if (ops.attributedOp != AppOpsManager::OP_NONE) reg(ops.attributedOp);
+    if (ops.additionalOp != AppOpsManager::OP_NONE) reg(ops.additionalOp);
+    std::lock_guard l_{sMapLock};
+    const auto cookie = reinterpret_cast<uintptr_t>(listener.get());
+    sCbMap[cookie] = std::move(listener);
+    return cookie;
+}
+
+void DefaultAppOpsFacade::removeChangeCallback(uintptr_t ptr) {
+    sp<OpMonitor> monitor;
+    {
+        std::lock_guard l_{sMapLock};
+        if (const auto iter = sCbMap.find(ptr); iter != sCbMap.end()) {
+            monitor = std::move(iter->second);
+            sCbMap.erase(iter);
+        }
+    }
+    LOG_ALWAYS_FATAL_IF(monitor == nullptr, "Unexpected nullptr in cb map");
+    monitor->stopListening();
+    // Callbacks are stored via binder identity in AppOpsService, so unregistering the callback
+    // removes it regardless of how many calls to startWatchingMode occurred
+    AppOpsManager{}.stopWatchingMode(monitor);
+}
+
+}  // namespace android::media::permission
diff --git a/media/libaudiopermission/NativePermissionController.cpp b/media/libaudiopermission/NativePermissionController.cpp
index 5743076..6234202 100644
--- a/media/libaudiopermission/NativePermissionController.cpp
+++ b/media/libaudiopermission/NativePermissionController.cpp
@@ -14,6 +14,9 @@
  * limitations under the License.
  */
 
+// #define LOG_NDEBUG 0
+#define LOG_TAG "NativePermissionController"
+
 #include <media/NativePermissionController.h>
 
 #include <algorithm>
@@ -23,6 +26,7 @@
 #include <android-base/expected.h>
 #include <cutils/android_filesystem_config.h>
 #include <utils/Errors.h>
+#include <utils/Log.h>
 
 using ::android::binder::Status;
 using ::android::error::BinderResult;
@@ -35,9 +39,9 @@
         case AID_ROOT:
             return "root";
         case AID_SYSTEM:
-            return "system";
+            return "android";
         case AID_SHELL:
-            return "shell";
+            return "com.android.shell";
         case AID_MEDIA:
             return "media";
         case AID_AUDIOSERVER:
@@ -66,6 +70,7 @@
 
 Status NativePermissionController::updatePackagesForUid(const UidPackageState& newPackageState) {
     std::lock_guard l{m_};
+    ALOGD("%s, %s", __func__, newPackageState.toString().c_str());
     package_map_.insert_or_assign(newPackageState.uid, newPackageState.packageNames);
     const auto& cursor = package_map_.find(newPackageState.uid);
 
@@ -85,6 +90,7 @@
 
 Status NativePermissionController::populatePermissionState(PermissionEnum perm,
                                                            const std::vector<int>& uids) {
+    ALOGV("%s, %d", __func__, static_cast<int>(perm));
     if (perm >= PermissionEnum::ENUM_SIZE || static_cast<int>(perm) < 0) {
         return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
     }
@@ -128,8 +134,8 @@
     uid = uid % AID_USER_OFFSET;
     const auto fixed_package_opt = getFixedPackageName(uid);
     if (fixed_package_opt.has_value()) {
-        return (uid == AID_ROOT || uid == AID_SYSTEM) ? true :
-                packageName == fixed_package_opt.value();
+        return (uid == AID_ROOT || uid == AID_SYSTEM) ? true
+                                                      : packageName == fixed_package_opt.value();
     }
     std::lock_guard l{m_};
     if (!is_package_populated_) {
@@ -138,18 +144,26 @@
                 "NPC::validatedUidPackagePair: controller never populated by system_server");
     }
     const auto cursor = package_map_.find(uid);
-    return (cursor != package_map_.end()) &&
-           (std::find(cursor->second.begin(), cursor->second.end(), packageName) !=
+    if (cursor == package_map_.end()) {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_ARGUMENT,
+                "NPC::validatedUidPackagePair: unknown uid");
+    }
+    return (std::find(cursor->second.begin(), cursor->second.end(), packageName) !=
             cursor->second.end());
 }
 
 BinderResult<bool> NativePermissionController::checkPermission(PermissionEnum perm,
                                                                uid_t uid) const {
+    ALOGV("%s: checking %d for %u", __func__, static_cast<int>(perm), uid);
     if (uid == AID_ROOT || uid == AID_SYSTEM || uid == getuid()) return true;
     std::lock_guard l{m_};
     const auto& uids = permission_map_[static_cast<size_t>(perm)];
     if (!uids.empty()) {
-        return std::binary_search(uids.begin(), uids.end(), uid);
+        const bool ret = std::binary_search(uids.begin(), uids.end(), uid);
+        // Log locally until all call-sites log errors well
+        ALOGD_IF(!ret, "%s: missing %d for %u", __func__, static_cast<int>(perm), uid);
+        return ret;
     } else {
         return unexpectedExceptionCode(
                 Status::EX_ILLEGAL_STATE,
diff --git a/media/libaudiopermission/include/media/AppOpsSession.h b/media/libaudiopermission/include/media/AppOpsSession.h
new file mode 100644
index 0000000..5fcb3ab
--- /dev/null
+++ b/media/libaudiopermission/include/media/AppOpsSession.h
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <com/android/internal/app/BnAppOpsCallback.h>
+#include <cutils/android_filesystem_config.h>
+#include <log/log.h>
+#include <utils/RefBase.h>
+
+#include <functional>
+
+#include "media/ValidatedAttributionSourceState.h"
+
+namespace android::media::permission {
+
+using ValidatedAttributionSourceState =
+        com::android::media::permission::ValidatedAttributionSourceState;
+
+struct Ops {
+    int attributedOp = -1;  // same as OP_NONE
+    int additionalOp = -1;
+};
+
+/**
+ * This session manages an ongoing data access corresponding with appops.
+ *
+ * This access can be temporarily stopped by appops or the data source. When access is revoked by
+ * AppOps, the registered callback will be called in order to ensure that the data delivery is
+ * halted. When halted by the data source, AppOps will be notified that the access ended.
+ * Note, this session does not ref-count on itself. It should represent a single access, which
+ * necessarily cannot nest.
+ * This class is fully locked since notifications from appops are async. Public interface can be
+ * slow due to binder calls.
+ */
+template <typename AppOpsFacade>
+// Abstract interface that permits minor differences in how appops is called per client usage
+    requires requires(AppOpsFacade x, const ValidatedAttributionSourceState attr) {
+        { x.startAccess(attr, Ops{}) } -> std::same_as<bool>;  // true if permitted
+        { x.stopAccess(attr, Ops{}) } -> std::same_as<void>;
+        { x.checkAccess(attr, Ops{}) } -> std::same_as<bool>;  // true if permitted
+        {
+            x.addChangeCallback(attr, Ops{}, std::function<void(bool)>{})
+        } -> std::same_as<uintptr_t>;
+        // no more calls after return is required
+        { x.removeChangeCallback(uintptr_t{}) } -> std::same_as<void>;
+    }
+class AppOpsSession {
+  public:
+    /**
+     * @param attr - AttributionChain which the access is attributed to.
+     * @param ops - The ops required for this delivery
+     * @param opChangedCb - A callback (async) which notifies the data source that the permitted
+     * state due to appops has changed. This is only called if a delivery request is ongoing (i.e.
+     * after a `beginDeliveryRequest` but before a `endDeliveryRequest`, regardless of the return
+     * value of the former). Upon calling the cb, appops has been updated, so the post-condition is
+     * that the data source delivers data iff the parameter is true. If the delivery fails for some
+     * reason, `endDeliveryRequest` should be called shortly, however, there is no re-entrancy into
+     * this class. The client should never change the access request state based on this cb.
+     * @param appOpsFacade - See the requires clause -- an interface which encapsulates the calls to
+     * AppOpsService.
+     */
+    AppOpsSession(ValidatedAttributionSourceState attr, Ops ops,
+                  std::function<void(bool)> opChangedCb, AppOpsFacade appOpsFacade = {})
+        : mAttr(std::move(attr)),
+          mOps(ops),
+          mCb(std::move(opChangedCb)),
+          mAppOps(std::move(appOpsFacade)),
+          mCookie(mAppOps.addChangeCallback(mAttr, ops,
+                                            [this](bool x) { this->onPermittedChanged(x); })),
+          mDeliveryRequested(false),
+          mDeliveryPermitted(mAppOps.checkAccess(mAttr, ops)) { }
+
+    ~AppOpsSession() {
+        endDeliveryRequest();
+        mAppOps.removeChangeCallback(mCookie);
+    }
+
+    /**
+     * Source intends to start delivering data. Updates AppOps if applicable.
+     * @return true if data should be delivered (i.e. AppOps also permits delivery)
+     */
+    bool beginDeliveryRequest() {
+        std::lock_guard l{mLock};
+        if (mDeliveryRequested) {
+            ALOG(LOG_WARN, "AppOpsSession", "Redundant beginDeliveryRequest ignored");
+            return mDeliveryPermitted;
+        }
+        mDeliveryRequested = true;
+        if (mDeliveryPermitted) {
+            mDeliveryPermitted = mAppOps.startAccess(mAttr, mOps);
+        }
+        return mDeliveryPermitted;
+    }
+
+    /**
+     * Source intends to stop delivering data. Updates AppOps if applicable.
+     */
+    void endDeliveryRequest() {
+        std::lock_guard l{mLock};
+        if (!mDeliveryRequested) return;
+        mDeliveryRequested = false;
+        if (mDeliveryPermitted) {
+            mAppOps.stopAccess(mAttr, mOps);
+        }
+    }
+
+    /**
+     * Check if delivery is permitted.
+     */
+    bool isDeliveryPermitted() const {
+        std::lock_guard l{mLock};
+        return mDeliveryPermitted;
+    }
+
+  private:
+    /**
+     * AppOps permitted state has changed. From callback thread.
+     */
+    void onPermittedChanged(bool isPermitted) {
+        std::lock_guard l{mLock};
+        if (mDeliveryPermitted == isPermitted) return;
+        const bool oldIsPermitted = mDeliveryPermitted;
+        mDeliveryPermitted = isPermitted;
+        if (!mDeliveryRequested) return;
+        if (mDeliveryPermitted) {
+            mDeliveryPermitted = mAppOps.startAccess(mAttr, mOps);
+        } else {
+            mAppOps.stopAccess(mAttr, mOps);
+        }
+        if (oldIsPermitted != mDeliveryPermitted) {
+            mCb(mDeliveryPermitted);
+        }
+    }
+
+    mutable std::mutex mLock{};
+    const ValidatedAttributionSourceState mAttr;
+    const Ops mOps;
+    const std::function<void(bool)> mCb;
+    AppOpsFacade mAppOps GUARDED_BY(mLock);
+    const uintptr_t mCookie;
+    bool mDeliveryRequested GUARDED_BY(mLock);
+    bool mDeliveryPermitted GUARDED_BY(mLock);
+};
+
+class DefaultAppOpsFacade {
+  public:
+    bool startAccess(const ValidatedAttributionSourceState&, Ops);
+    void stopAccess(const ValidatedAttributionSourceState&, Ops);
+    bool checkAccess(const ValidatedAttributionSourceState&, Ops);
+    uintptr_t addChangeCallback(const ValidatedAttributionSourceState&, Ops,
+                                std::function<void(bool)> cb);
+    void removeChangeCallback(uintptr_t);
+
+    class OpMonitor : public com::android::internal::app::BnAppOpsCallback {
+      public:
+        OpMonitor(ValidatedAttributionSourceState attr, Ops ops, std::function<void(bool)> cb)
+            : mAttr(std::move(attr)), mOps(ops), mCb(std::move(cb)) { }
+
+        binder::Status opChanged(int32_t op, int32_t uid, const String16& packageName,
+                                 const String16& persistentDeviceId) override;
+
+        void stopListening() {
+            std::lock_guard l_{mLock};
+            mCb = nullptr;
+        }
+
+      private:
+        const ValidatedAttributionSourceState mAttr;
+        const Ops mOps;
+        std::mutex mLock;
+        std::function<void(bool)> mCb GUARDED_BY(mLock);
+    };
+
+  private:
+    static inline std::mutex sMapLock{};
+    static inline std::unordered_map<uintptr_t, sp<OpMonitor>> sCbMap{};
+};
+
+}  // namespace android::media::permission
diff --git a/media/libaudiopermission/include/media/AttrSourceIter.h b/media/libaudiopermission/include/media/AttrSourceIter.h
index 609d218..4d4eaea 100644
--- a/media/libaudiopermission/include/media/AttrSourceIter.h
+++ b/media/libaudiopermission/include/media/AttrSourceIter.h
@@ -105,4 +105,4 @@
 inline ConstIter cend() {
     return ConstIter::end();
 }
-}  // namespace com::android::media::permission::AttrSourceIter
+}  // namespace android::media::permission::AttrSourceIter
diff --git a/media/libaudiopermission/include/media/AudioPermissionPolicy.h b/media/libaudiopermission/include/media/AudioPermissionPolicy.h
new file mode 100644
index 0000000..d90f242
--- /dev/null
+++ b/media/libaudiopermission/include/media/AudioPermissionPolicy.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <cutils/android_filesystem_config.h>
+#include <log/log.h>
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
+namespace android::media::permission {
+
+/**
+ * Tracking ops for the following uids are pointless -- system always has ops and isn't tracked,
+ * and native only services don't have packages which is what appops tracks over.
+ * So, we skip tracking, and always permit access.
+ * Notable omissions are AID_SHELL, AID_RADIO, and AID_BLUETOOTH, which are non-app uids which
+ * interface with us, but are associated with packages so can still be attributed to.
+ */
+inline bool skipOpsForUid(uid_t uid) {
+    switch (uid % AID_USER_OFFSET) {
+        case AID_ROOT:
+        case AID_SYSTEM:
+        case AID_MEDIA:
+        case AID_AUDIOSERVER:
+        case AID_CAMERASERVER:
+            return true;
+        default:
+            return false;
+    }
+}
+
+inline bool isSystemUsage(audio_usage_t usage) {
+    const std::array SYSTEM_USAGES{AUDIO_USAGE_CALL_ASSISTANT, AUDIO_USAGE_EMERGENCY,
+                                   AUDIO_USAGE_SAFETY, AUDIO_USAGE_VEHICLE_STATUS,
+                                   AUDIO_USAGE_ANNOUNCEMENT, AUDIO_USAGE_SPEAKER_CLEANUP};
+    return std::find(std::begin(SYSTEM_USAGES), std::end(SYSTEM_USAGES), usage) !=
+           std::end(SYSTEM_USAGES);
+}
+
+}  // namespace android::media::permission
diff --git a/media/libaudiopermission/tests/AppOpsSessionTests.cpp b/media/libaudiopermission/tests/AppOpsSessionTests.cpp
new file mode 100644
index 0000000..d1f2132
--- /dev/null
+++ b/media/libaudiopermission/tests/AppOpsSessionTests.cpp
@@ -0,0 +1,261 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/content/AttributionSourceState.h>
+#include <media/AppOpsSession.h>
+#include <media/ValidatedAttributionSourceState.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <functional>
+
+using ::android::content::AttributionSourceState;
+using ::android::media::permission::AppOpsSession;
+using ::android::media::permission::Ops;
+using ::com::android::media::permission::ValidatedAttributionSourceState;
+
+using ::testing::ElementsAreArray;
+using ::testing::IsEmpty;
+using ::testing::Ne;
+
+class AppOpsSessionTests;
+
+class AppOpsTestFacade {
+    friend AppOpsSessionTests;
+
+  public:
+    bool startAccess(const ValidatedAttributionSourceState&, Ops) {
+        if (allowed_) ++running_;
+        return allowed_;
+    }
+
+    void stopAccess(const ValidatedAttributionSourceState&, Ops) { --running_; }
+
+    bool checkAccess(const ValidatedAttributionSourceState&, Ops) { return allowed_; }
+
+    uintptr_t addChangeCallback(const ValidatedAttributionSourceState&, Ops,
+                                std::function<void(bool)> cb) {
+        cb_ = cb;
+        return 42;
+    }
+
+    void removeChangeCallback(uintptr_t) {}
+
+  private:
+    // Static abuse since this is copied into the test, and represents "global" state
+    static inline std::function<void(bool)> cb_;
+    static inline bool allowed_;
+    static inline int running_;
+};
+
+class AppOpsSessionTests : public ::testing::Test {
+  protected:
+    static constexpr Ops mOps = {100, 101};
+
+    // We must manually clear the facade state because it is static; the members of this
+    // class need no clearing because the fixture is constructed fresh per-test.
+    void SetUp() override {
+        AppOpsTestFacade::cb_ = nullptr;
+        AppOpsTestFacade::running_ = 0;
+        AppOpsTestFacade::allowed_ = false;
+    }
+
+    void facadeSetAllowed(bool isAllowed) { AppOpsTestFacade::allowed_ = isAllowed; }
+
+    int facadeGetRunning() { return AppOpsTestFacade::running_; }
+
+    void facadeTriggerChange(bool isPermitted) {
+        EXPECT_THAT(isPermitted, Ne(AppOpsTestFacade::allowed_));
+        facadeSetAllowed(isPermitted);
+        AppOpsTestFacade::cb_(isPermitted);
+    }
+
+    // Change the underlying allowed state without delivering a change callback.
+    // Allows for simulating a callback which is reversed quickly and callbacks which may not
+    // apply to our package.
+    void facadeTriggerSpuriousChange(bool isPermitted) { facadeSetAllowed(isPermitted); }
+
+    void dataDeliveryCb(bool shouldDeliver) { mDeliveredCbs.push_back(shouldDeliver); }
+
+    const AttributionSourceState mAttr = []() {
+        AttributionSourceState attr;
+        attr.uid = 1;
+        attr.pid = 2;
+        attr.deviceId = 3;
+        return attr;
+    }();
+
+    void initSession() {
+        mAppOpsSession.emplace(
+                ValidatedAttributionSourceState::createFromTrustedSource(mAttr), mOps,
+                [this](bool x) { dataDeliveryCb(x); }, AppOpsTestFacade{});
+    }
+
+    // For verification of delivered callbacks
+    // vector<bool> since it's a test
+    std::vector<bool> mDeliveredCbs;
+    std::optional<AppOpsSession<AppOpsTestFacade>> mAppOpsSession;
+};
+
+TEST_F(AppOpsSessionTests, beginDeliveryRequest_Allowed) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, beginDeliveryRequest_Denied) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, endDeliveryRequest_Ongoing) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+    mAppOpsSession->endDeliveryRequest();
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, endDeliveryRequest_Paused) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+    mAppOpsSession->endDeliveryRequest();
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, endDeliveryRequest_PausedByCb) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+    facadeTriggerChange(false);
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    mAppOpsSession->endDeliveryRequest();
+    EXPECT_EQ(facadeGetRunning(), 0);
+}
+
+TEST_F(AppOpsSessionTests, onPermittedFalse_Ongoing_Change) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+    facadeTriggerChange(false);
+    EXPECT_EQ(facadeGetRunning(), 0);
+    EXPECT_THAT(mDeliveredCbs, ElementsAreArray({false}));
+}
+
+TEST_F(AppOpsSessionTests, onPermittedTrue_Ongoing_Change) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+    facadeTriggerChange(true);
+    EXPECT_EQ(facadeGetRunning(), 1);
+    EXPECT_THAT(mDeliveredCbs, ElementsAreArray({true}));
+}
+
+TEST_F(AppOpsSessionTests, onPermittedTrue_Ongoing_Change_Spurious) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+    facadeTriggerSpuriousChange(true);
+    EXPECT_EQ(facadeGetRunning(), 0);
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, onPermittedFalse_Ongoing_Same) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+    facadeTriggerSpuriousChange(false);
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, onPermittedTrue_Ongoing_Same) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+    facadeTriggerSpuriousChange(true);
+    EXPECT_EQ(facadeGetRunning(), 1);
+
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, onPermittedFalse_Paused_Change) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    mAppOpsSession->endDeliveryRequest();
+
+    EXPECT_EQ(facadeGetRunning(), 0);
+    facadeTriggerChange(false);
+    EXPECT_EQ(facadeGetRunning(), 0);
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, onPermittedTrue_Paused_Change) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    mAppOpsSession->endDeliveryRequest();
+
+    facadeTriggerChange(true);
+    EXPECT_EQ(facadeGetRunning(), 0);
+    EXPECT_THAT(mDeliveredCbs, IsEmpty());
+}
+
+TEST_F(AppOpsSessionTests, dtor_Running) {
+    facadeSetAllowed(true);
+    initSession();
+    EXPECT_TRUE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 1);
+
+    // call dtor
+    mAppOpsSession.reset();
+    EXPECT_EQ(facadeGetRunning(), 0);
+}
+
+TEST_F(AppOpsSessionTests, dtor_NotRunning) {
+    facadeSetAllowed(false);
+    initSession();
+    EXPECT_FALSE(mAppOpsSession->beginDeliveryRequest());
+    EXPECT_EQ(facadeGetRunning(), 0);
+
+    // call dtor
+    mAppOpsSession.reset();
+    EXPECT_EQ(facadeGetRunning(), 0);
+}
diff --git a/media/libaudiopermission/tests/NativePermissionControllerTest.cpp b/media/libaudiopermission/tests/NativePermissionControllerTest.cpp
index f2423c1..6f0ef9a 100644
--- a/media/libaudiopermission/tests/NativePermissionControllerTest.cpp
+++ b/media/libaudiopermission/tests/NativePermissionControllerTest.cpp
@@ -57,7 +57,7 @@
                 IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
 
     // fixed uids should work
-    EXPECT_THAT(controller_.getPackagesForUid(1000), IsOkAnd(ElementsAre(std::string{"system"})));
+    EXPECT_THAT(controller_.getPackagesForUid(1000), IsOkAnd(ElementsAre(std::string{"android"})));
 }
 
 TEST_F(NativePermissionControllerTest, validateUidPackagePair_NotPopulated) {
@@ -66,7 +66,7 @@
                 IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
 
     // fixed uids should work
-    EXPECT_THAT(controller_.validateUidPackagePair(1000, "system"), IsOkAnd(IsTrue()));
+    EXPECT_THAT(controller_.validateUidPackagePair(1000, "android"), IsOkAnd(IsTrue()));
 }
 
 // ---  Tests for populatePackagesForUids ----
@@ -170,7 +170,8 @@
 
     EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
 
-    EXPECT_THAT(controller_.validateUidPackagePair(12000, "any.package"), IsOkAnd(IsFalse()));
+    EXPECT_THAT(controller_.validateUidPackagePair(12000, "any.package"),
+            IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
 }
 
 TEST_F(NativePermissionControllerTest, populatePermissionState_InvalidPermission) {
diff --git a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
index c4e0d65..6857c8e 100644
--- a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
+++ b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
@@ -50,7 +50,7 @@
     AUDIO_CHANNEL_OUT_5POINT1POINT4,
     AUDIO_CHANNEL_OUT_7POINT1POINT2,
     AUDIO_CHANNEL_OUT_7POINT1POINT4,
-    AUDIO_CHANNEL_OUT_13POINT_360RA,
+    AUDIO_CHANNEL_OUT_13POINT0,
     AUDIO_CHANNEL_OUT_22POINT2,
 };
 
diff --git a/media/libeffects/downmix/tests/downmix_tests.cpp b/media/libeffects/downmix/tests/downmix_tests.cpp
index 20e19a3..20393b9 100644
--- a/media/libeffects/downmix/tests/downmix_tests.cpp
+++ b/media/libeffects/downmix/tests/downmix_tests.cpp
@@ -48,7 +48,7 @@
     AUDIO_CHANNEL_OUT_5POINT1POINT4,
     AUDIO_CHANNEL_OUT_7POINT1POINT2,
     AUDIO_CHANNEL_OUT_7POINT1POINT4,
-    AUDIO_CHANNEL_OUT_13POINT_360RA,
+    AUDIO_CHANNEL_OUT_13POINT0,
     AUDIO_CHANNEL_OUT_22POINT2,
     audio_channel_mask_t(AUDIO_CHANNEL_OUT_22POINT2
             | AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT | AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT),
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index dd14ac2..81b52aa 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -370,7 +370,8 @@
         StageType type) {
     std::vector<DynamicsProcessing::EqBandConfig> eqBands;
 
-    auto maxBand = mEngineArchitecture.preEqStage.bandCount;
+    auto maxBand = type == StageType::POSTEQ ? mEngineArchitecture.postEqStage.bandCount
+                                             : mEngineArchitecture.preEqStage.bandCount;
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto eq = getEqWithType_l(type, ch);
         if (!eq) {
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 4f04ffb..775568a 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -18,7 +18,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libldnhncr",
 
     vendor: true,
@@ -36,6 +36,7 @@
     ],
 
     shared_libs: [
+        "libaudioutils",
         "libcutils",
         "liblog",
     ],
@@ -64,6 +65,7 @@
         "-Wthread-safety",
     ],
     shared_libs: [
+        "libaudioutils",
         "libcutils",
         "liblog",
     ],
diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
index d61efd3..426a9a1 100644
--- a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
@@ -30,26 +30,8 @@
 #include <audio_effects/effect_loudnessenhancer.h>
 #include "dsp/core/dynamic_range_compression.h"
 
-// BUILD_FLOAT targets building a float effect instead of the legacy int16_t effect.
-#define BUILD_FLOAT
-
-#ifdef BUILD_FLOAT
-
 static constexpr audio_format_t kProcessFormat = AUDIO_FORMAT_PCM_FLOAT;
 
-#else
-
-static constexpr audio_format_t kProcessFormat = AUDIO_FORMAT_PCM_16_BIT;
-
-static inline int16_t clamp16(int32_t sample)
-{
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
-#endif // BUILD_FLOAT
-
 extern "C" {
 
 // effect_handle_t interface implementation for LE effect
@@ -121,7 +103,9 @@
     if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate) return -EINVAL;
     if (pConfig->inputCfg.channels != pConfig->outputCfg.channels) return -EINVAL;
     if (pConfig->inputCfg.format != pConfig->outputCfg.format) return -EINVAL;
-    if (pConfig->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL;
+    if (audio_channel_count_from_out_mask(pConfig->inputCfg.channels) > FCC_LIMIT) {
+        return -EINVAL;
+    }
     if (pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE &&
             pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL;
     if (pConfig->inputCfg.format != kProcessFormat) return -EINVAL;
@@ -296,50 +280,23 @@
     }
 
     //ALOGV("LE about to process %d samples", inBuffer->frameCount);
-    uint16_t inIdx;
-#ifdef BUILD_FLOAT
     constexpr float scale = 1 << 15; // power of 2 is lossless conversion to int16_t range
     constexpr float inverseScale = 1.f / scale;
     const float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f) * scale;
-#else
-    float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f);
-#endif
-    float leftSample, rightSample;
-    for (inIdx = 0 ; inIdx < inBuffer->frameCount ; inIdx++) {
-        // makeup gain is applied on the input of the compressor
-#ifdef BUILD_FLOAT
-        leftSample  = inputAmp * inBuffer->f32[2*inIdx];
-        rightSample = inputAmp * inBuffer->f32[2*inIdx +1];
-        pContext->mCompressor->Compress(&leftSample, &rightSample);
-        inBuffer->f32[2*inIdx]    = leftSample * inverseScale;
-        inBuffer->f32[2*inIdx +1] = rightSample * inverseScale;
-#else
-        leftSample  = inputAmp * (float)inBuffer->s16[2*inIdx];
-        rightSample = inputAmp * (float)inBuffer->s16[2*inIdx +1];
-        pContext->mCompressor->Compress(&leftSample, &rightSample);
-        inBuffer->s16[2*inIdx]    = (int16_t) leftSample;
-        inBuffer->s16[2*inIdx +1] = (int16_t) rightSample;
-#endif // BUILD_FLOAT
-    }
+    const size_t channelCount =
+            audio_channel_count_from_out_mask(pContext->mConfig.outputCfg.channels);
+    pContext->mCompressor->Compress(
+            channelCount, inputAmp, inverseScale, inBuffer->f32, inBuffer->frameCount);
 
     if (inBuffer->raw != outBuffer->raw) {
-#ifdef BUILD_FLOAT
+        const size_t sampleCount = outBuffer->frameCount * channelCount;
         if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-            for (size_t i = 0; i < outBuffer->frameCount*2; i++) {
+            for (size_t i = 0; i < sampleCount; i++) {
                 outBuffer->f32[i] += inBuffer->f32[i];
             }
         } else {
-            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(float));
+            memcpy(outBuffer->raw, inBuffer->raw, sampleCount * sizeof(float));
         }
-#else
-        if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-            for (size_t i = 0; i < outBuffer->frameCount*2; i++) {
-                outBuffer->s16[i] = clamp16(outBuffer->s16[i] + inBuffer->s16[i]);
-            }
-        } else {
-            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(int16_t));
-        }
-#endif // BUILD_FLOAT
     }
     if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
         return -ENODATA;
@@ -446,7 +403,7 @@
         case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB:
             pContext->mTargetGainmB = *((int32_t *)p->data + 1);
             ALOGV("set target gain(mB) = %d", pContext->mTargetGainmB);
-            LE_reset(pContext); // apply parameter update
+            pContext->mCompressor->set_target_gain(pow(10, pContext->mTargetGainmB / 2000.f));
             break;
         default:
             *(int32_t *)pReplyData = -EINVAL;
@@ -502,4 +459,3 @@
 };
 
 }; // extern "C"
-
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index 592fd60..7b992c2 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -21,6 +21,7 @@
 
 #include "EffectLoudnessEnhancer.h"
 
+using aidl::android::hardware::audio::common::getChannelCount;
 using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::getEffectImplUuidLoudnessEnhancer;
 using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer;
@@ -138,6 +139,12 @@
         LOG(DEBUG) << __func__ << " context already exist";
         return mContext;
     }
+    const int channelCount = getChannelCount(common.input.base.channelMask);
+    if (FCC_LIMIT < channelCount) {
+        LOG(ERROR) << __func__
+                   << " channelCount not supported: " << common.input.base.channelMask.toString();
+        return nullptr;
+    }
 
     mContext = std::make_shared<LoudnessEnhancerContext>(1 /* statusFmqDepth */, common);
     return mContext;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index ac8b14a..d692c62 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -43,11 +43,17 @@
     return RetCode::SUCCESS;
 }
 
+RetCode LoudnessEnhancerContext::reset() {
+    const float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
+    LOG(VERBOSE) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
+    mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
+    return RetCode::SUCCESS;
+}
+
 RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
-    float targetAmp = pow(10, gainMb / 2000.0f);  // mB to linear amplification
     if (mCompressor != nullptr) {
-        // Get samplingRate from input
-        mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
+        const float targetAmp = pow(10, gainMb / 2000.f);  // mB to linear amplification
+        mCompressor->set_target_gain(targetAmp);
     }
     mGain = gainMb;
     return RetCode::SUCCESS;
@@ -69,43 +75,20 @@
     constexpr float scale = 1 << 15;  // power of 2 is lossless conversion to int16_t range
     constexpr float inverseScale = 1.f / scale;
     const float inputAmp = pow(10, mGain / 2000.0f) * scale;
-    float leftSample, rightSample;
-
     if (mCompressor != nullptr) {
-        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
-            // makeup gain is applied on the input of the compressor
-            leftSample = inputAmp * in[inIdx];
-            rightSample = inputAmp * in[inIdx + 1];
-            mCompressor->Compress(&leftSample, &rightSample);
-            in[inIdx] = leftSample * inverseScale;
-            in[inIdx + 1] = rightSample * inverseScale;
-        }
-    } else {
-        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
-            leftSample = inputAmp * in[inIdx];
-            rightSample = inputAmp * in[inIdx + 1];
-            in[inIdx] = leftSample * inverseScale;
-            in[inIdx + 1] = rightSample * inverseScale;
-        }
+        const size_t channelCount = aidl::android::hardware::audio::common::getChannelCount(
+                mCommon.input.base.channelMask);
+        const size_t frameCount = samples / channelCount;
+        mCompressor->Compress(channelCount, inputAmp, inverseScale, in, frameCount);
     }
-    bool accumulate = false;
     if (in != out) {
-        for (int i = 0; i < samples; i++) {
-            if (accumulate) {
-                out[i] += in[i];
-            } else {
-                out[i] = in[i];
-            }
-        }
+        // nit: update Compress() to write to out.
+        memcpy(out, in, samples * sizeof(float));
     }
     return {STATUS_OK, samples, samples};
 }
 
 void LoudnessEnhancerContext::init_params() {
-    int channelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            mCommon.input.base.channelMask);
-    LOG_ALWAYS_FATAL_IF(channelCount != 2, "channel count %d not supported", channelCount);
-
     mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
     float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
     LOG(VERBOSE) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 67ccd24..c6d2734 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -36,7 +36,7 @@
 
     RetCode enable() override;
     RetCode disable() override;
-
+    RetCode reset() override;
     RetCode setLeGain(int gainMb);
     int getLeGain() const { return mGain; }
 
diff --git a/media/libeffects/loudness/common/core/basic_types.h b/media/libeffects/loudness/common/core/basic_types.h
deleted file mode 100644
index 593e914..0000000
--- a/media/libeffects/loudness/common/core/basic_types.h
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
-#define LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
-
-#include <stddef.h>
-#include <stdlib.h>
-#include <string>
-using ::std::string;
-using ::std::basic_string;
-#include <vector>
-using ::std::vector;
-
-#include "common/core/os.h"
-
-// -----------------------------------------------------------------------------
-// Definitions of common basic types:
-// -----------------------------------------------------------------------------
-
-#if !defined(G_COMPILE) && !defined(BASE_INTEGRAL_TYPES_H_)
-
-namespace le_fx {
-
-typedef signed char         schar;
-typedef signed char         int8;
-typedef short               int16;
-typedef int                 int32;
-typedef long long           int64;
-
-typedef unsigned char       uint8;
-typedef unsigned short      uint16;
-typedef unsigned int        uint32;
-typedef unsigned long long  uint64;
-
-}  // namespace le_fx
-
-#endif
-
-namespace le_fx {
-
-struct FloatArray {
-  int length;
-  float *data;
-
-  FloatArray(void) {
-    data = NULL;
-    length = 0;
-  }
-};
-
-struct Int16Array {
-  int length;
-  int16 *data;
-
-  Int16Array(void) {
-    data = NULL;
-    length = 0;
-  }
-};
-
-struct Int32Array {
-  int length;
-  int32 *data;
-
-  Int32Array(void) {
-    data = NULL;
-    length = 0;
-  }
-};
-
-struct Int8Array {
-  int length;
-  uint8 *data;
-
-  Int8Array(void) {
-    data = NULL;
-    length = 0;
-  }
-};
-
-//
-// Simple wrapper for waveform data:
-//
-class WaveData : public vector<int16> {
- public:
-  WaveData();
-  ~WaveData();
-
-  void Set(int number_samples, int sampling_rate, int16 *data);
-  int sample_rate(void) const;
-  void set_sample_rate(int sample_rate);
-  bool Equals(const WaveData &wave_data, int threshold = 0) const;
-
- private:
-  int sample_rate_;
-};
-
-}  // namespace le_fx
-
-#endif  // LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
diff --git a/media/libeffects/loudness/common/core/byte_swapper.h b/media/libeffects/loudness/common/core/byte_swapper.h
deleted file mode 100644
index 8f0caf3..0000000
--- a/media/libeffects/loudness/common/core/byte_swapper.h
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
-#define LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
-
-#include <stdio.h>
-#include <string.h>
-
-#include "common/core/basic_types.h"
-#include "common/core/os.h"
-
-namespace le_fx {
-
-namespace arch {
-
-inline bool IsLittleEndian(void) {
-  int16 word = 1;
-  char *cp = reinterpret_cast<char *>(&word);
-  return cp[0] != 0;
-}
-
-inline bool IsBigEndian(void) {
-  return !IsLittleEndian();
-}
-
-template <typename T, unsigned int kValSize>
-struct ByteSwapper {
-  static T Swap(const T &val) {
-    T new_val = val;
-    char *first = &new_val, *last = first + kValSize - 1, x;
-    for (; first < last; ++first, --last) {
-      x = *last;
-      *last = *first;
-      *first = x;
-    }
-    return new_val;
-  }
-};
-
-template <typename T>
-struct ByteSwapper<T, 1> {
-  static T Swap(const T &val) {
-    return val;
-  }
-};
-
-template <typename T>
-struct ByteSwapper<T, 2> {
-  static T Swap(const T &val) {
-    T new_val;
-    const char *o = (const char *)&val;
-    char *p = reinterpret_cast<char *>(&new_val);
-    p[0] = o[1];
-    p[1] = o[0];
-    return new_val;
-  }
-};
-
-template <typename T>
-struct ByteSwapper<T, 4> {
-  static T Swap(const T &val) {
-    T new_val;
-    const char *o = (const char *)&val;
-    char *p = reinterpret_cast<char *>(&new_val);
-    p[0] = o[3];
-    p[1] = o[2];
-    p[2] = o[1];
-    p[3] = o[0];
-    return new_val;
-  }
-};
-
-template <typename T>
-struct ByteSwapper<T, 8> {
-  static T Swap(const T &val) {
-    T new_val = val;
-    const char *o = (const char *)&val;
-    char *p = reinterpret_cast<char *>(&new_val);
-    p[0] = o[7];
-    p[1] = o[6];
-    p[2] = o[5];
-    p[3] = o[4];
-    p[4] = o[3];
-    p[5] = o[2];
-    p[6] = o[1];
-    p[7] = o[0];
-    return new_val;
-  }
-};
-
-template <typename T>
-T SwapBytes(const T &val, bool force_swap) {
-  if (force_swap) {
-#if !defined(LE_FX__NEED_BYTESWAP)
-    return ByteSwapper<T, sizeof(T)>::Swap(val);
-#else
-    return val;
-#endif  // !LE_FX_NEED_BYTESWAP
-  } else {
-#if !defined(LE_FX_NEED_BYTESWAP)
-    return val;
-#else
-    return ByteSwapper<T, sizeof(T)>::Swap(val);
-#endif  // !LE_FX_NEED_BYTESWAP
-  }
-}
-
-template <typename T>
-const T *SwapBytes(const T *vals, unsigned int num_items, bool force_swap) {
-  if (force_swap) {
-#if !defined(LE_FX_NEED_BYTESWAP)
-    T *writeable_vals = const_cast<T *>(vals);
-    for (unsigned int i = 0; i < num_items; i++) {
-      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
-    }
-    return writeable_vals;
-#else
-    return vals;
-#endif  // !LE_FX_NEED_BYTESWAP
-  } else {
-#if !defined(LE_FX_NEED_BYTESWAP)
-    return vals;
-#else
-    T *writeable_vals = const_cast<T *>(vals);
-    for (unsigned int i = 0; i < num_items; i++) {
-      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
-    }
-    return writeable_vals;
-#endif  // !LE_FX_NEED_BYTESWAP
-  }
-}
-
-}  // namespace arch
-
-}  // namespace le_fx
-
-#endif  // LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
diff --git a/media/libeffects/loudness/common/core/math.h b/media/libeffects/loudness/common/core/math.h
deleted file mode 100644
index 3f302cc..0000000
--- a/media/libeffects/loudness/common/core/math.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_COMMON_CORE_MATH_H_
-#define LE_FX_ENGINE_COMMON_CORE_MATH_H_
-
-#include <math.h>
-#include <algorithm>
-using ::std::min;
-using ::std::max;
-using ::std::fill;
-using ::std::fill_n;using ::std::lower_bound;
-#include <cmath>
-#include <math.h>
-//using ::std::fpclassify;
-
-#include "common/core/os.h"
-#include "common/core/types.h"
-
-namespace le_fx {
-namespace math {
-
-// A fast approximation to log2(.)
-inline float fast_log2(float val) {
-  int* const exp_ptr = reinterpret_cast <int *> (&val);
-  int x = *exp_ptr;
-  const int log_2 = ((x >> 23) & 255) - 128;
-  x &= ~(255 << 23);
-  x += 127 << 23;
-  *exp_ptr = x;
-  val = ((-1.0f / 3) * val + 2) * val - 2.0f / 3;
-  return static_cast<float>(val + log_2);
-}
-
-// A fast approximation to log(.)
-inline float fast_log(float val) {
-  return fast_log2(val) *
-      0.693147180559945286226763982995180413126945495605468750f;
-}
-
-// An approximation of the exp(.) function using a 5-th order Taylor expansion.
-// It's pretty accurate between +-0.1 and accurate to 10e-3 between +-1
-template <typename T>
-inline T ExpApproximationViaTaylorExpansionOrder5(T x) {
-  const T x2 = x * x;
-  const T x3 = x2 * x;
-  const T x4 = x2 * x2;
-  const T x5 = x3 * x2;
-  return 1.0f + x + 0.5f * x2 +
-      0.16666666666666665741480812812369549646973609924316406250f * x3 +
-      0.0416666666666666643537020320309238741174340248107910156250f * x4 +
-      0.008333333333333333217685101601546193705871701240539550781250f * x5;
-}
-
-}  // namespace math
-}  // namespace le_fx
-
-// Math functions missing in Android NDK:
-#if defined(LE_FX_OS_ANDROID)
-
-namespace std {
-
-//
-// Round to the nearest integer: We need this implementation
-// since std::round is missing on android.
-//
-template <typename T>
-inline T round(const T &x) {
-  return static_cast<T>(std::floor(static_cast<double>(x) + 0.5));
-}
-
-}  // namespace std
-
-#endif  // LE_FX_OS_ANDROID
-
-#endif  // LE_FX_ENGINE_COMMON_CORE_MATH_H_
diff --git a/media/libeffects/loudness/common/core/os.h b/media/libeffects/loudness/common/core/os.h
deleted file mode 100644
index 4a8ce82..0000000
--- a/media/libeffects/loudness/common/core/os.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_COMMON_CORE_OS_H_
-#define LE_FX_ENGINE_COMMON_CORE_OS_H_
-
-// -----------------------------------------------------------------------------
-// OS Identification:
-// -----------------------------------------------------------------------------
-
-#define LE_FX_OS_UNIX
-#if defined(__ANDROID__)
-#    define LE_FX_OS_ANDROID
-#endif  // Android
-
-#endif // LE_FX_ENGINE_COMMON_CORE_OS_H_
diff --git a/media/libeffects/loudness/common/core/types.h b/media/libeffects/loudness/common/core/types.h
deleted file mode 100644
index d1b6c6a..0000000
--- a/media/libeffects/loudness/common/core/types.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_COMMON_CORE_TYPES_H_
-#define LE_FX_ENGINE_COMMON_CORE_TYPES_H_
-
-#include "common/core/os.h"
-
-#include "common/core/basic_types.h"
-
-#ifndef LE_FX_DISALLOW_COPY_AND_ASSIGN
-#define LE_FX_DISALLOW_COPY_AND_ASSIGN(TypeName) \
-  TypeName(const TypeName&); \
-  void operator=(const TypeName&)
-#endif  // LE_FX_DISALLOW_COPY_AND_ASSIGN
-
-
-#endif  // LE_FX_ENGINE_COMMON_CORE_TYPES_H_
diff --git a/media/libeffects/loudness/dsp/core/basic-inl.h b/media/libeffects/loudness/dsp/core/basic-inl.h
deleted file mode 100644
index 3f77147..0000000
--- a/media/libeffects/loudness/dsp/core/basic-inl.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
-#define LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
-
-#include <math.h>
-
-namespace le_fx {
-
-namespace sigmod {
-
-template <typename T>
-int SearchIndex(const T x_data[],
-                T x,
-                int start_index,
-                int end_index) {
-  int start = start_index;
-  int end = end_index;
-  while (end > start + 1) {
-    int i = (end + start) / 2;
-    if (x_data[i] > x) {
-      end = i;
-    } else {
-      start = i;
-    }
-  }
-  return start;
-}
-
-}  // namespace sigmod
-
-}  // namespace le_fx
-
-#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/basic.h b/media/libeffects/loudness/dsp/core/basic.h
deleted file mode 100644
index 27e0a8d..0000000
--- a/media/libeffects/loudness/dsp/core/basic.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_H_
-#define LE_FX_ENGINE_DSP_CORE_BASIC_H_
-
-#include <limits.h>
-#include "common/core/math.h"
-#include "common/core/types.h"
-
-namespace le_fx {
-
-namespace sigmod {
-
-// Searchs for the interval that contains <x> using a divide-and-conquer
-// algorithm.
-// X[]: a vector of sorted values (X[i+1] > X[i])
-// x:   a value
-// StartIndex: the minimum searched index
-// EndIndex: the maximum searched index
-// returns: the index <i> that satisfies: X[i] <= x <= X[i+1] &&
-//          StartIndex <= i <= (EndIndex-1)
-template <typename T>
-int SearchIndex(const T x_data[],
-                T x,
-                int start_index,
-                int end_index);
-
-}  // namespace sigmod
-
-}  // namespace le_fx
-
-#include "dsp/core/basic-inl.h"
-
-#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_H_
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
deleted file mode 100644
index 7ea0593..0000000
--- a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
-#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
-
-#ifndef LOG_TAG
-#define LOG_TAG NULL
-#endif
-//#define LOG_NDEBUG 0
-
-#include <log/log.h>
-
-namespace le_fx {
-
-inline void AdaptiveDynamicRangeCompression::set_knee_threshold(float decibel) {
-  // Converts to 1og-base
-  knee_threshold_in_decibel_ = decibel;
-  knee_threshold_ = 0.1151292546497023061569109358970308676362037658691406250f *
-      decibel + 10.39717719035538401328722102334722876548767089843750f;
-}
-
-
-inline void AdaptiveDynamicRangeCompression::set_knee_threshold_via_target_gain(
-    float target_gain) {
-  const float decibel = target_gain_to_knee_threshold_.Interpolate(
-        target_gain);
-  ALOGV("set_knee_threshold_via_target_gain: decibel =%.3fdB", decibel);
-  set_knee_threshold(decibel);
-}
-
-}  // namespace le_fx
-
-
-#endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
index 33f6779..3d71184 100644
--- a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
@@ -15,45 +15,25 @@
  */
 //#define LOG_NDEBUG 0
 
-#include <cmath>
-
-#include "common/core/math.h"
-#include "common/core/types.h"
-#include "dsp/core/basic.h"
-#include "dsp/core/interpolation.h"
+#include <log/log.h>
 #include "dsp/core/dynamic_range_compression.h"
-
-#include <android/log.h>
+#include <system/audio.h>
 
 namespace le_fx {
 
-// Definitions for static const class members declared in
-// dynamic_range_compression.h.
-const float AdaptiveDynamicRangeCompression::kMinAbsValue = 0.000001f;
-const float AdaptiveDynamicRangeCompression::kMinLogAbsValue =
-    0.032766999999999997517097227728299912996590137481689453125f;
-const float AdaptiveDynamicRangeCompression::kFixedPointLimit = 32767.0f;
-const float AdaptiveDynamicRangeCompression::kInverseFixedPointLimit =
-    1.0f / AdaptiveDynamicRangeCompression::kFixedPointLimit;
-const float AdaptiveDynamicRangeCompression::kDefaultKneeThresholdInDecibel =
-    -8.0f;
-const float AdaptiveDynamicRangeCompression::kCompressionRatio = 7.0f;
-const float AdaptiveDynamicRangeCompression::kTauAttack = 0.001f;
-const float AdaptiveDynamicRangeCompression::kTauRelease = 0.015f;
-
 AdaptiveDynamicRangeCompression::AdaptiveDynamicRangeCompression() {
-  static const float kTargetGain[] = {
+  static constexpr float kTargetGain[] = {
       1.0f, 2.0f, 3.0f, 4.0f, 5.0f };
-  static const float kKneeThreshold[] = {
+  static constexpr float kKneeThreshold[] = {
       -8.0f, -8.0f, -8.5f, -9.0f, -10.0f };
   target_gain_to_knee_threshold_.Initialize(
       &kTargetGain[0], &kKneeThreshold[0],
-      sizeof(kTargetGain) / sizeof(kTargetGain[0]));
+      std::size(kTargetGain));
 }
 
 bool AdaptiveDynamicRangeCompression::Initialize(
         float target_gain, float sampling_rate) {
-  set_knee_threshold_via_target_gain(target_gain);
+  set_target_gain(target_gain);
   sampling_rate_ = sampling_rate;
   state_ = 0.0f;
   compressor_gain_ = 1.0f;
@@ -69,69 +49,53 @@
   } else {
     alpha_release_ = 0.0f;
   }
-  // Feed-forward topology
-  slope_ = 1.0f / kCompressionRatio - 1.0f;
   return true;
 }
 
-float AdaptiveDynamicRangeCompression::Compress(float x) {
-  const float max_abs_x = std::max(std::fabs(x), kMinLogAbsValue);
-  const float max_abs_x_dB = math::fast_log(max_abs_x);
-  // Subtract Threshold from log-encoded input to get the amount of overshoot
-  const float overshoot = max_abs_x_dB - knee_threshold_;
-  // Hard half-wave rectifier
-  const float rect = std::max(overshoot, 0.0f);
-  // Multiply rectified overshoot with slope
-  const float cv = rect * slope_;
-  const float prev_state = state_;
-  if (cv <= state_) {
-    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
-  } else {
-    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
-  }
-  compressor_gain_ *= expf(state_ - prev_state);
-  x *= compressor_gain_;
-  if (x > kFixedPointLimit) {
-    return kFixedPointLimit;
-  }
-  if (x < -kFixedPointLimit) {
-    return -kFixedPointLimit;
-  }
-  return x;
-}
+// Instantiate Compress for supported channel counts.
+#define INSTANTIATE_COMPRESS(CHANNEL_COUNT) \
+case CHANNEL_COUNT: \
+    if constexpr (CHANNEL_COUNT <= FCC_LIMIT) { \
+        Compress(inputAmp, inverseScale, \
+                reinterpret_cast<internal_array_t<float, CHANNEL_COUNT>*>(buffer), frameCount); \
+        return; \
+    } \
+    break;
 
-void AdaptiveDynamicRangeCompression::Compress(float *x1, float *x2) {
-  // Taking the maximum amplitude of both channels
-  const float max_abs_x = std::max(std::fabs(*x1),
-    std::max(std::fabs(*x2), kMinLogAbsValue));
-  const float max_abs_x_dB = math::fast_log(max_abs_x);
-  // Subtract Threshold from log-encoded input to get the amount of overshoot
-  const float overshoot = max_abs_x_dB - knee_threshold_;
-  // Hard half-wave rectifier
-  const float rect = std::max(overshoot, 0.0f);
-  // Multiply rectified overshoot with slope
-  const float cv = rect * slope_;
-  const float prev_state = state_;
-  if (cv <= state_) {
-    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
-  } else {
-    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
-  }
-  compressor_gain_ *= expf(state_ - prev_state);
-  *x1 *= compressor_gain_;
-  if (*x1 > kFixedPointLimit) {
-    *x1 = kFixedPointLimit;
-  }
-  if (*x1 < -kFixedPointLimit) {
-    *x1 = -kFixedPointLimit;
-  }
-  *x2 *= compressor_gain_;
-  if (*x2 > kFixedPointLimit) {
-    *x2 = kFixedPointLimit;
-  }
-  if (*x2 < -kFixedPointLimit) {
-    *x2 = -kFixedPointLimit;
-  }
+ void AdaptiveDynamicRangeCompression::Compress(size_t channelCount,
+        float inputAmp, float inverseScale, float* buffer, size_t frameCount) {
+    using android::audio_utils::intrinsics::internal_array_t;
+    switch (channelCount) {
+        INSTANTIATE_COMPRESS(1);
+        INSTANTIATE_COMPRESS(2);
+        INSTANTIATE_COMPRESS(3);
+        INSTANTIATE_COMPRESS(4);
+        INSTANTIATE_COMPRESS(5);
+        INSTANTIATE_COMPRESS(6);
+        INSTANTIATE_COMPRESS(7);
+        INSTANTIATE_COMPRESS(8);
+        INSTANTIATE_COMPRESS(9);
+        INSTANTIATE_COMPRESS(10);
+        INSTANTIATE_COMPRESS(11);
+        INSTANTIATE_COMPRESS(12);
+        INSTANTIATE_COMPRESS(13);
+        INSTANTIATE_COMPRESS(14);
+        INSTANTIATE_COMPRESS(15);
+        INSTANTIATE_COMPRESS(16);
+        INSTANTIATE_COMPRESS(17);
+        INSTANTIATE_COMPRESS(18);
+        INSTANTIATE_COMPRESS(19);
+        INSTANTIATE_COMPRESS(20);
+        INSTANTIATE_COMPRESS(21);
+        INSTANTIATE_COMPRESS(22);
+        INSTANTIATE_COMPRESS(23);
+        INSTANTIATE_COMPRESS(24);
+        INSTANTIATE_COMPRESS(25);
+        INSTANTIATE_COMPRESS(26);
+        INSTANTIATE_COMPRESS(27);
+        INSTANTIATE_COMPRESS(28);
+    }
+    LOG_ALWAYS_FATAL("%s: channelCount: %zu not supported", __func__, channelCount);
 }
 
 }  // namespace le_fx
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
index 04455c5..4d11129 100644
--- a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
@@ -14,16 +14,57 @@
  * limitations under the License.
  */
 #ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
-#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
 
-//#define LOG_NDEBUG 0
-
-#include "common/core/types.h"
-#include "common/core/math.h"
-#include "dsp/core/basic.h"
 #include "dsp/core/interpolation.h"
+#include <audio_utils/intrinsic_utils.h>
 
-#include <android/log.h>
+namespace le_fx {
+namespace math {
+// taken from common/core/math.h
+// A fast approximation to log2(.)
+inline float fast_log2(float val) {
+    int* const exp_ptr = reinterpret_cast <int *> (&val);
+    int x = *exp_ptr;
+    const int log_2 = ((x >> 23) & 255) - 128;
+    x &= ~(255 << 23);
+    x += 127 << 23;
+    *exp_ptr = x;
+    val = ((-1.0f / 3) * val + 2) * val - 2.0f / 3;
+    return static_cast<float>(val + log_2);
+}
+
+// A fast approximation to log(.)
+inline float fast_log(float val) {
+    return fast_log2(val) *
+           0.693147180559945286226763982995180413126945495605468750f;
+}
+
+// An approximation of the exp(.) function using a 5-th order Taylor expansion.
+// It's pretty accurate between +-0.1 and accurate to 10e-3 between +-1
+//
+// TODO(b/315070856)
+// ExpApproximationViaTaylorExpansionOrder5() is only marginally faster using expf() itself
+// Keeping as the 5th order expansion may be better for vector parallelization, if so desired.
+//
+// BM_math_ExpApproximationViaTaylorExpansionOrder5 2.11 ns 2.10 ns 332924668
+// BM_math_expf_speccpu2017 2.91 ns 2.90 ns 241714501
+// BM_math_exp_speccpu2017 4.06 ns 4.03 ns 173500928
+
+template <typename T>
+T ExpApproximationViaTaylorExpansionOrder5(T x) {
+    const T x2 = x * x;
+    const T x3 = x2 * x;
+    const T x4 = x2 * x2;
+    const T x5 = x3 * x2;
+    // [sic] this is 1/6, why do we have such precision annotated with 'f' for float.
+    return 1.0f + x + 0.5f * x2 +
+           0.16666666666666665741480812812369549646973609924316406250f * x3 +
+           0.0416666666666666643537020320309238741174340248107910156250f * x4 +
+           0.008333333333333333217685101601546193705871701240539550781250f * x5;
+}
+
+}  // namespace math
+}  // namespace le_fx
 
 namespace le_fx {
 
@@ -33,6 +74,8 @@
 class AdaptiveDynamicRangeCompression {
  public:
     AdaptiveDynamicRangeCompression();
+    AdaptiveDynamicRangeCompression(const AdaptiveDynamicRangeCompression&) = delete;
+    AdaptiveDynamicRangeCompression& operator=(const AdaptiveDynamicRangeCompression&) = delete;
 
     // Initializes the compressor using prior information. It assumes that the
     // input signal is speech from high-quality recordings that is scaled and then
@@ -51,47 +94,73 @@
     // relatively safe choice for many signals.
     bool Initialize(float target_gain, float sampling_rate);
 
-  // A fast version of the algorithm that uses approximate computations for the
-  // log(.) and exp(.).
-  float Compress(float x);
-
-  // Stereo channel version of the compressor
-  void Compress(float *x1, float *x2);
-
-  // This version is slower than Compress(.) but faster than CompressSlow(.)
-  float CompressNormalSpeed(float x);
-
-  // A slow version of the algorithm that is easier for further developement,
-  // tuning and debugging
-  float CompressSlow(float x);
+  // in-place compression.
+  void Compress(size_t channelCount,
+          float inputAmp, float inverseScale, float* buffer, size_t frameCount);
 
   // Sets knee threshold (in decibel).
-  void set_knee_threshold(float decibel);
+  void set_knee_threshold(float decibel) {
+      // Converts to log-base
+      knee_threshold_in_decibel_ = decibel;
+      knee_threshold_ = 0.1151292546497023061569109358970308676362037658691406250f *
+                        decibel + 10.39717719035538401328722102334722876548767089843750f;
+  }
 
   // Sets knee threshold via the target gain using an experimentally derived
   // relationship.
-  void set_knee_threshold_via_target_gain(float target_gain);
+  void set_target_gain(float target_gain) {
+        const float decibel = target_gain_to_knee_threshold_.Interpolate(
+                target_gain);
+        // ALOGV("set_target_gain: decibel =%.3fdB", decibel);
+        set_knee_threshold(decibel);
+  }
 
  private:
-  // The minimum accepted absolute input value and it's natural logarithm. This
-  // is to prevent numerical issues when the input is close to zero
-  static const float kMinAbsValue;
-  static const float kMinLogAbsValue;
+
+  // Templated Compress routine.
+  template <typename V>
+  void Compress(float inputAmp, float inverseScale, V* buffer, size_t frameCount) {
+    for (size_t i = 0; i < frameCount; ++i) {
+        auto v = android::audio_utils::intrinsics::vmul(buffer[i], inputAmp);
+        const float max_abs_x = android::audio_utils::intrinsics::vmaxv(
+                android::audio_utils::intrinsics::vabs(v));
+        const float max_abs_x_dB = math::fast_log(std::max(max_abs_x, kMinLogAbsValue));
+        // Subtract Threshold from log-encoded input to get the amount of overshoot
+        const float overshoot = max_abs_x_dB - knee_threshold_;
+        // Hard half-wave rectifier
+        const float rect = std::max(overshoot, 0.0f);
+        // Multiply rectified overshoot with slope
+        const float cv = rect * kSlope;
+        const float prev_state = state_;
+        const float alpha = (cv <= state_) ? alpha_attack_ : alpha_release_;
+        state_ = alpha * state_ + (1.0f - alpha) * cv;
+        compressor_gain_ *= expf(state_ - prev_state);
+        const auto x = android::audio_utils::intrinsics::vmul(v, compressor_gain_);
+        v = android::audio_utils::intrinsics::vclamp(x, -kFixedPointLimit, kFixedPointLimit);
+        buffer[i] = android::audio_utils::intrinsics::vmul(inverseScale, v);
+    }
+  }
+
+  // The minimum accepted absolute input value to prevent numerical issues
+  // when the input is close to zero.
+  static constexpr float kMinLogAbsValue =
+      0.032766999999999997517097227728299912996590137481689453125f;
   // Fixed-point arithmetic limits
-  static const float kFixedPointLimit;
-  static const float kInverseFixedPointLimit;
+  static constexpr float kFixedPointLimit = 32767.0f;
+  static constexpr float kInverseFixedPointLimit = 1.0f / kFixedPointLimit;
   // The default knee threshold in decibel. The knee threshold defines when the
   // compressor is actually starting to compress the value of the input samples
-  static const float kDefaultKneeThresholdInDecibel;
+  static constexpr float kDefaultKneeThresholdInDecibel = -8.0f;
   // The compression ratio is the reciprocal of the slope of the line segment
   // above the threshold (in the log-domain). The ratio controls the
   // effectiveness of the compression.
-  static const float kCompressionRatio;
+  static constexpr float kCompressionRatio = 7.0f;
   // The attack time of the envelope detector
-  static const float kTauAttack;
+  static constexpr float kTauAttack = 0.001f;
   // The release time of the envelope detector
-  static const float kTauRelease;
+  static constexpr float kTauRelease = 0.015f;
 
+  static constexpr float kSlope = 1.0f / kCompressionRatio - 1.0f;
   float sampling_rate_;
   // the internal state of the envelope detector
   float state_;
@@ -101,19 +170,13 @@
   float alpha_attack_;
   // release constant for exponential dumping
   float alpha_release_;
-  float slope_;
-  // The knee threshold
   float knee_threshold_;
   float knee_threshold_in_decibel_;
   // This interpolator provides the function that relates target gain to knee
   // threshold.
   sigmod::InterpolatorLinear<float> target_gain_to_knee_threshold_;
-
-  LE_FX_DISALLOW_COPY_AND_ASSIGN(AdaptiveDynamicRangeCompression);
 };
 
 }  // namespace le_fx
 
-#include "dsp/core/dynamic_range_compression-inl.h"
-
 #endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolation.h b/media/libeffects/loudness/dsp/core/interpolation.h
index 23c287c..85d3103 100644
--- a/media/libeffects/loudness/dsp/core/interpolation.h
+++ b/media/libeffects/loudness/dsp/core/interpolation.h
@@ -16,7 +16,6 @@
 #ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
 #define LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
 
-#include "common/core/math.h"
 #include "dsp/core/interpolator_base.h"
 #include "dsp/core/interpolator_linear.h"
 
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
deleted file mode 100644
index fb87c79..0000000
--- a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
-#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
-#ifndef LOG_TAG
-#define LOG_TAG NULL
-#endif
-//#define LOG_NDEBUG 0
-
-#include <log/log.h>
-
-#include "dsp/core/basic.h"
-
-namespace le_fx {
-
-namespace sigmod {
-
-template <typename T, class Algorithm>
-InterpolatorBase<T, Algorithm>::InterpolatorBase() {
-  status_ = false;
-  cached_index_ = 0;
-  x_data_ = NULL;
-  y_data_ = NULL;
-  data_length_ = 0;
-  own_x_data_ = false;
-  x_start_offset_ = 0.0;
-  last_element_index_ = -1;
-  x_inverse_sampling_interval_ = 0.0;
-  state_ = NULL;
-}
-
-template <typename T, class Algorithm>
-InterpolatorBase<T, Algorithm>::~InterpolatorBase() {
-  delete [] state_;
-  if (own_x_data_) {
-    delete [] x_data_;
-  }
-}
-
-template <typename T, class Algorithm>
-bool InterpolatorBase<T, Algorithm>::Initialize(const vector<T> &x_data,
-                                                const vector<T> &y_data) {
-#ifndef NDEBUG
-  if (x_data.size() != y_data.size()) {
-    LoggerError("InterpolatorBase::Initialize: xData size (%d) != yData size"
-                  " (%d)", x_data.size(), y_data.size());
-  }
-#endif
-  return Initialize(&x_data[0], &y_data[0], x_data.size());
-}
-
-template <typename T, class Algorithm>
-bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
-                                                double x_sampling_interval,
-                                                const vector<T> &y_data) {
-  return Initialize(x_start_offset,
-                    x_sampling_interval,
-                    &y_data[0],
-                    y_data.size());
-}
-
-template <typename T, class Algorithm>
-bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
-                                                double x_sampling_interval,
-                                                const T *y_data,
-                                                int data_length) {
-  // Constructs and populate x-axis data: `x_data_`
-  T *x_data_tmp = new T[data_length];
-  float time_offset = x_start_offset;
-  for (int n = 0; n < data_length; n++) {
-    x_data_tmp[n] = time_offset;
-    time_offset += x_sampling_interval;
-  }
-  Initialize(x_data_tmp, y_data, data_length);
-  // Sets-up the regularly sampled interpolation mode
-  x_start_offset_ = x_start_offset;
-  x_inverse_sampling_interval_ = 1.0 / x_sampling_interval;
-  own_x_data_ = true;
-  return status_;
-}
-
-
-template <typename T, class Algorithm>
-bool InterpolatorBase<T, Algorithm>::Initialize(
-    const T *x_data, const T *y_data, int data_length) {
-  // Default settings
-  cached_index_ = 0;
-  data_length_ = 0;
-  x_start_offset_ = 0;
-  x_inverse_sampling_interval_ = 0;
-  state_ = NULL;
-  // Input data is externally owned
-  own_x_data_ = false;
-  x_data_ = x_data;
-  y_data_ = y_data;
-  data_length_ = data_length;
-  last_element_index_ = data_length - 1;
-  // Check input data sanity
-  for (int n = 0; n < last_element_index_; ++n) {
-    if (x_data_[n + 1] <= x_data_[n]) {
-      ALOGE("InterpolatorBase::Initialize: xData are not ordered or "
-              "contain equal values (X[%d] <= X[%d]) (%.5e <= %.5e)",
-            n + 1, n, x_data_[n + 1], x_data_[n]);
-      status_ = false;
-      return false;
-    }
-  }
-  // Pre-compute internal state by calling the corresponding function of the
-  // derived class.
-  status_ = static_cast<Algorithm*>(this)->SetInternalState();
-  return status_;
-}
-
-template <typename T, class Algorithm>
-T InterpolatorBase<T, Algorithm>::Interpolate(T x) {
-#ifndef NDEBUG
-  if (cached_index_ < 0 || cached_index_ > data_length_ - 2) {
-    LoggerError("InterpolatorBase:Interpolate: CachedIndex_ out of bounds "
-                  "[0, %d, %d]", cached_index_, data_length_ - 2);
-  }
-#endif
-  // Search for the containing interval
-  if (x <= x_data_[cached_index_]) {
-    if (cached_index_ <= 0) {
-      cached_index_ = 0;
-      return y_data_[0];
-    }
-    if (x >= x_data_[cached_index_ - 1]) {
-      cached_index_--;  // Fast descending
-    } else {
-      if (x <= x_data_[0]) {
-        cached_index_ = 0;
-        return y_data_[0];
-      }
-      cached_index_ = SearchIndex(x_data_, x, 0, cached_index_);
-    }
-  } else {
-    if (cached_index_ >= last_element_index_) {
-      cached_index_ = last_element_index_;
-      return y_data_[last_element_index_];
-    }
-    if (x > x_data_[cached_index_ + 1]) {
-      if (cached_index_ + 2 > last_element_index_) {
-        cached_index_ = last_element_index_ - 1;
-        return y_data_[last_element_index_];
-      }
-      if (x <= x_data_[cached_index_ + 2]) {
-        cached_index_++;  // Fast ascending
-      } else {
-        if (x >= x_data_[last_element_index_]) {
-          cached_index_ = last_element_index_ - 1;
-          return y_data_[last_element_index_];
-        }
-        cached_index_ = SearchIndex(
-            x_data_, x, cached_index_, last_element_index_);
-      }
-    }
-  }
-  // Compute interpolated value by calling the corresponding function of the
-  // derived class.
-  return static_cast<Algorithm*>(this)->MethodSpecificInterpolation(x);
-}
-
-}  // namespace sigmod
-
-}  // namespace le_fx
-
-#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base.h b/media/libeffects/loudness/dsp/core/interpolator_base.h
index 0cd1a35..8bcf904 100644
--- a/media/libeffects/loudness/dsp/core/interpolator_base.h
+++ b/media/libeffects/loudness/dsp/core/interpolator_base.h
@@ -17,8 +17,7 @@
 #ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
 #define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
 
-#include "common/core/types.h"
-
+#include <vector>
 namespace le_fx {
 
 namespace sigmod {
@@ -40,13 +39,61 @@
 template <typename T, class Algorithm>
 class InterpolatorBase {
  public:
-  InterpolatorBase();
-  ~InterpolatorBase();
+  InterpolatorBase() = default;
+  InterpolatorBase(const InterpolatorBase&) = delete;
+  InterpolatorBase& operator=(const InterpolatorBase&) = delete;
 
+  ~InterpolatorBase() {
+    delete [] state_;
+    if (own_x_data_) {
+        delete [] x_data_;
+    }
+  }
   // Generic random-access interpolation with arbitrary spaced x-axis samples.
   // Below X[0], the interpolator returns Y[0]. Above X[data_length-1], it
   // returns Y[data_length-1].
-  T Interpolate(T x);
+  T Interpolate(T x) {
+      // Search for the containing interval
+      if (x <= x_data_[cached_index_]) {
+          if (cached_index_ <= 0) {
+              cached_index_ = 0;
+              return y_data_[0];
+          }
+          if (x >= x_data_[cached_index_ - 1]) {
+              cached_index_--;  // Fast descending
+          } else {
+              if (x <= x_data_[0]) {
+                  cached_index_ = 0;
+                  return y_data_[0];
+              }
+              cached_index_ = SearchIndex(x_data_, x, 0, cached_index_);
+          }
+      } else {
+          if (cached_index_ >= last_element_index_) {
+              cached_index_ = last_element_index_;
+              return y_data_[last_element_index_];
+          }
+          if (x > x_data_[cached_index_ + 1]) {
+              if (cached_index_ + 2 > last_element_index_) {
+                  cached_index_ = last_element_index_ - 1;
+                  return y_data_[last_element_index_];
+              }
+              if (x <= x_data_[cached_index_ + 2]) {
+                  cached_index_++;  // Fast ascending
+              } else {
+                  if (x >= x_data_[last_element_index_]) {
+                      cached_index_ = last_element_index_ - 1;
+                      return y_data_[last_element_index_];
+                  }
+                  cached_index_ = SearchIndex(
+                          x_data_, x, cached_index_, last_element_index_);
+              }
+          }
+      }
+      // Compute interpolated value by calling the corresponding function of the
+      // derived class.
+      return static_cast<Algorithm*>(this)->MethodSpecificInterpolation(x);
+  }
 
   bool get_status() const {
     return status_;
@@ -57,56 +104,117 @@
   //  y_data: [(data_length)x1] y-axis coordinates (interpolation axis)
   //  data_length: number of points
   // returns `true` if everything is ok, `false`, otherwise
-  bool Initialize(const T *x_data, const T *y_data, int data_length);
+  bool Initialize(const T *x_data, const T *y_data, int data_length) {
+    // Default settings
+    cached_index_ = 0;
+    data_length_ = 0;
+    x_start_offset_ = 0;
+    x_inverse_sampling_interval_ = 0;
+    state_ = nullptr;
+    // Input data is externally owned
+    own_x_data_ = false;
+    x_data_ = x_data;
+    y_data_ = y_data;
+    data_length_ = data_length;
+    last_element_index_ = data_length - 1;
+    // Check input data validity
+    for (int n = 0; n < last_element_index_; ++n) {
+        if (x_data_[n + 1] <= x_data_[n]) {
+            ALOGE("InterpolatorBase::Initialize: xData are not ordered or "
+                  "contain equal values (X[%d] <= X[%d]) (%.5e <= %.5e)",
+                  n + 1, n, x_data_[n + 1], x_data_[n]);
+            status_ = false;
+            return false;
+        }
+    }
+    // Pre-compute internal state by calling the corresponding function of the
+    // derived class.
+    status_ = static_cast<Algorithm*>(this)->SetInternalState();
+    return status_;
+}
 
   // Initializes internal buffers.
   //  x_data: x-axis coordinates (searching axis)
   //  y_data: y-axis coordinates (interpolating axis)
   // returns `true` if everything is ok, `false`, otherwise
-  bool Initialize(const vector<T> &x_data, const vector<T> &y_data);
+  bool Initialize(const std::vector<T> &x_data, const std::vector<T> &y_data) {
+    return Initialize(&x_data[0], &y_data[0], x_data.size());
+}
 
-  // Initialization for regularly sampled sequences, where:
+
+    // Initialization for regularly sampled sequences, where:
   //  x_data[i] = x_start_offset + i * x_sampling_interval
   bool Initialize(double x_start_offset,
                   double x_sampling_interval,
-                  const vector<T> &y_data);
+                  const std::vector<T> &y_data) {
+    return Initialize(x_start_offset,
+            x_sampling_interval,
+            &y_data[0],
+            y_data.size());
+}
 
   // Initialization for regularly sampled sequences, where:
   //  x_data[i] = x_start_offset + i * x_sampling_interval
   bool Initialize(double x_start_offset,
                   double x_sampling_interval,
                   const T *y_data,
-                  int data_length);
+                  int data_length) {
+        // Constructs and populate x-axis data: `x_data_`
+        T *x_data_tmp = new T[data_length];
+        float time_offset = x_start_offset;
+        for (int n = 0; n < data_length; n++) {
+            x_data_tmp[n] = time_offset;
+            time_offset += x_sampling_interval;
+        }
+        Initialize(x_data_tmp, y_data, data_length);
+        // Sets-up the regularly sampled interpolation mode
+        x_start_offset_ = x_start_offset;
+        x_inverse_sampling_interval_ = 1.0 / x_sampling_interval;
+        own_x_data_ = true;
+        return status_;
+    }
 
  protected:
   // Is set to false if something goes wrong, and to true if everything is ok.
-  bool status_;
+  bool status_ = false;
 
   // The start-index of the previously searched interval
-  int cached_index_;
+  int cached_index_ = 0;
+
+// Searches for the interval that contains <x> using a divide-and-conquer
+// algorithm.
+// X[]: an array of sorted values (X[i+1] > X[i])
+// x:   a value
+// StartIndex: the minimum searched index
+// EndIndex: the maximum searched index
+// returns: the index <i> that satisfies: X[i] <= x <= X[i+1] &&
+//          StartIndex <= i <= (EndIndex-1)
+
+    int SearchIndex(const T* x_data, T x, int start_index, int end_index) {
+        auto x_begin = x_data + start_index;
+        auto x_end = x_data + end_index - (end_index > start_index);
+        auto iter = std::lower_bound(x_begin, x_end, x);
+        return iter - x_data;
+    }
 
   // Data points
-  const T *x_data_;  // Externally or internally owned, depending on own_x_data_
-  const T *y_data_;  // Externally owned (always)
-  int data_length_;
+  const T *x_data_ = nullptr;  // Externally or internally owned, depending on own_x_data_
+  const T *y_data_ = nullptr;  // Externally owned (always)
+  int data_length_ = 0;
   // Index of the last element `data_length_ - 1` kept here for optimization
-  int last_element_index_;
-  bool own_x_data_;
+  int last_element_index_= -1;
+  bool own_x_data_ = false;
   // For regularly-samples sequences, keep only the boundaries and the intervals
-  T x_start_offset_;
-  float x_inverse_sampling_interval_;
+  T x_start_offset_ = 0;
+  float x_inverse_sampling_interval_ = 0;
 
   // Algorithm state (internally owned)
-  double *state_;
-
- private:
-  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorBase);
+  double *state_ = nullptr;
 };
 
 }  // namespace sigmod
 
 }  // namespace le_fx
 
-#include "dsp/core/interpolator_base-inl.h"
 
 #endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_linear.h b/media/libeffects/loudness/dsp/core/interpolator_linear.h
index 434698a..330ab94 100644
--- a/media/libeffects/loudness/dsp/core/interpolator_linear.h
+++ b/media/libeffects/loudness/dsp/core/interpolator_linear.h
@@ -36,44 +36,37 @@
 template <typename T>
 class InterpolatorLinear: public InterpolatorBase<T, InterpolatorLinear<T> > {
  public:
-  InterpolatorLinear() { }
-  ~InterpolatorLinear() { }
+  InterpolatorLinear() = default;
+  InterpolatorLinear(const InterpolatorLinear&) = delete;
+  InterpolatorLinear& operator=(const InterpolatorLinear&) = delete;
 
  protected:
   // Provides the main implementation of the linear interpolation algorithm.
   // Assumes that: X[cached_index_] < x < X[cached_index_ + 1]
-  T MethodSpecificInterpolation(T x);
+
+  T MethodSpecificInterpolation(T x) {
+      const T dX = x_data_[cached_index_ + 1] - x_data_[cached_index_];
+      const T dY = y_data_[cached_index_ + 1] - y_data_[cached_index_];
+      const T dx = x - x_data_[cached_index_];
+      return y_data_[cached_index_] + (dY * dx) / dX;
+  }
 
   // Pre-compute internal state_ parameters.
-  bool SetInternalState();
-
+  bool SetInternalState() {
+      state_ = nullptr;
+      return true;
+  }
  private:
-  friend class InterpolatorBase<T, InterpolatorLinear<T> >;
-  typedef InterpolatorBase<T, InterpolatorLinear<T> > BaseClass;
+  friend class InterpolatorBase<T, InterpolatorLinear<T>>;
+  typedef InterpolatorBase<T, InterpolatorLinear<T>> BaseClass;
   using BaseClass::status_;
   using BaseClass::cached_index_;
   using BaseClass::x_data_;
   using BaseClass::y_data_;
   using BaseClass::data_length_;
   using BaseClass::state_;
-
-  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorLinear<T>);
 };
 
-template <typename T>
-inline T InterpolatorLinear<T>::MethodSpecificInterpolation(T x) {
-  T dX = x_data_[cached_index_ + 1] - x_data_[cached_index_];
-  T dY = y_data_[cached_index_ + 1] - y_data_[cached_index_];
-  T dx = x - x_data_[cached_index_];
-  return y_data_[cached_index_] + (dY * dx) / dX;
-}
-
-template <typename T>
-bool InterpolatorLinear<T>::SetInternalState() {
-  state_ = NULL;
-  return true;
-}
-
 }  // namespace sigmod
 
 }  // namespace le_fx
diff --git a/media/libeffects/loudness/tests/Android.bp b/media/libeffects/loudness/tests/Android.bp
new file mode 100644
index 0000000..21f5558
--- /dev/null
+++ b/media/libeffects/loudness/tests/Android.bp
@@ -0,0 +1,36 @@
+// Build the unit tests for loudness effect tests
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "loudness_enhancer_tests",
+    srcs: [
+        "loudness_enhancer_tests.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "liblog",
+    ],
+    static_libs: [
+        "libldnhncr",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libaudioutils_headers",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libeffects/loudness",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
+}
diff --git a/media/libeffects/loudness/tests/loudness_enhancer_tests.cpp b/media/libeffects/loudness/tests/loudness_enhancer_tests.cpp
new file mode 100644
index 0000000..2443fe1
--- /dev/null
+++ b/media/libeffects/loudness/tests/loudness_enhancer_tests.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "dsp/core/dynamic_range_compression.h"
+#include <audio_effects/effect_loudnessenhancer.h>
+#include <audio_utils/dsp_utils.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+#include <system/audio_effects/audio_effects_test.h>
+
+using status_t = int32_t;
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+effect_uuid_t loudness_uuid = {0xfa415329, 0x2034, 0x4bea, 0xb5dc,
+    {0x5b, 0x38, 0x1c, 0x8d, 0x1e, 0x2c}};
+
+using namespace android::audio_utils;
+using namespace android::effect::utils;
+
+/*
+Android 16:
+expectedEnergydB: -24.771212  energyIndB: -24.739433
+gaindB: 0.000000  measureddB: 0.000000  energyIndB: -24.739433  energyOutdB: -24.739433
+gaindB: 1.000000  measureddB: 1.000004  energyIndB: -24.739433  energyOutdB: -23.739429
+gaindB: 2.000000  measureddB: 2.000002  energyIndB: -24.739433  energyOutdB: -22.739431
+gaindB: 5.000000  measureddB: 5.000006  energyIndB: -24.739433  energyOutdB: -19.739428
+gaindB: 10.000000  measureddB: 10.000004  energyIndB: -24.739433  energyOutdB: -14.739429
+gaindB: 20.000000  measureddB: 13.513464  energyIndB: -24.739433  energyOutdB: -11.225969
+gaindB: 50.000000  measureddB: 18.649250  energyIndB: -24.739433  energyOutdB: -6.090182
+gaindB: 100.000000  measureddB: 22.874735  energyIndB: -24.739433  energyOutdB: -1.864698
+ */
+
+static constexpr audio_channel_mask_t kOutputChannelMasks[] = {
+AUDIO_CHANNEL_OUT_STEREO,
+AUDIO_CHANNEL_OUT_5POINT1,
+AUDIO_CHANNEL_OUT_7POINT1,
+AUDIO_CHANNEL_OUT_7POINT1POINT4,
+AUDIO_CHANNEL_OUT_9POINT1POINT6,
+};
+
+using LoudnessEnhancerGainParam = std::tuple<int /* channel mask */>;
+
+enum {
+    GAIN_CHANNEL_MASK_POSITION = 0,
+    //GAIN_ACCUMULATE_POSITION = 1,
+};
+
+class LoudnessEnhancerGainTest : public ::testing::TestWithParam<LoudnessEnhancerGainParam> {
+public:
+
+    void testGain(audio_channel_mask_t channelMask) {
+        effect_handle_t handle;
+        ASSERT_EQ(0, AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(
+                &loudness_uuid, 0 /* sessionId */, 0 /* ioId */, &handle));
+
+        constexpr size_t frameCount = 1024;
+        constexpr uint32_t sampleRate = 48000;
+        const size_t channelCount = audio_channel_count_from_out_mask(channelMask);
+        if (channelCount > FCC_LIMIT) return;
+        constexpr float amplitude = 0.1;
+        const size_t sampleCount = channelCount * frameCount;
+        std::vector<float> originalData(sampleCount);
+        initUniformDistribution(originalData, -amplitude, amplitude);
+        std::vector<float> outData(sampleCount);
+
+        ASSERT_EQ(0, effect_set_config(handle, sampleRate, channelMask));
+        ASSERT_EQ(0, effect_enable(handle));
+
+        // expected energy in dB for a uniform distribution from -amplitude to amplitude.
+        const float expectedEnergydB = energyOfUniformDistribution(-amplitude, amplitude);
+        const float energyIndB = energy(originalData);
+        ALOGD("%s: expectedEnergydB: %f  energyIndB: %f", __func__, expectedEnergydB, energyIndB);
+        EXPECT_NEAR(energyIndB, expectedEnergydB, 0.1);  // within 0.1dB.
+        float lastMeasuredGaindB = 0;
+        for (int gainmB : { 0, 100, 200, 500, 1'000, 2'000, 5'000, 10'000 }) {  // millibel Power
+            ASSERT_EQ(0, effect_set_param(
+                    handle, LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, gainmB));
+
+            auto inData = originalData;
+            audio_buffer_t inBuffer{ .frameCount = frameCount, .f32 = inData.data() };
+            audio_buffer_t outBuffer{ .frameCount = frameCount, .f32 = outData.data() };
+            ASSERT_EQ(0, effect_process(handle, &inBuffer, &outBuffer));
+            const float energyOutdB = energy(inData);
+            const float gaindB = gainmB * 1e-2;
+            const float measuredGaindB = energyOutdB - energyIndB;
+
+            // Log our gain and power levels
+            ALOGD("%s: gaindB: %f  measureddB: %f  energyIndB: %f  energyOutdB: %f",
+                  __func__, gaindB, measuredGaindB, energyIndB, energyOutdB);
+
+            // Gain curve testing (move to VTS)?
+            if (gaindB == 0) {
+                EXPECT_EQ(energyIndB, energyOutdB);
+            } else if (energyIndB + gaindB < -10.f) {
+                // less than -10dB from overflow, signal does not saturate.
+                EXPECT_NEAR(gaindB, measuredGaindB, 0.1);
+            } else {  // effective gain saturates.
+                EXPECT_LT(measuredGaindB, gaindB);       // less than the desired gain.
+                EXPECT_GT(measuredGaindB, lastMeasuredGaindB);  // more than the previous gain.
+            }
+            lastMeasuredGaindB = measuredGaindB;
+        }
+        ASSERT_EQ(0, AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(handle));
+    }
+};
+
+/**
+ * The Gain test checks that gain that does not saturate the input signal
+ * will be applied as expected.  Gain that would cause the input signal to
+ * exceed the nominal limit is reduced.
+ */
+
+TEST_P(LoudnessEnhancerGainTest, gain) {
+    testGain(kOutputChannelMasks[std::get<GAIN_CHANNEL_MASK_POSITION>(GetParam())]);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        LoudnessEnhancerTestAll, LoudnessEnhancerGainTest,
+        ::testing::Combine(
+                ::testing::Range(0, (int)std::size(kOutputChannelMasks))),
+        [](const testing::TestParamInfo<LoudnessEnhancerGainTest::ParamType>& info) {
+            const int index = std::get<GAIN_CHANNEL_MASK_POSITION>(info.param);
+            const audio_channel_mask_t channelMask = kOutputChannelMasks[index];
+            const std::string name =
+                    std::string(audio_channel_out_mask_to_string(channelMask)) +
+                    std::to_string(index);
+            return name;
+        });
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index d791fab..232bf7a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -83,7 +83,6 @@
     header_libs: [
         "libaudioeffects",
         "libhardware_headers",
-        "libwebrtc_absl_headers",
     ],
     cflags: [
         "-Wno-unused-parameter",
diff --git a/media/liberror/Android.bp b/media/liberror/Android.bp
index 5e94b0a..3d13b7d 100644
--- a/media/liberror/Android.bp
+++ b/media/liberror/Android.bp
@@ -25,7 +25,7 @@
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -51,7 +51,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 2d53b35..2c93d94 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -260,9 +260,9 @@
     srcs: [
         "AudioCapabilities.cpp",
         "CodecCapabilities.cpp",
+        "CodecCapabilitiesUtils.cpp",
         "EncoderCapabilities.cpp",
         "VideoCapabilities.cpp",
-        "CodecCapabilitiesUtils.cpp",
     ],
 
     local_include_dirs: [
@@ -270,12 +270,17 @@
     ],
 
     shared_libs: [
+        "libaconfig_storage_read_api_cc",
         "libbinder",
         "liblog",
         "libstagefright_foundation",
         "libutils",
     ],
 
+    static_libs: [
+        "android.media.codec-aconfig-cc",
+    ],
+
     export_include_dirs: [
         "include",
     ],
@@ -404,21 +409,21 @@
         "libbinder",
         "libbinder_ndk",
         //"libsonivox",
-        "libcutils",
-        "liblog",
-        "libutils",
         "framework-permission-aidl-cpp",
         "libandroidicu",
         "libaudioclient",
         "libaudiofoundation",
         "libcamera_client",
+        "libcutils",
         "libdl",
         "libexpat",
         "libgui",
+        "liblog",
         "libmedia_codeclist",
         "libmedia_omx",
         "libstagefright_foundation",
         "libui",
+        "libutils",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
index 407d376..ccc79e3 100644
--- a/media/libmedia/CodecCapabilities.cpp
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -55,6 +55,8 @@
     Feature(FEATURE_MultipleFrames,   (1 << 5), false),
     Feature(FEATURE_DynamicTimestamp, (1 << 6), false),
     Feature(FEATURE_LowLatency,       (1 << 7), true),
+    Feature(FEATURE_DynamicColorAspects, (1 << 8), true),
+    Feature(FEATURE_DetachedSurface,     (1 << 9), true),
     // feature to exclude codec from REGULAR codec list
     Feature(FEATURE_SpecialCodec,     (1 << 30), false, true),
 };
@@ -65,6 +67,8 @@
     Feature(FEATURE_QpBounds, (1 << 3), false),
     Feature(FEATURE_EncodingStatistics, (1 << 4), false),
     Feature(FEATURE_HdrEditing, (1 << 5), false),
+    Feature(FEATURE_HlgEditing, (1 << 6), true),
+    Feature(FEATURE_Roi, (1 << 7), true),
     // feature to exclude codec from REGULAR codec list
     Feature(FEATURE_SpecialCodec,     (1 << 30), false, true),
 };
@@ -149,7 +153,7 @@
         int32_t yesNo;
         std::string key = KEY_FEATURE_;
         key = key + feat.mName;
-        if (format->findInt32(key.c_str(), &yesNo)) {
+        if (!format->findInt32(key.c_str(), &yesNo)) {
             continue;
         }
         if ((yesNo == 1 && !isFeatureSupported(feat.mName)) ||
@@ -405,9 +409,11 @@
 
     mMaxSupportedInstances = maxConcurrentInstances > 0
             ? maxConcurrentInstances : DEFAULT_MAX_SUPPORTED_INSTANCES;
-
-    int32_t maxInstances = mMaxSupportedInstances;
-    capabilitiesInfo->findInt32("max-concurrent-instances", &maxInstances);
+    AString maxConcurrentInstancesStr;
+    int32_t maxInstances
+            = capabilitiesInfo->findString("max-concurrent-instances", &maxConcurrentInstancesStr)
+            ? (int32_t)strtol(maxConcurrentInstancesStr.c_str(), NULL, 10)
+            : mMaxSupportedInstances;
     mMaxSupportedInstances =
             Range(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);
 
diff --git a/media/libmedia/CodecCapabilitiesUtils.cpp b/media/libmedia/CodecCapabilitiesUtils.cpp
index 01bb24e..64d24e4 100644
--- a/media/libmedia/CodecCapabilitiesUtils.cpp
+++ b/media/libmedia/CodecCapabilitiesUtils.cpp
@@ -32,6 +32,16 @@
 
 namespace android {
 
+static int32_t SaturateDoubleToInt32(double d) {
+    if (d >= static_cast<double>(std::numeric_limits<int32_t>::max())) {
+        return std::numeric_limits<int32_t>::max();
+    } else if (d <= static_cast<double>(std::numeric_limits<int32_t>::min())) {
+        return std::numeric_limits<int32_t>::min();
+    } else {
+        return static_cast<int32_t>(d);
+    }
+}
+
 // VideoSize
 
 VideoSize::VideoSize(int32_t width, int32_t height) : mWidth(width), mHeight(height) {}
@@ -126,9 +136,10 @@
     int32_t common = std::gcd(num, den);
     num /= common;
     den /= common;
+    // TODO: Reevaluate how to saturate double to int without drastically changing it.
     return Rational(
-            (int32_t)(mNumerator * (double)num),     // saturate to int
-            (int32_t)(mDenominator * (double)den));  // saturate to int
+            SaturateDoubleToInt32(mNumerator * (double)num),
+            SaturateDoubleToInt32(mDenominator * (double)den));
 }
 
 Range<Rational> Rational::ScaleRange(Range<Rational> range, int32_t num, int32_t den) {
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 3834278..db83a42 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -22,6 +22,7 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include "media/stagefright/foundation/AString.h"
 #include <binder/Parcel.h>
 
 namespace android {
@@ -185,6 +186,10 @@
     return mName.c_str();
 }
 
+const char *MediaCodecInfo::getHalName() const {
+    return mHalName.c_str();
+}
+
 const char *MediaCodecInfo::getOwnerName() const {
     return mOwner.c_str();
 }
@@ -193,11 +198,13 @@
 sp<MediaCodecInfo> MediaCodecInfo::FromParcel(const Parcel &parcel) {
     sMaxSupportedInstances = parcel.readInt32();
     AString name = AString::FromParcel(parcel);
+    AString halName = AString::FromParcel(parcel);
     AString owner = AString::FromParcel(parcel);
     Attributes attributes = static_cast<Attributes>(parcel.readInt32());
     uint32_t rank = parcel.readUint32();
     sp<MediaCodecInfo> info = new MediaCodecInfo;
     info->mName = name;
+    info->mHalName = halName;
     info->mOwner = owner;
     info->mAttributes = attributes;
     info->mRank = rank;
@@ -226,6 +233,7 @@
 status_t MediaCodecInfo::writeToParcel(Parcel *parcel) const {
     parcel->writeInt32(sMaxSupportedInstances);
     mName.writeToParcel(parcel);
+    mHalName.writeToParcel(parcel);
     mOwner.writeToParcel(parcel);
     parcel->writeInt32(mAttributes);
     parcel->writeUint32(mRank);
@@ -278,6 +286,9 @@
 
 void MediaCodecInfoWriter::setName(const char* name) {
     mInfo->mName = name;
+    // Upon creation, we use the same name for HAL and info and
+    // only distinguish them during collision resolution.
+    mInfo->mHalName = name;
 }
 
 void MediaCodecInfoWriter::addAlias(const char* name) {
@@ -331,6 +342,24 @@
     }
 }
 
+sp<MediaCodecInfo> MediaCodecInfo::splitOutType(const char *mediaType,
+        const char *newName) const {
+    sp<MediaCodecInfo> newInfo = new MediaCodecInfo;
+    newInfo->mName = newName;
+    newInfo->mHalName = mHalName;
+    newInfo->mOwner = mOwner;
+    newInfo->mAttributes = mAttributes;
+    newInfo->mRank = mRank;
+    newInfo->mAliases = mAliases;
+    // allow an alias from the (old) HAL name. If there is a collision, this will be ignored.
+    newInfo->mAliases.add(mHalName);
+
+    // note: mediaType is always a supported type. valueAt() will abort otherwise.
+    newInfo->mCaps.add(AString(mediaType), mCaps.valueAt(getCapabilityIndex(mediaType)));
+    newInfo->mCodecCaps.add(AString(mediaType), mCodecCaps.valueAt(getCodecCapIndex(mediaType)));
+    return newInfo;
+}
+
 // static
 std::shared_ptr<CodecCapabilities> MediaCodecInfoWriter::BuildCodecCapabilities(
         const char *mediaType, sp<MediaCodecInfo::Capabilities> caps, bool isEncoder,
diff --git a/media/libmedia/VideoCapabilities.cpp b/media/libmedia/VideoCapabilities.cpp
index bd26b8c..a9eea53 100644
--- a/media/libmedia/VideoCapabilities.cpp
+++ b/media/libmedia/VideoCapabilities.cpp
@@ -19,6 +19,8 @@
 
 #include <android-base/strings.h>
 
+#include <android_media_codec.h>
+
 #include <media/CodecCapabilities.h>
 #include <media/VideoCapabilities.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -69,8 +71,8 @@
         ALOGE("unsupported height");
         return std::nullopt;
     }
-    const int32_t heightInBlocks = divUp(height, mBlockHeight);
 
+    const int32_t heightInBlocks = divUp(height, mBlockHeight);
     // constrain by block count and by block aspect ratio
     const int32_t minWidthInBlocks = std::max(
             divUp(mBlockCountRange.lower(), heightInBlocks),
@@ -94,6 +96,10 @@
             (int32_t)std::ceil(mAspectRatioRange.lower().asDouble()
                     * height),
             (int32_t)(mAspectRatioRange.upper().asDouble() * height));
+    if (range.empty()) {
+        return std::nullopt;
+    }
+
     return range;
 }
 
@@ -104,8 +110,8 @@
         ALOGE("unsupported width");
         return std::nullopt;
     }
-    const int32_t widthInBlocks = divUp(width, mBlockWidth);
 
+    const int32_t widthInBlocks = divUp(width, mBlockWidth);
     // constrain by block count and by block aspect ratio
     const int32_t minHeightInBlocks = std::max(
             divUp(mBlockCountRange.lower(), widthInBlocks),
@@ -129,6 +135,10 @@
             (int32_t)std::ceil(width /
                     mAspectRatioRange.upper().asDouble()),
             (int32_t)(width / mAspectRatioRange.lower().asDouble()));
+    if (range.empty()) {
+        return std::nullopt;
+    }
+
     return range;
 }
 
@@ -142,12 +152,15 @@
 
     const int32_t blockCount =
             divUp(width, mBlockWidth) * divUp(height, mBlockHeight);
-
-    return std::make_optional(Range(
+    Range<double> result = Range(
             std::max(mBlocksPerSecondRange.lower() / (double) blockCount,
                 (double) mFrameRateRange.lower()),
             std::min(mBlocksPerSecondRange.upper() / (double) blockCount,
-                (double) mFrameRateRange.upper())));
+                (double) mFrameRateRange.upper()));
+    if (result.empty()) {
+        return std::nullopt;
+    }
+    return result;
 }
 
 int32_t VideoCapabilities::getBlockCount(int32_t width, int32_t height) const {
@@ -311,8 +324,14 @@
     int32_t width, height;
     format->findInt32(KEY_WIDTH, &width);
     format->findInt32(KEY_HEIGHT, &height);
-    double frameRate;
-    format->findDouble(KEY_FRAME_RATE, &frameRate);
+
+    // Frame rate can be int32 or float. MediaCodec accepts both float and int32 values.
+    // We convert to a double since that can represent both i32 and float without precision loss.
+    int32_t i32FrameRate;
+    float fltFrameRate;
+    double frameRate = format->findInt32(KEY_FRAME_RATE, &i32FrameRate) ? (double)i32FrameRate
+            : format->findFloat(KEY_FRAME_RATE, &fltFrameRate) ? (double)fltFrameRate : 0;
+
     PerformancePoint other = PerformancePoint(
             width, height,
             // safely convert ceil(double) to int through float cast and std::round
@@ -410,9 +429,16 @@
             ? std::make_optional<int32_t>(widthVal) : std::nullopt;
     std::optional<int32_t> height = format->findInt32(KEY_HEIGHT, &heightVal)
             ? std::make_optional<int32_t>(heightVal) : std::nullopt;
-    double rateVal;
-    std::optional<double> rate = format->findDouble(KEY_FRAME_RATE, &rateVal)
-            ? std::make_optional<double>(rateVal) : std::nullopt;
+
+    // Frame rate can be int32 or float. MediaCodec accepts both float and int32 values.
+    // We convert to a double since that can represent both i32 and float without precision loss.
+    int32_t i32RateVal;
+    float fltRateVal;
+    std::optional<double> rate = format->findInt32(KEY_FRAME_RATE, &i32RateVal)
+            ? std::make_optional<double>((double)i32RateVal)
+            : format->findFloat(KEY_FRAME_RATE, &fltRateVal)
+            ? std::make_optional<double>((double)fltRateVal)
+            : std::nullopt;
 
     if (!supports(width, height, rate)) {
         return false;
@@ -479,11 +505,10 @@
     mBlockAspectRatioRange = POSITIVE_RATIONALS;
     mAspectRatioRange      = POSITIVE_RATIONALS;
 
-    // YUV 4:2:0 requires 2:2 alignment
-    mWidthAlignment = 2;
-    mHeightAlignment = 2;
-    mBlockWidth = 2;
-    mBlockHeight = 2;
+    mWidthAlignment = 1;
+    mHeightAlignment = 1;
+    mBlockWidth = 1;
+    mBlockHeight = 1;
     mSmallerDimensionUpperLimit = VideoSize::GetAllowedDimensionRange().upper();
 }
 
@@ -601,9 +626,16 @@
         return std::nullopt;
     }
 
-    return std::make_optional(std::pair(
-            Range(range.value().first.getWidth(), range.value().second.getWidth()),
-            Range(range.value().first.getHeight(), range.value().second.getHeight())));
+    Range<int32_t> widthRange
+            = Range(range.value().first.getWidth(), range.value().second.getWidth());
+    Range<int32_t> heightRange
+            = Range(range.value().first.getHeight(), range.value().second.getHeight());
+    if (widthRange.empty() || heightRange.empty()) {
+        ALOGW("could not parse size range: %s", str.c_str());
+        return std::nullopt;
+    }
+
+    return std::make_optional(std::pair(widthRange, heightRange));
 }
 
 // static
@@ -1399,8 +1431,8 @@
                 minAlignment /* widthAlignment */, minAlignment /* heightAlignment */);
         mFrameRateRange = Range(1, maxRate);
     } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_VP8)) {
-        maxBlocks = INT_MAX;
-        maxBlocksPerSecond = INT_MAX;
+        maxBlocks = INT32_MAX;
+        maxBlocksPerSecond = INT32_MAX;
 
         // TODO: set to 100Mbps for now, need a number for VP8
         maxBps = 100000000;
@@ -1688,6 +1720,149 @@
                 maxBlocks, maxBlocksPerSecond,
                 blockSize, blockSize,
                 1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else if (android::media::codec::apv_support()
+            && base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_APV)) {
+        maxBlocksPerSecond = 11880;
+        maxBps = 7000000;
+
+        // Sample rate, and Bit rate for APV Codec,
+        // corresponding to the definitions in
+        // "10.1.4. Levels and bands"
+        // found at https://www.ietf.org/archive/id/draft-lim-apv-03.html
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int64_t SR = 0; // luma sample rate
+            int32_t BR = 0;  // bit rate bps
+            switch (profileLevel.mLevel) {
+                case APVLevel1Band0:
+                    SR =      3041280; BR =    7000000; break;
+                case APVLevel1Band1:
+                    SR =      3041280; BR =   11000000; break;
+                case APVLevel1Band2:
+                    SR =      3041280; BR =   14000000; break;
+                case APVLevel1Band3:
+                    SR =      3041280; BR =   21000000; break;
+                case APVLevel11Band0:
+                    SR =      6082560; BR =   14000000; break;
+                case APVLevel11Band1:
+                    SR =      6082560; BR =   21000000; break;
+                case APVLevel11Band2:
+                    SR =      6082560; BR =   28000000; break;
+                case APVLevel11Band3:
+                    SR =      6082560; BR =   42000000; break;
+                case APVLevel2Band0:
+                    SR =     15667200; BR =   36000000; break;
+                case APVLevel2Band1:
+                    SR =     15667200; BR =   53000000; break;
+                case APVLevel2Band2:
+                    SR =     15667200; BR =   71000000; break;
+                case APVLevel2Band3:
+                    SR =     15667200; BR =  106000000; break;
+                case APVLevel21Band0:
+                    SR =     31334400; BR =   71000000; break;
+                case APVLevel21Band1:
+                    SR =     31334400; BR =  106000000; break;
+                case APVLevel21Band2:
+                    SR =     31334400; BR =  141000000; break;
+                case APVLevel21Band3:
+                    SR =     31334400; BR =  212000000; break;
+                case APVLevel3Band0:
+                    SR =     66846720; BR =  101000000; break;
+                case APVLevel3Band1:
+                    SR =     66846720; BR =  151000000; break;
+                case APVLevel3Band2:
+                    SR =     66846720; BR =  201000000; break;
+                case APVLevel3Band3:
+                    SR =     66846720; BR =  301000000; break;
+                case APVLevel31Band0:
+                    SR =    133693440; BR =  201000000; break;
+                case APVLevel31Band1:
+                    SR =    133693440; BR =  301000000; break;
+                case APVLevel31Band2:
+                    SR =    133693440; BR =  401000000; break;
+                case APVLevel31Band3:
+                    SR =    133693440; BR =  602000000; break;
+                case APVLevel4Band0:
+                    SR =    265420800; BR =  401000000; break;
+                case APVLevel4Band1:
+                    SR =    265420800; BR =  602000000; break;
+                case APVLevel4Band2:
+                    SR =    265420800; BR =  780000000; break;
+                case APVLevel4Band3:
+                    SR =    265420800; BR = 1170000000; break;
+                case APVLevel41Band0:
+                    SR =    530841600; BR =  780000000; break;
+                case APVLevel41Band1:
+                    SR =    530841600; BR = 1170000000; break;
+                case APVLevel41Band2:
+                    SR =    530841600; BR = 1560000000; break;
+                case APVLevel41Band3:
+                    // Current API allows bitrates only up to Max Integer
+                    // Hence we are limiting internal limits to INT32_MAX
+                    // even when actual Level/Band limits are higher
+                    SR =    530841600; BR = INT32_MAX; break;
+                case APVLevel5Band0:
+                    SR =   1061683200; BR = 1560000000; break;
+                case APVLevel5Band1:
+                    SR =   1061683200; BR = INT32_MAX; break;
+                case APVLevel5Band2:
+                    SR =   1061683200; BR = INT32_MAX; break;
+                case APVLevel5Band3:
+                    SR =   1061683200; BR = INT32_MAX; break;
+                case APVLevel51Band0:
+                case APVLevel51Band1:
+                case APVLevel51Band2:
+                case APVLevel51Band3:
+                    SR =   2123366400; BR = INT32_MAX; break;
+                case APVLevel6Band0:
+                case APVLevel6Band1:
+                case APVLevel6Band2:
+                case APVLevel6Band3:
+                    SR =  4777574400L; BR = INT32_MAX; break;
+                case APVLevel61Band0:
+                case APVLevel61Band1:
+                case APVLevel61Band2:
+                case APVLevel61Band3:
+                    SR =  8493465600L; BR = INT32_MAX; break;
+                case APVLevel7Band0:
+                case APVLevel7Band1:
+                case APVLevel7Band2:
+                case APVLevel7Band3:
+                    SR = 16986931200L; BR = INT32_MAX; break;
+                case APVLevel71Band0:
+                case APVLevel71Band1:
+                case APVLevel71Band2:
+                case APVLevel71Band3:
+                    SR = 33973862400L; BR = INT32_MAX; break;
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case APVProfile422_10:
+                case APVProfile422_10HDR10:
+                case APVProfile422_10HDR10Plus:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            maxBlocksPerSecond = std::max(SR, maxBlocksPerSecond);
+            maxBps = std::max(BR, maxBps);
+        }
+
+        const int32_t blockSize = 16;
+        maxBlocks = INT32_MAX;
+        maxBlocksPerSecond = divUp(maxBlocksPerSecond, blockSize * (int64_t)blockSize);
+        maxBlocks = (int32_t)std::min((int64_t)maxBlocks, maxBlocksPerSecond);
+        // Max frame size in APV is 2^24
+        int32_t maxLengthInBlocks = divUp(1 << 24, blockSize);
+        maxLengthInBlocks = std::min(maxLengthInBlocks, maxBlocks);
+        applyMacroBlockLimits(
+            maxLengthInBlocks, maxLengthInBlocks,
+            maxBlocks, maxBlocksPerSecond,
+            blockSize, blockSize,
+            2 /* widthAlignment */, 1 /* heightAlignment */);
     } else {
         ALOGW("Unsupported mime %s", mediaType);
         // using minimal bitrate here.  should be overridden by
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
index eb62bf9..6e50d51 100644
--- a/media/libmedia/include/media/CodecCapabilitiesUtils.h
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -109,7 +109,7 @@
             Range<T> result = Range<T>(std::max(lower_, range.lower_),
                     std::min(upper_, range.upper_));
             if (result.empty()) {
-                ALOGE("Failed to intersect 2 ranges as they are disjoint");
+                ALOGV("Failed to intersect 2 ranges as they are disjoint");
             }
             return result;
         }
@@ -124,12 +124,10 @@
      * @param lower a non-{@code null} {@code T} reference
      * @param upper a non-{@code null} {@code T} reference
      * @return the intersection of this range and the other range
-     *
-     * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
-     * @throws IllegalArgumentException if the ranges are disjoint.
      */
     Range<T> intersect(T lower, T upper) {
-        return Range(std::max(lower_, lower), std::min(upper_, upper));
+        Range<T> range = Range<T>(lower, upper);
+        return this->intersect(range);
     }
 
     /**
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 60e383a..4d74a67 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -193,7 +193,11 @@
     void getSupportedMediaTypes(Vector<AString> *mediaTypes) const;
     const sp<Capabilities> getCapabilitiesFor(const char *mediaType) const;
     const std::shared_ptr<CodecCapabilities> getCodecCapsFor(const char *mediaType) const;
+
+    /// returns the codec name used by this info
     const char *getCodecName() const;
+    /// returns the codec name as used by the HAL
+    const char *getHalName() const;
 
     /**
      * Returns a vector containing alternate names for the codec.
@@ -229,14 +233,24 @@
     static sp<MediaCodecInfo> FromParcel(const Parcel &parcel);
     status_t writeToParcel(Parcel *parcel) const;
 
+    /**
+     * Create a copy of this MediaCodecInfo supporting a single media type.
+     *
+     * \param mediaType the media type for the new MediaCodecInfo. This must be
+     *                  one of the media types supported by this MediaCodecInfo.
+     * \param newName the new codec name for the new MediaCodecInfo.
+     */
+    sp<MediaCodecInfo> splitOutType(const char *mediaType, const char *newName) const;
+
 private:
     /**
      * Max supported instances setting from MediaCodecList global setting.
      */
     static int32_t sMaxSupportedInstances;
 
-    AString mName;
-    AString mOwner;
+    AString mName;     // codec name for this info
+    AString mHalName;  // codec name at the HAL level
+    AString mOwner;    // owning HAL name
     Attributes mAttributes;
     KeyedVector<AString, sp<Capabilities> > mCaps;
     KeyedVector<AString, std::shared_ptr<CodecCapabilities>> mCodecCaps;
@@ -283,7 +297,13 @@
     /**
      * Set the name of the codec.
      *
-     * @param name The new name.
+     * This sets both the name used internally and the HAL name, as during
+     * creation, they are the same. A new internal name will only be created
+     * during name collision resolution while splitting out media types.
+     *
+     * @param name The new name (from XML).
+     *
+     * @see MediaCodecInfo::splitOutType
      */
     void setName(const char* name);
     /**
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index b5867a6..8d68fad 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -20,7 +20,7 @@
     },
     apex_available: [
         "//apex_available:platform",
-        "com.android.btservices",
+        "com.android.bt",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index e921bd2..5e8ad80 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -91,6 +91,9 @@
 const char * const AudioParameter::keyOffloadCodecDelaySamples = AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES;
 const char * const AudioParameter::keyOffloadCodecPaddingSamples =
         AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES;
+const char * const AudioParameter::keyClipTransitionSupport =
+        AUDIO_PARAMETER_CLIP_TRANSITION_SUPPORT;
+const char * const AudioParameter::keyCreateMmapBuffer = AUDIO_PARAMETER_CREATE_MMAP_BUFFER;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 61e6bcc..7b83fd2 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -151,6 +151,9 @@
     static const char * const keyOffloadCodecDelaySamples;
     static const char * const keyOffloadCodecPaddingSamples;
 
+    static const char * const keyClipTransitionSupport;
+    static const char * const keyCreateMmapBuffer;
+
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
 
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 5214dfe..3dc4587 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,6 +16,8 @@
     name: "libmediametrics",
 
     srcs: [
+        "ItemSerialize-ByteString.cpp",
+        "ItemSerialize-Parcel.cpp",
         "MediaMetrics.cpp",
         "MediaMetricsItem.cpp",
     ],
diff --git a/media/libmediametrics/ItemSerialize-ByteString.cpp b/media/libmediametrics/ItemSerialize-ByteString.cpp
new file mode 100644
index 0000000..2f93d73
--- /dev/null
+++ b/media/libmediametrics/ItemSerialize-ByteString.cpp
@@ -0,0 +1,242 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "mediametrics::Item-Serialization"
+
+#include <inttypes.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/endian.h>
+#include <sys/types.h>
+
+#include <cutils/multiuser.h>
+#include <cutils/properties.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/threads.h>
+
+#include <media/MediaMetricsItem.h>
+#include <private/android_filesystem_config.h>
+
+// Max per-property string size before truncation in toString().
+// Do not make too large, as this is used for dumpsys purposes.
+static constexpr size_t kMaxPropertyStringSize = 4096;
+
+namespace android::mediametrics {
+
+status_t mediametrics::Item::writeToByteString(char **pbuffer, size_t *plength) const
+{
+    if (pbuffer == nullptr || plength == nullptr)
+        return BAD_VALUE;
+
+    // get size
+    const size_t keySizeZeroTerminated = strlen(mKey.c_str()) + 1;
+    if (keySizeZeroTerminated > UINT16_MAX) {
+        ALOGW("%s: key size %zu too large", __func__, keySizeZeroTerminated);
+        return INVALID_OPERATION;
+    }
+    const uint16_t version = 0;
+    const uint32_t header_size =
+        sizeof(uint32_t)      // total size
+        + sizeof(header_size) // header size
+        + sizeof(version)     // encoding version
+        + sizeof(uint16_t)    // key size
+        + keySizeZeroTerminated // key, zero terminated
+        + sizeof(int32_t)     // pid
+        + sizeof(int32_t)     // uid
+        + sizeof(int64_t)     // timestamp
+        ;
+
+    uint32_t size = header_size
+        + sizeof(uint32_t) // # properties
+        ;
+    for (auto &prop : *this) {
+        const size_t propSize = prop.getByteStringSize();
+        if (propSize > UINT16_MAX) {
+            ALOGW("%s: prop %s size %zu too large", __func__, prop.getName(), propSize);
+            return INVALID_OPERATION;
+        }
+        if (__builtin_add_overflow(size, propSize, &size)) {
+            ALOGW("%s: item size overflow at property %s", __func__, prop.getName());
+            return INVALID_OPERATION;
+        }
+    }
+
+    // since we fill every byte in the buffer (there is no padding),
+    // malloc is used here instead of calloc.
+    char * const build = (char *)malloc(size);
+    if (build == nullptr) return NO_MEMORY;
+
+    // we write in host byte-order; we think this is always little-endian
+    // for the interesting devices (arm-based android, x86-based android).
+    // we know the reader is running on the same host, so we expect the same
+    // byte order on the consumption side.
+
+    char *filling = build;
+    char *buildmax = build + size;
+    if (insert((uint32_t)size, &filling, buildmax) != NO_ERROR
+            || insert(header_size, &filling, buildmax) != NO_ERROR
+            || insert(version, &filling, buildmax) != NO_ERROR
+            || insert((uint16_t)keySizeZeroTerminated, &filling, buildmax) != NO_ERROR
+            || insert(mKey.c_str(), &filling, buildmax) != NO_ERROR
+            || insert((int32_t)mPid, &filling, buildmax) != NO_ERROR
+            || insert((int32_t)mUid, &filling, buildmax) != NO_ERROR
+            || insert((int64_t)mTimestamp, &filling, buildmax) != NO_ERROR
+            || insert((uint32_t)mProps.size(), &filling, buildmax) != NO_ERROR) {
+        ALOGE("%s:could not write header", __func__);  // shouldn't happen
+        free(build);
+        return INVALID_OPERATION;
+    }
+    for (auto &prop : *this) {
+        if (prop.writeToByteString(&filling, buildmax) != NO_ERROR) {
+            free(build);
+            // shouldn't happen
+            ALOGE("%s:could not write prop %s", __func__, prop.getName());
+            return INVALID_OPERATION;
+        }
+    }
+
+    if (filling != buildmax) {
+        ALOGE("%s: problems populating; wrote=%d planned=%d",
+                __func__, (int)(filling - build), (int)size);
+        free(build);
+        return INVALID_OPERATION;
+    }
+    *pbuffer = build;
+    *plength = size;
+    return NO_ERROR;
+}
+
+status_t mediametrics::Item::readFromByteString(const char *bufferptr, size_t length)
+{
+    if (bufferptr == nullptr) return BAD_VALUE;
+
+    // we read assuming host byte-order; we think this is always little-endian
+    // for the interesting devices (arm-based android, x86-based android).
+    // we know the writer is running on the same host,
+    // and therefore should have this same byte order.
+
+    const char *read = bufferptr;
+    const char *readend = bufferptr + length;
+
+    uint32_t size;
+    uint32_t header_size;
+    uint16_t version;
+    uint16_t key_size;
+    std::string key;
+    int32_t pid;
+    int32_t uid;
+    int64_t timestamp;
+    uint32_t propCount;
+    if (extract(&size, &read, readend) != NO_ERROR
+            || extract(&header_size, &read, readend) != NO_ERROR
+            || extract(&version, &read, readend) != NO_ERROR
+            || extract(&key_size, &read, readend) != NO_ERROR
+            || extract(&key, &read, readend) != NO_ERROR
+            || extract(&pid, &read, readend) != NO_ERROR
+            || extract(&uid, &read, readend) != NO_ERROR
+            || extract(&timestamp, &read, readend) != NO_ERROR
+            || size > length
+            || key.size() + 1 != key_size
+            || header_size > size) {
+        ALOGW("%s: invalid header", __func__);
+        return INVALID_OPERATION;
+    }
+    mKey = std::move(key);
+    const size_t pos = read - bufferptr;
+    if (pos > header_size) {
+        ALOGW("%s: invalid header pos:%zu > header_size:%u",
+                __func__, pos, header_size);
+        return INVALID_OPERATION;
+    } else if (pos < header_size) {
+        ALOGW("%s: mismatched header pos:%zu < header_size:%u, advancing",
+                __func__, pos, header_size);
+        read += (header_size - pos);
+    }
+    if (extract(&propCount, &read, readend) != NO_ERROR) {
+        ALOGD("%s: cannot read prop count", __func__);
+        return INVALID_OPERATION;
+    }
+    mPid = pid;
+    mUid = uid;
+    mTimestamp = timestamp;
+    for (size_t i = 0; i < propCount; ++i) {
+        Prop prop;
+        if (prop.readFromByteString(&read, readend) != NO_ERROR) {
+            ALOGW("%s: cannot read prop %zu", __func__, i);
+            return INVALID_OPERATION;
+        }
+        mProps[prop.getName()] = std::move(prop);
+    }
+    return NO_ERROR;
+}
+
+status_t mediametrics::Item::Prop::readFromByteString(
+        const char **bufferpptr, const char *bufferptrmax)
+{
+    uint16_t len;
+    std::string name;
+    uint8_t type;
+    status_t status = extract(&len, bufferpptr, bufferptrmax)
+            ?: extract(&type, bufferpptr, bufferptrmax)
+            ?: extract(&name, bufferpptr, bufferptrmax);
+    if (status != NO_ERROR) return status;
+    switch (type) {
+    case mediametrics::kTypeInt32: {
+        int32_t value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeInt64: {
+        int64_t value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeDouble: {
+        double value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeRate: {
+        std::pair<int64_t, int64_t> value;
+        status = extract(&value.first, bufferpptr, bufferptrmax)
+                ?: extract(&value.second, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeCString: {
+        std::string value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = std::move(value);
+    } break;
+    case mediametrics::kTypeNone: {
+        mElem = std::monostate{};
+    } break;
+    default:
+        ALOGE("%s: found bad prop type: %d, name %s",
+                __func__, (int)type, mName.c_str());  // no payload sent
+        return BAD_VALUE;
+    }
+    mName = name;
+    return NO_ERROR;
+}
+
+} // namespace android::mediametrics
diff --git a/media/libmediametrics/ItemSerialize-Parcel.cpp b/media/libmediametrics/ItemSerialize-Parcel.cpp
new file mode 100644
index 0000000..c467311
--- /dev/null
+++ b/media/libmediametrics/ItemSerialize-Parcel.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "mediametrics::Item-Serialization"
+
+#include <inttypes.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/endian.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+#include <cutils/multiuser.h>
+#include <cutils/properties.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/threads.h>
+
+#include <android/media/BnMediaMetricsService.h> // for direct Binder access
+#include <android/media/IMediaMetricsService.h>
+#include <binder/IServiceManager.h>
+#include <media/MediaMetricsItem.h>
+#include <private/android_filesystem_config.h>
+
+// Max per-property string size before truncation in toString().
+// Do not make too large, as this is used for dumpsys purposes.
+static constexpr size_t kMaxPropertyStringSize = 4096;
+
+namespace android::mediametrics {
+
+// Parcel / serialize things for binder calls
+//
+
+status_t mediametrics::Item::readFromParcel(const Parcel& data) {
+    int32_t version;
+    status_t status = data.readInt32(&version);
+    if (status != NO_ERROR) return status;
+
+    switch (version) {
+    case 0:
+      return readFromParcel0(data);
+    default:
+      ALOGE("%s: unsupported parcel version: %d", __func__, version);
+      return INVALID_OPERATION;
+    }
+}
+
+status_t mediametrics::Item::readFromParcel0(const Parcel& data) {
+    mKey = std::string{data.readString8()};
+    int32_t pid, uid;
+    status_t status = data.readInt32(&pid) ?: data.readInt32(&uid);
+    if (status != NO_ERROR) return status;
+    mPid = (pid_t)pid;
+    mUid = (uid_t)uid;
+    mPkgName = std::string(data.readString8());
+    int32_t count;
+    int64_t version, timestamp;
+    status = data.readInt64(&version) ?: data.readInt64(&timestamp) ?: data.readInt32(&count);
+    if (status != NO_ERROR) return status;
+    if (count < 0) return BAD_VALUE;
+    mPkgVersionCode = version;
+    mTimestamp = timestamp;
+    for (int i = 0; i < count; i++) {
+        Prop prop;
+        status_t status = prop.readFromParcel(data);
+        if (status != NO_ERROR) return status;
+        mProps[prop.getName()] = std::move(prop);
+    }
+    return NO_ERROR;
+}
+
+status_t mediametrics::Item::writeToParcel(Parcel *data) const {
+    if (data == nullptr) return BAD_VALUE;
+
+    const int32_t version = 0;
+    status_t status = data->writeInt32(version);
+    if (status != NO_ERROR) return status;
+
+    switch (version) {
+    case 0:
+      return writeToParcel0(data);
+    default:
+      ALOGE("%s: unsupported parcel version: %d", __func__, version);
+      return INVALID_OPERATION;
+    }
+}
+
+status_t mediametrics::Item::writeToParcel0(Parcel *data) const {
+    status_t status =
+        data->writeString8(String8{mKey})
+        ?: data->writeInt32(mPid)
+        ?: data->writeInt32(mUid)
+        ?: data->writeString8(String8{mPkgName})
+        ?: data->writeInt64(mPkgVersionCode)
+        ?: data->writeInt64(mTimestamp);
+    if (status != NO_ERROR) return status;
+
+    data->writeInt32((int32_t)mProps.size());
+    for (auto &prop : *this) {
+        status = prop.writeToParcel(data);
+        if (status != NO_ERROR) return status;
+    }
+    return NO_ERROR;
+}
+
+status_t mediametrics::Item::Prop::readFromParcel(const Parcel& data)
+{
+    const std::string key {data.readString8()};
+    int32_t type;
+    status_t status = data.readInt32(&type);
+    if (status != NO_ERROR) return status;
+    switch (type) {
+    case mediametrics::kTypeInt32: {
+        int32_t value;
+        status = data.readInt32(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeInt64: {
+        int64_t value;
+        status = data.readInt64(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeDouble: {
+        double value;
+        status = data.readDouble(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeCString: {
+        mElem = std::string{data.readString8()};
+    } break;
+    case mediametrics::kTypeRate: {
+        std::pair<int64_t, int64_t> rate;
+        status = data.readInt64(&rate.first)
+                ?: data.readInt64(&rate.second);
+        if (status != NO_ERROR) return status;
+        mElem = rate;
+    } break;
+    case mediametrics::kTypeNone: {
+        mElem = std::monostate{};
+    } break;
+    default:
+        ALOGE("%s: reading bad item type: %d", __func__, type);
+        return BAD_VALUE;
+    }
+    setName(key);
+    return NO_ERROR;
+}
+
+} // namespace android::mediametrics
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 2c58461..a2792d3 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -163,83 +163,6 @@
     return zapped;
 }
 
-// Parcel / serialize things for binder calls
-//
-
-status_t mediametrics::Item::readFromParcel(const Parcel& data) {
-    int32_t version;
-    status_t status = data.readInt32(&version);
-    if (status != NO_ERROR) return status;
-
-    switch (version) {
-    case 0:
-      return readFromParcel0(data);
-    default:
-      ALOGE("%s: unsupported parcel version: %d", __func__, version);
-      return INVALID_OPERATION;
-    }
-}
-
-status_t mediametrics::Item::readFromParcel0(const Parcel& data) {
-    const char *s = data.readCString();
-    mKey = s == nullptr ? "" : s;
-    int32_t pid, uid;
-    status_t status = data.readInt32(&pid) ?: data.readInt32(&uid);
-    if (status != NO_ERROR) return status;
-    mPid = (pid_t)pid;
-    mUid = (uid_t)uid;
-    s = data.readCString();
-    mPkgName = s == nullptr ? "" : s;
-    int32_t count;
-    int64_t version, timestamp;
-    status = data.readInt64(&version) ?: data.readInt64(&timestamp) ?: data.readInt32(&count);
-    if (status != NO_ERROR) return status;
-    if (count < 0) return BAD_VALUE;
-    mPkgVersionCode = version;
-    mTimestamp = timestamp;
-    for (int i = 0; i < count; i++) {
-        Prop prop;
-        status_t status = prop.readFromParcel(data);
-        if (status != NO_ERROR) return status;
-        mProps[prop.getName()] = std::move(prop);
-    }
-    return NO_ERROR;
-}
-
-status_t mediametrics::Item::writeToParcel(Parcel *data) const {
-    if (data == nullptr) return BAD_VALUE;
-
-    const int32_t version = 0;
-    status_t status = data->writeInt32(version);
-    if (status != NO_ERROR) return status;
-
-    switch (version) {
-    case 0:
-      return writeToParcel0(data);
-    default:
-      ALOGE("%s: unsupported parcel version: %d", __func__, version);
-      return INVALID_OPERATION;
-    }
-}
-
-status_t mediametrics::Item::writeToParcel0(Parcel *data) const {
-    status_t status =
-        data->writeCString(mKey.c_str())
-        ?: data->writeInt32(mPid)
-        ?: data->writeInt32(mUid)
-        ?: data->writeCString(mPkgName.c_str())
-        ?: data->writeInt64(mPkgVersionCode)
-        ?: data->writeInt64(mTimestamp);
-    if (status != NO_ERROR) return status;
-
-    data->writeInt32((int32_t)mProps.size());
-    for (auto &prop : *this) {
-        status = prop.writeToParcel(data);
-        if (status != NO_ERROR) return status;
-    }
-    return NO_ERROR;
-}
-
 const char *mediametrics::Item::toCString() {
     std::string val = toString();
     return strdup(val.c_str());
@@ -370,11 +293,51 @@
 };
 
 static sp<MediaMetricsDeathNotifier> sNotifier;
-// static
-sp<media::IMediaMetricsService> BaseItem::sMediaMetricsService;
+static sp<media::IMediaMetricsService> sMediaMetricsService;
 static std::mutex sServiceMutex;
 static int sRemainingBindAttempts = SVC_TRIES;
 
+// moving this out of the class removes all service references from <MediaMetricsItem.h>
+// and simplifies moving things to a module
+static
+sp<media::IMediaMetricsService> getService() {
+    static const char *servicename = "media.metrics";
+    static const bool enabled = BaseItem::isEnabled(); // singleton initialized
+
+    if (enabled == false) {
+        ALOGD_IF(DEBUG_SERVICEACCESS, "disabled");
+        return nullptr;
+    }
+    std::lock_guard _l(sServiceMutex);
+    // think of remainingBindAttempts as telling us whether service == nullptr because
+    // (1) we haven't tried to initialize it yet
+    // (2) we've tried to initialize it, but failed.
+    if (sMediaMetricsService == nullptr && sRemainingBindAttempts > 0) {
+        const char *badness = "";
+        sp<IServiceManager> sm = defaultServiceManager();
+        if (sm != nullptr) {
+            sp<IBinder> binder = sm->getService(String16(servicename));
+            if (binder != nullptr) {
+                sMediaMetricsService = interface_cast<media::IMediaMetricsService>(binder);
+                sNotifier = new MediaMetricsDeathNotifier();
+                binder->linkToDeath(sNotifier);
+            } else {
+                badness = "did not find service";
+            }
+        } else {
+            badness = "No Service Manager access";
+        }
+        if (sMediaMetricsService == nullptr) {
+            if (sRemainingBindAttempts > 0) {
+                sRemainingBindAttempts--;
+            }
+            ALOGD_IF(DEBUG_SERVICEACCESS, "%s: unable to bind to service %s: %s",
+                    __func__, servicename, badness);
+        }
+    }
+    return sMediaMetricsService;
+}
+
 // static
 void BaseItem::dropInstance() {
     std::lock_guard  _l(sServiceMutex);
@@ -442,284 +405,4 @@
     return status;
 }
 
-//static
-sp<media::IMediaMetricsService> BaseItem::getService() {
-    static const char *servicename = "media.metrics";
-    static const bool enabled = isEnabled(); // singleton initialized
-
-    if (enabled == false) {
-        ALOGD_IF(DEBUG_SERVICEACCESS, "disabled");
-        return nullptr;
-    }
-    std::lock_guard _l(sServiceMutex);
-    // think of remainingBindAttempts as telling us whether service == nullptr because
-    // (1) we haven't tried to initialize it yet
-    // (2) we've tried to initialize it, but failed.
-    if (sMediaMetricsService == nullptr && sRemainingBindAttempts > 0) {
-        const char *badness = "";
-        sp<IServiceManager> sm = defaultServiceManager();
-        if (sm != nullptr) {
-            sp<IBinder> binder = sm->getService(String16(servicename));
-            if (binder != nullptr) {
-                sMediaMetricsService = interface_cast<media::IMediaMetricsService>(binder);
-                sNotifier = new MediaMetricsDeathNotifier();
-                binder->linkToDeath(sNotifier);
-            } else {
-                badness = "did not find service";
-            }
-        } else {
-            badness = "No Service Manager access";
-        }
-        if (sMediaMetricsService == nullptr) {
-            if (sRemainingBindAttempts > 0) {
-                sRemainingBindAttempts--;
-            }
-            ALOGD_IF(DEBUG_SERVICEACCESS, "%s: unable to bind to service %s: %s",
-                    __func__, servicename, badness);
-        }
-    }
-    return sMediaMetricsService;
-}
-
-
-status_t mediametrics::Item::writeToByteString(char **pbuffer, size_t *plength) const
-{
-    if (pbuffer == nullptr || plength == nullptr)
-        return BAD_VALUE;
-
-    // get size
-    const size_t keySizeZeroTerminated = strlen(mKey.c_str()) + 1;
-    if (keySizeZeroTerminated > UINT16_MAX) {
-        ALOGW("%s: key size %zu too large", __func__, keySizeZeroTerminated);
-        return INVALID_OPERATION;
-    }
-    const uint16_t version = 0;
-    const uint32_t header_size =
-        sizeof(uint32_t)      // total size
-        + sizeof(header_size) // header size
-        + sizeof(version)     // encoding version
-        + sizeof(uint16_t)    // key size
-        + keySizeZeroTerminated // key, zero terminated
-        + sizeof(int32_t)     // pid
-        + sizeof(int32_t)     // uid
-        + sizeof(int64_t)     // timestamp
-        ;
-
-    uint32_t size = header_size
-        + sizeof(uint32_t) // # properties
-        ;
-    for (auto &prop : *this) {
-        const size_t propSize = prop.getByteStringSize();
-        if (propSize > UINT16_MAX) {
-            ALOGW("%s: prop %s size %zu too large", __func__, prop.getName(), propSize);
-            return INVALID_OPERATION;
-        }
-        if (__builtin_add_overflow(size, propSize, &size)) {
-            ALOGW("%s: item size overflow at property %s", __func__, prop.getName());
-            return INVALID_OPERATION;
-        }
-    }
-
-    // since we fill every byte in the buffer (there is no padding),
-    // malloc is used here instead of calloc.
-    char * const build = (char *)malloc(size);
-    if (build == nullptr) return NO_MEMORY;
-
-    char *filling = build;
-    char *buildmax = build + size;
-    if (insert((uint32_t)size, &filling, buildmax) != NO_ERROR
-            || insert(header_size, &filling, buildmax) != NO_ERROR
-            || insert(version, &filling, buildmax) != NO_ERROR
-            || insert((uint16_t)keySizeZeroTerminated, &filling, buildmax) != NO_ERROR
-            || insert(mKey.c_str(), &filling, buildmax) != NO_ERROR
-            || insert((int32_t)mPid, &filling, buildmax) != NO_ERROR
-            || insert((int32_t)mUid, &filling, buildmax) != NO_ERROR
-            || insert((int64_t)mTimestamp, &filling, buildmax) != NO_ERROR
-            || insert((uint32_t)mProps.size(), &filling, buildmax) != NO_ERROR) {
-        ALOGE("%s:could not write header", __func__);  // shouldn't happen
-        free(build);
-        return INVALID_OPERATION;
-    }
-    for (auto &prop : *this) {
-        if (prop.writeToByteString(&filling, buildmax) != NO_ERROR) {
-            free(build);
-            // shouldn't happen
-            ALOGE("%s:could not write prop %s", __func__, prop.getName());
-            return INVALID_OPERATION;
-        }
-    }
-
-    if (filling != buildmax) {
-        ALOGE("%s: problems populating; wrote=%d planned=%d",
-                __func__, (int)(filling - build), (int)size);
-        free(build);
-        return INVALID_OPERATION;
-    }
-    *pbuffer = build;
-    *plength = size;
-    return NO_ERROR;
-}
-
-status_t mediametrics::Item::readFromByteString(const char *bufferptr, size_t length)
-{
-    if (bufferptr == nullptr) return BAD_VALUE;
-
-    const char *read = bufferptr;
-    const char *readend = bufferptr + length;
-
-    uint32_t size;
-    uint32_t header_size;
-    uint16_t version;
-    uint16_t key_size;
-    std::string key;
-    int32_t pid;
-    int32_t uid;
-    int64_t timestamp;
-    uint32_t propCount;
-    if (extract(&size, &read, readend) != NO_ERROR
-            || extract(&header_size, &read, readend) != NO_ERROR
-            || extract(&version, &read, readend) != NO_ERROR
-            || extract(&key_size, &read, readend) != NO_ERROR
-            || extract(&key, &read, readend) != NO_ERROR
-            || extract(&pid, &read, readend) != NO_ERROR
-            || extract(&uid, &read, readend) != NO_ERROR
-            || extract(&timestamp, &read, readend) != NO_ERROR
-            || size > length
-            || key.size() + 1 != key_size
-            || header_size > size) {
-        ALOGW("%s: invalid header", __func__);
-        return INVALID_OPERATION;
-    }
-    mKey = std::move(key);
-    const size_t pos = read - bufferptr;
-    if (pos > header_size) {
-        ALOGW("%s: invalid header pos:%zu > header_size:%u",
-                __func__, pos, header_size);
-        return INVALID_OPERATION;
-    } else if (pos < header_size) {
-        ALOGW("%s: mismatched header pos:%zu < header_size:%u, advancing",
-                __func__, pos, header_size);
-        read += (header_size - pos);
-    }
-    if (extract(&propCount, &read, readend) != NO_ERROR) {
-        ALOGD("%s: cannot read prop count", __func__);
-        return INVALID_OPERATION;
-    }
-    mPid = pid;
-    mUid = uid;
-    mTimestamp = timestamp;
-    for (size_t i = 0; i < propCount; ++i) {
-        Prop prop;
-        if (prop.readFromByteString(&read, readend) != NO_ERROR) {
-            ALOGW("%s: cannot read prop %zu", __func__, i);
-            return INVALID_OPERATION;
-        }
-        mProps[prop.getName()] = std::move(prop);
-    }
-    return NO_ERROR;
-}
-
-status_t mediametrics::Item::Prop::readFromParcel(const Parcel& data)
-{
-    const char *key = data.readCString();
-    if (key == nullptr) return BAD_VALUE;
-    int32_t type;
-    status_t status = data.readInt32(&type);
-    if (status != NO_ERROR) return status;
-    switch (type) {
-    case mediametrics::kTypeInt32: {
-        int32_t value;
-        status = data.readInt32(&value);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeInt64: {
-        int64_t value;
-        status = data.readInt64(&value);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeDouble: {
-        double value;
-        status = data.readDouble(&value);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeCString: {
-        const char *s = data.readCString();
-        if (s == nullptr) return BAD_VALUE;
-        mElem = s;
-    } break;
-    case mediametrics::kTypeRate: {
-        std::pair<int64_t, int64_t> rate;
-        status = data.readInt64(&rate.first)
-                ?: data.readInt64(&rate.second);
-        if (status != NO_ERROR) return status;
-        mElem = rate;
-    } break;
-    case mediametrics::kTypeNone: {
-        mElem = std::monostate{};
-    } break;
-    default:
-        ALOGE("%s: reading bad item type: %d", __func__, type);
-        return BAD_VALUE;
-    }
-    setName(key);
-    return NO_ERROR;
-}
-
-status_t mediametrics::Item::Prop::readFromByteString(
-        const char **bufferpptr, const char *bufferptrmax)
-{
-    uint16_t len;
-    std::string name;
-    uint8_t type;
-    status_t status = extract(&len, bufferpptr, bufferptrmax)
-            ?: extract(&type, bufferpptr, bufferptrmax)
-            ?: extract(&name, bufferpptr, bufferptrmax);
-    if (status != NO_ERROR) return status;
-    switch (type) {
-    case mediametrics::kTypeInt32: {
-        int32_t value;
-        status = extract(&value, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeInt64: {
-        int64_t value;
-        status = extract(&value, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeDouble: {
-        double value;
-        status = extract(&value, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeRate: {
-        std::pair<int64_t, int64_t> value;
-        status = extract(&value.first, bufferpptr, bufferptrmax)
-                ?: extract(&value.second, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) return status;
-        mElem = value;
-    } break;
-    case mediametrics::kTypeCString: {
-        std::string value;
-        status = extract(&value, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) return status;
-        mElem = std::move(value);
-    } break;
-    case mediametrics::kTypeNone: {
-        mElem = std::monostate{};
-    } break;
-    default:
-        ALOGE("%s: found bad prop type: %d, name %s",
-                __func__, (int)type, mName.c_str());  // no payload sent
-        return BAD_VALUE;
-    }
-    mName = name;
-    return NO_ERROR;
-}
-
 } // namespace android::mediametrics
diff --git a/media/libmediametrics/include/media/MediaMetricsItem.h b/media/libmediametrics/include/media/MediaMetricsItem.h
index 03834d4..d914c31 100644
--- a/media/libmediametrics/include/media/MediaMetricsItem.h
+++ b/media/libmediametrics/include/media/MediaMetricsItem.h
@@ -33,8 +33,6 @@
 
 namespace android {
 
-namespace media { class IMediaMetricsService; }
-
 class Parcel;
 
 /*
@@ -271,8 +269,6 @@
 public:
     // are we collecting metrics data
     static bool isEnabled();
-    // returns the MediaMetrics service if active.
-    static sp<media::IMediaMetricsService> getService();
     // submits a raw buffer directly to the MediaMetrics service - this is highly optimized.
     static status_t submitBuffer(const char *buffer, size_t len);
 
@@ -281,9 +277,6 @@
     static constexpr const char * const EnabledPropertyPersist = "persist.media.metrics.enabled";
     static const int EnabledProperty_default = 1;
 
-    // let's reuse a binder connection
-    static sp<media::IMediaMetricsService> sMediaMetricsService;
-
     static void dropInstance();
 
     template <typename T>
@@ -440,48 +433,42 @@
     }
 
     template <typename T>
-    static status_t writeToParcel(
-            const char *name, const T& value, Parcel *parcel) = delete;
+    static status_t writeToParcel(std::string_view name, const T& value, Parcel *parcel) = delete;
     template <> // static
-    status_t writeToParcel(
-            const char *name, const int32_t& value, Parcel *parcel) {
-        return parcel->writeCString(name)
+    status_t writeToParcel(std::string_view name, const int32_t& value, Parcel *parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of<int32_t>::value)
                ?: parcel->writeInt32(value);
     }
     template <> // static
-    status_t writeToParcel(
-            const char *name, const int64_t& value, Parcel *parcel) {
-        return parcel->writeCString(name)
+    status_t writeToParcel(std::string_view name, const int64_t& value, Parcel *parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of<int64_t>::value)
                ?: parcel->writeInt64(value);
     }
     template <> // static
-    status_t writeToParcel(
-            const char *name, const double& value, Parcel *parcel) {
-        return parcel->writeCString(name)
+    status_t writeToParcel(std::string_view name, const double& value, Parcel *parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of<double>::value)
                ?: parcel->writeDouble(value);
     }
-    template <> // static
-    status_t writeToParcel(
-            const char *name, const std::pair<int64_t, int64_t>& value, Parcel *parcel) {
-        return parcel->writeCString(name)
+    template <>  // static
+    status_t writeToParcel(std::string_view name, const std::pair<int64_t, int64_t>& value,
+                           Parcel* parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of< std::pair<int64_t, int64_t>>::value)
                ?: parcel->writeInt64(value.first)
                ?: parcel->writeInt64(value.second);
     }
     template <> // static
-    status_t writeToParcel(
-            const char *name, const std::string& value, Parcel *parcel) {
-        return parcel->writeCString(name)
+    status_t writeToParcel(std::string_view name, const std::string& value, Parcel *parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of<std::string>::value)
-               ?: parcel->writeCString(value.c_str());
+               ?: parcel->writeString8(value.data(), value.length());
     }
     template <> // static
-    status_t writeToParcel(
-            const char *name, const std::monostate&, Parcel *parcel) {
-        return parcel->writeCString(name)
+    status_t writeToParcel(std::string_view name, const std::monostate&, Parcel *parcel) {
+        return parcel->writeString8(name.data(), name.length())
                ?: parcel->writeInt32(get_type_of<std::monostate>::value);
     }
 
@@ -809,6 +796,10 @@
             mName = name;
         }
 
+        void setName(std::string name) {
+            mName = std::move(name);
+        }
+
         bool isNamed(const char *name) const {
             return mName == name;
         }
@@ -855,7 +846,7 @@
 
         status_t writeToParcel(Parcel *parcel) const {
             return std::visit([this, parcel](auto &value) {
-                    return BaseItem::writeToParcel(mName.c_str(), value, parcel);}, mElem);
+                    return BaseItem::writeToParcel(mName, value, parcel);}, mElem);
         }
 
         void toStringBuffer(char *buffer, size_t length) const {
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index ee37b71..b1d293f 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -361,10 +361,12 @@
 
                 snprintf(buffer, SIZE - 1, "    owner: \"%s\"\n", info->getOwnerName());
                 result.append(buffer);
+                snprintf(buffer, SIZE - 1, "    hal name: \"%s\"\n", info->getHalName());
+                result.append(buffer);
                 snprintf(buffer, SIZE - 1, "    rank: %u\n", info->getRank());
                 result.append(buffer);
             } else {
-                result.append("    aliases, attributes, owner, rank: see above\n");
+                result.append("    aliases, attributes, owner, hal name, rank: see above\n");
             }
 
             {
@@ -392,7 +394,9 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)
                             ? asString_AV1Profile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION)
-                            ? asString_DolbyVisionProfile(pl.mProfile) : "??";
+                            ? asString_DolbyVisionProfile(pl.mProfile) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AC4)
+                            ? asString_AC4Profile(pl.mProfile) : "??";
                     const char *niceLevel =
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2)
                             ? asString_MPEG2Level(pl.mLevel) :
@@ -411,7 +415,9 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)
                             ? asString_AV1Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION)
-                            ? asString_DolbyVisionLevel(pl.mLevel) : "??";
+                            ? asString_DolbyVisionLevel(pl.mLevel) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AC4)
+                            ? asString_AC4Level(pl.mLevel) : "??";
 
                     list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
                             pl.mProfile, pl.mLevel, niceProfile, niceLevel));
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index fa42da2..b93f226 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1569,6 +1569,9 @@
 
     if (mAudioSource != AUDIO_SOURCE_CNT) {
         source = createAudioSource();
+        if (source == NULL) {
+            return UNKNOWN_ERROR;
+        }
         mAudioEncoderSource = source;
     } else {
         setDefaultVideoEncoderIfNecessary();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index bd43fe2..451cc48 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2077,26 +2077,34 @@
         return;
     }
 
-    int32_t displayWidth, displayHeight;
+    int32_t displayWidth = 0, displayHeight = 0;
     if (outputFormat != NULL) {
         int32_t width, height;
-        CHECK(outputFormat->findInt32("width", &width));
-        CHECK(outputFormat->findInt32("height", &height));
+        if (!outputFormat->findInt32("width", &width)
+                || !outputFormat->findInt32("height", &height)) {
+            ALOGW("Video output format missing dimension: %s",
+                    outputFormat->debugString().c_str());
+            notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+            return;
+        }
 
         int32_t cropLeft, cropTop, cropRight, cropBottom;
-        CHECK(outputFormat->findRect(
-                    "crop",
-                    &cropLeft, &cropTop, &cropRight, &cropBottom));
-
-        displayWidth = cropRight - cropLeft + 1;
-        displayHeight = cropBottom - cropTop + 1;
+        if (outputFormat->findRect(
+                "crop",
+                &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+            displayWidth = cropRight - cropLeft + 1;
+            displayHeight = cropBottom - cropTop + 1;
+        } else {
+            displayWidth = width;
+            displayHeight = height;
+        }
 
         ALOGV("Video output format changed to %d x %d "
-             "(crop: %d x %d @ (%d, %d))",
-             width, height,
-             displayWidth,
-             displayHeight,
-             cropLeft, cropTop);
+                "(crop: %d x %d @ (%d, %d))",
+                width, height,
+                displayWidth,
+                displayHeight,
+                cropLeft, cropTop);
     } else {
         if (!inputFormat->findInt32("width", &displayWidth)
             || !inputFormat->findInt32("height", &displayHeight)) {
@@ -2144,6 +2152,11 @@
         displayHeight = tmp;
     }
 
+    if (displayWidth <= 0 || displayHeight <= 0) {
+        ALOGE("video size is corrupted or bad, reset it to 0");
+        displayWidth = displayHeight = 0;
+    }
+
     notifyListener(
             MEDIA_SET_VIDEO_SIZE,
             displayWidth,
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 3987a67..1c5aaba 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -214,6 +214,28 @@
                     break;
                 }
 
+                case MediaCodec::CB_CRYPTO_ERROR:
+                {
+                    status_t err;
+                    CHECK(msg->findInt32("err", &err));
+                    AString comment;
+                    msg->findString("errorDetail", &comment);
+                    ALOGE("Decoder (%s) reported crypto error : 0x%x (%s)",
+                            mIsAudio ? "audio" : "video", err, comment.c_str());
+
+                    handleError(err);
+                    break;
+                }
+
+                case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
+                case MediaCodec::CB_METRICS_FLUSHED:
+                {
+                    // Nothing to do. Informational. Safe to ignore.
+                    break;
+                }
+
+                case MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE:
+                // unexpected as we are not using large frames
                 default:
                 {
                     TRESPASS();
@@ -374,9 +396,18 @@
     }
     rememberCodecSpecificData(format);
 
-    // the following should work in configured state
-    CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
-    CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
+    err = mCodec->getOutputFormat(&mOutputFormat);
+    if (err == OK) {
+        err = mCodec->getInputFormat(&mInputFormat);
+    }
+    if (err != OK) {
+        ALOGE("Failed to get input/output format from [%s] decoder (err=%d)",
+                mComponentName.c_str(), err);
+        mCodec->release();
+        mCodec.clear();
+        handleError(err);
+        return;
+    }
 
     {
         Mutex::Autolock autolock(mStatsLock);
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 9a33120..957e982 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -432,32 +432,33 @@
 
             // Implicitly assert on valid trackIndex here, which we ensure by
             // never removing tracks.
-            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            if (trackIndex < mTracks.size()) {
+                TrackInfo *info = &mTracks.editItemAt(trackIndex);
+                sp<AnotherPacketSource> source = info->mSource;
+                if (source != NULL) {
+                    uint32_t rtpTime;
+                    CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
 
-            sp<AnotherPacketSource> source = info->mSource;
-            if (source != NULL) {
-                uint32_t rtpTime;
-                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+                    /* AnotherPacketSource makes an assertion if there is no ntp provided.
+                       RTPSource should provide ntpUs at all times.
+                    if (!info->mNPTMappingValid) {
+                        // This is a live stream, we didn't receive any normal
+                        // playtime mapping. We won't map to npt time.
+                        source->queueAccessUnit(accessUnit);
+                        break;
+                    }
 
-                /* AnotherPacketSource make an assertion if there is no ntp provided
-                   RTPSource should provide ntpUs all the times.
-                if (!info->mNPTMappingValid) {
-                    // This is a live stream, we didn't receive any normal
-                    // playtime mapping. We won't map to npt time.
+                    int64_t nptUs =
+                        ((double)rtpTime - (double)info->mRTPTime)
+                            / info->mTimeScale
+                            * 1000000ll
+                            + info->mNormalPlaytimeUs;
+
+                    */
+                    accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());
+
                     source->queueAccessUnit(accessUnit);
-                    break;
                 }
-
-                int64_t nptUs =
-                    ((double)rtpTime - (double)info->mRTPTime)
-                        / info->mTimeScale
-                        * 1000000ll
-                        + info->mNormalPlaytimeUs;
-
-                */
-                accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());
-
-                source->queueAccessUnit(accessUnit);
             }
 
             break;
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index 486a34f..06f3295 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -18,6 +18,15 @@
     srcs: [
         "aidl/android/media/SharedFileRegion.aidl",
     ],
+    backend: {
+        ndk: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
 }
 
 cc_library {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index e06efac..bacf758 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -7084,8 +7084,10 @@
     }
     AString owner = (info->getOwnerName() == nullptr) ? "default" : info->getOwnerName();
 
-    AString componentName;
-    CHECK(msg->findString("componentName", &componentName));
+    AString componentName = info->getCodecName();
+    // we are no longer using "componentName" as we always pass the codec info for owner.
+    // CHECK(msg->findString("componentName", &componentName));
+    AString halName = info->getHalName();
 
     sp<CodecObserver> observer = new CodecObserver(notify);
     sp<IOMX> omx;
@@ -7102,11 +7104,12 @@
     pid_t tid = gettid();
     int prevPriority = androidGetThreadPriority(tid);
     androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
-    err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
+    err = omx->allocateNode(halName.c_str(), observer, &omxNode);
     androidSetThreadPriority(tid, prevPriority);
 
     if (err != OK) {
-        ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
+        ALOGE("Unable to instantiate codec '%s' for '%s' with err %#x.",
+                halName.c_str(), componentName.c_str(), err);
 
         mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
         return false;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index f4a4985..f7949d9 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -325,11 +325,13 @@
         "libaconfig_storage_read_api_cc",
         "aconfig_mediacodec_flags_c_lib",
         "camera_platform_flags_c_lib",
+        "media_quality_aidl_interface-cpp",
     ],
 
     static_libs: [
         "android.media.codec-aconfig-cc",
         "android.media.extractor.flags-aconfig-cc",
+        "android.media.tv.flags-aconfig-cc",
         "com.android.media.flags.editing-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libmedia_ndkformatpriv",
@@ -349,6 +351,7 @@
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
         "media_ndk_headers",
+        "media_quality_headers",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index fe1d3b6..b8e8b28 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -474,13 +474,6 @@
         ALOGE("%s: Buffer queue already exists", __FUNCTION__);
         return ALREADY_EXISTS;
     }
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    // Create a buffer queue.
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-#endif // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
 
     uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
@@ -489,28 +482,16 @@
 
     bufferCount += kConsumerBufferCount;
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
+    sp<Surface> surface;
+    std::tie(mVideoBufferConsumer, surface) =
+            BufferItemConsumer::create(usage, bufferCount);
     mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
-
 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-    mVideoBufferProducer = mVideoBufferConsumer->getSurface();
+    mVideoBufferProducer = surface;
 #else
-    mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
+    mVideoBufferProducer = surface->getIGraphicBufferProducer();
 #endif  // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 
-#else
-    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
-    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
-
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-    mVideoBufferProducer = new Surface(producer);
-#else
-    mVideoBufferProducer = producer;
-#endif  // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
     status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
     if (res != OK) {
         ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
diff --git a/media/libstagefright/CodecBase.cpp b/media/libstagefright/CodecBase.cpp
index b9fb041..c445c36 100644
--- a/media/libstagefright/CodecBase.cpp
+++ b/media/libstagefright/CodecBase.cpp
@@ -66,5 +66,8 @@
     return ERROR_UNSUPPORTED;
 }
 
+std::vector<InstanceResourceInfo> CodecBase::getRequiredSystemResources() {
+    return std::vector<InstanceResourceInfo>{};
+}
 
 } // namespace android
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 0fc78ec..2cf5a15 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -71,6 +71,7 @@
        return -ENOSYS;
     }
     bool shouldPost = false;
+    msg->setWhat(kWhatDecrypt);
     Mutexed<std::list<sp<AMessage>>>::Locked pendingBuffers(mPendingBuffers);
     if (mState != kCryptoAsyncActive) {
        ALOGE("Cannot decrypt in errored state");
@@ -132,6 +133,9 @@
     }
     if (err != OK) {
         std::list<sp<AMessage>> errorList;
+        if (buffer->meta()->findObject("cryptoInfos", &obj)) {
+            msg->setObject("cryptoInfos", obj);
+        }
         msg->removeEntryByName("buffer");
         msg->setInt32("err", err);
         msg->setInt32("actionCode", ACTION_CODE_FATAL);
@@ -238,18 +242,16 @@
                 pendingBuffers->pop_front();
                 continue;
             }
-            nextTask = kWhatDecrypt;
+            nextTask = nextBuffer->what();
         }
         return OK;
     };
+    sp<AMessage> thisMsg;
+    uint32_t nextTask = kWhatDoNothing;
+    getCurrentAndNextTask(&thisMsg, nextTask);
     switch(msg->what()) {
         case kWhatDecrypt:
         {
-            sp<AMessage> thisMsg;
-            uint32_t nextTask = kWhatDoNothing;
-            if(OK != getCurrentAndNextTask(&thisMsg, nextTask)) {
-                return;
-            }
             if (thisMsg != nullptr) {
                 int32_t action;
                 err = OK;
@@ -277,15 +279,6 @@
                     mState = kCryptoAsyncError;
                 }
             }
-            // we won't take  next buffers if buffer caused
-            // an error. We want the caller to deal with the error first
-            // Expected behahiour is that the caller acknowledge the error
-            // with a call to stop() which clear the queues.
-            // Then move forward with processing of next set of buffers.
-            if (mState == kCryptoAsyncActive && nextTask != kWhatDoNothing) {
-                sp<AMessage> nextMsg = new AMessage(nextTask,this);
-                nextMsg->post();
-            }
             break;
         }
 
@@ -303,10 +296,13 @@
                 returnList->splice(returnList->end(), std::move(*pendingBuffers));
             }
             pendingBuffers->clear();
+            // stop() is a blocking call.
+            // this is needed as the queue is cleared now and there should
+            // not be any next task. The next buffer when queued will kick off this loop
+            nextTask = kWhatDoNothing;
             mState = kCryptoAsyncActive;
             response->setInt32("err", OK);
             response->postReply(replyID);
-
             break;
         }
 
@@ -318,6 +314,15 @@
             break;
         }
     }
+    // We won't take next buffers if a buffer caused
+    // an error. We want the caller to deal with the error first.
+    // Expected behaviour is that the caller acknowledges the error
+    // with a call to stop(), which clears the queues.
+    // Then move forward with processing of the next set of buffers.
+    if (mState == kCryptoAsyncActive && nextTask != kWhatDoNothing) {
+        sp<AMessage> nextMsg = new AMessage(nextTask,this);
+        nextMsg->post();
+    }
 }
 
 }  // namespace android
diff --git a/media/libstagefright/FrameCaptureLayer.cpp b/media/libstagefright/FrameCaptureLayer.cpp
index 53e4d7d..67a59e3 100644
--- a/media/libstagefright/FrameCaptureLayer.cpp
+++ b/media/libstagefright/FrameCaptureLayer.cpp
@@ -142,7 +142,7 @@
     }
 
     mConsumer = consumer;
-    mSurface = new Surface(producer);
+    mSurface = sp<Surface>::make(producer);
 
     return OK;
 }
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index cc78510..ef3b3bc 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -247,14 +247,14 @@
         case FrameDecoder::kWhatCallbackNotify:
             int32_t callbackId;
             if (!msg->findInt32("callbackID", &callbackId)) {
-                ALOGE("kWhatCallbackNotify: callbackID is expected.");
+                ALOGD("kWhatCallbackNotify: callbackID is expected.");
                 break;
             }
             switch (callbackId) {
                 case MediaCodec::CB_INPUT_AVAILABLE: {
                     int32_t index;
                     if (!msg->findInt32("index", &index)) {
-                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                        ALOGD("CB_INPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
@@ -269,7 +269,7 @@
                     int64_t timeUs;
                     CHECK(msg->findInt32("index", &index));
                     CHECK(msg->findInt64("timeUs", &timeUs));
-                    ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
+                    ALOGV("CB_OUTPUT_AVAILABLE received, index is %d", index);
                     sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
                     if (frameDecoder != nullptr) {
                         frameDecoder->handleOutputBufferAsync(index, timeUs);
@@ -277,10 +277,10 @@
                     break;
                 }
                 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
-                    ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
+                    ALOGV("CB_OUTPUT_FORMAT_CHANGED received");
                     sp<AMessage> format;
                     if (!msg->findMessage("format", &format) || format == nullptr) {
-                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                        ALOGD("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                         break;
                     }
                     sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
@@ -294,25 +294,38 @@
                     int32_t actionCode;
                     AString detail;
                     if (!msg->findInt32("err", &err)) {
-                        ALOGE("CB_ERROR: err is expected.");
+                        ALOGD("CB_ERROR: err is expected.");
                         break;
                     }
                     if (!msg->findInt32("actionCode", &actionCode)) {
-                        ALOGE("CB_ERROR: actionCode is expected.");
+                        ALOGD("CB_ERROR: actionCode is expected.");
                         break;
                     }
                     msg->findString("detail", &detail);
-                    ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
+                    ALOGI("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
                           StrMediaError(err).c_str(), actionCode, detail.c_str());
                     break;
                 }
-                default:
-                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+                case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
+                case MediaCodec::CB_METRICS_FLUSHED:
+                {
+                    // Nothing to do. Informational. Safe to ignore.
                     break;
+                }
+
+                case MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE:
+                // unexpected as we are not using large frames
+                case MediaCodec::CB_CRYPTO_ERROR:
+                // unexpected as we are not using crypto
+                default:
+                {
+                    ALOGD("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+                    break;
+                }
             }
             break;
         default:
-            ALOGE("unexpected message received: %s", msg->debugString().c_str());
+            ALOGD("unexpected message received: %s", msg->debugString().c_str());
             break;
     }
 }
@@ -391,7 +404,8 @@
       mDstFormat(OMX_COLOR_Format16bitRGB565),
       mDstBpp(2),
       mHaveMoreInputs(true),
-      mFirstSample(true) {
+      mFirstSample(true),
+      mSourceStopped(false) {
 }
 
 FrameDecoder::~FrameDecoder() {
@@ -401,7 +415,9 @@
     }
     if (mDecoder != NULL) {
         mDecoder->release();
-        mSource->stop();
+        if (!mSourceStopped) {
+            mSource->stop();
+        }
     }
 }
 
@@ -706,8 +722,12 @@
 
     // wait for handleOutputBufferAsync() to finish
     std::unique_lock _lk(mMutex);
-    mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
-                                 [this] { return mHandleOutputBufferAsyncDone; });
+    if (!mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
+                                 [this] { return mHandleOutputBufferAsyncDone; })) {
+        ALOGE("%s timed out waiting for handleOutputBufferAsync() to complete.", __func__);
+        mSource->stop();
+        mSourceStopped = true;
+    }
     return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
 }
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 0bfb3dd..a83fd8a 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -883,7 +883,7 @@
 
     // Max file size limit is set
     if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
-        size = mMaxFileSizeLimitBytes * 6 / 1000;
+        size = mMaxFileSizeLimitBytes / 1000 * 6;
     }
 
     // Max file duration limit is set
@@ -2640,6 +2640,11 @@
     }
 
     if (mProperties.empty()) {
+        // Min length of hvcC CSD is 23. (ISO/IEC 14496-15:2014 Chapter 8.4.1.1.2)
+        if (mIsHeif && mCodecSpecificDataSize < 23) {
+            ALOGE("hvcC csd size is less than 23 bytes");
+            return;
+        }
         mProperties.push_back(mOwner->addProperty_l({
             .type = static_cast<uint32_t>(mIsAvif ?
                   FOURCC('a', 'v', '1', 'C') :
@@ -5354,6 +5359,9 @@
     CHECK_GE(mCodecSpecificDataSize, 4u);
 
     mOwner->beginBox("apvC");
+    // apvC extends FullBox and hence the need to write first
+    // 4 bytes here when compared with av1C which extends Box.
+    mOwner->writeInt32(0);  // version=0, flags=0
     mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
     mOwner->endBox();  // apvC
 }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f917aa2..97a9f18 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -17,7 +17,9 @@
 //#define LOG_NDEBUG 0
 #include "hidl/HidlSupport.h"
 #define LOG_TAG "MediaCodec"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <dlfcn.h>
 #include <inttypes.h>
@@ -31,11 +33,13 @@
 #include "include/SoftwareRenderer.h"
 
 #include <android_media_codec.h>
+#include <android_media_tv_flags.h>
 
 #include <android/api-level.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
+#include <android/media/quality/IMediaQualityManager.h>
 
 #include <aidl/android/media/BnResourceManagerClient.h>
 #include <aidl/android/media/IResourceManagerService.h>
@@ -46,7 +50,10 @@
 #include <binder/IMemory.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <cutils/properties.h>
+#include <gui/BufferItem.h>
+#include <gui/BufferItemConsumer.h>
 #include <gui/BufferQueue.h>
 #include <gui/Surface.h>
 #include <hidlmemory/FrameworkUtils.h>
@@ -90,9 +97,10 @@
 
 using Status = ::ndk::ScopedAStatus;
 using aidl::android::media::BnResourceManagerClient;
+using aidl::android::media::ClientInfoParcel;
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
-using aidl::android::media::ClientInfoParcel;
+using media::quality::IMediaQualityManager;
 using server_configurable_flags::GetServerConfigurableFlag;
 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
@@ -416,7 +424,10 @@
     ~ResourceManagerServiceProxy();
     status_t init();
     void addResource(const MediaResourceParcel &resource);
+    void addResource(const std::vector<MediaResourceParcel>& resources);
+    void updateResource(const std::vector<MediaResourceParcel>& resources);
     void removeResource(const MediaResourceParcel &resource);
+    void removeResource(const std::vector<MediaResourceParcel>& resources);
     void removeClient();
     void markClientForPendingRemoval();
     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
@@ -628,30 +639,54 @@
 
 void MediaCodec::ResourceManagerServiceProxy::addResource(
         const MediaResourceParcel &resource) {
+    std::vector<MediaResourceParcel> resources;
+    resources.push_back(resource);
+    addResource(resources);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::addResource(
+        const std::vector<MediaResourceParcel>& resources) {
     std::scoped_lock lock{mLock};
     std::shared_ptr<IResourceManagerService> service = getService_l();
     if (service == nullptr) {
         ALOGW("Service isn't available");
         return;
     }
-    std::vector<MediaResourceParcel> resources;
-    resources.push_back(resource);
     service->addResource(getClientInfo(), mClient, resources);
-    mMediaResourceParcel.emplace(resource);
+    std::copy(resources.begin(), resources.end(),
+              std::inserter(mMediaResourceParcel, mMediaResourceParcel.end()));
+}
+
+void MediaCodec::ResourceManagerServiceProxy::updateResource(
+        const std::vector<MediaResourceParcel>& resources) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
+    service->updateResource(getClientInfo(), resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeResource(
         const MediaResourceParcel &resource) {
+    std::vector<MediaResourceParcel> resources;
+    resources.push_back(resource);
+    removeResource(resources);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::removeResource(
+        const std::vector<MediaResourceParcel>& resources) {
     std::scoped_lock lock{mLock};
     std::shared_ptr<IResourceManagerService> service = getService_l();
     if (service == nullptr) {
         ALOGW("Service isn't available");
         return;
     }
-    std::vector<MediaResourceParcel> resources;
-    resources.push_back(resource);
     service->removeResource(getClientInfo(), resources);
-    mMediaResourceParcel.erase(resource);
+    for (const MediaResourceParcel& resource : resources) {
+        mMediaResourceParcel.erase(resource);
+    }
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
@@ -741,12 +776,48 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+class MediaCodec::ReleaseSurface {
+    public:
+        explicit ReleaseSurface(uint64_t usage) {
+            std::tie(mConsumer, mSurface) = BufferItemConsumer::create(usage);
+
+            struct FrameAvailableListener : public BufferItemConsumer::FrameAvailableListener {
+                FrameAvailableListener(const sp<BufferItemConsumer> &consumer) {
+                    mConsumer = consumer;
+                }
+                void onFrameAvailable(const BufferItem&) override {
+                    BufferItem buffer;
+                    // consume buffer
+                    sp<BufferItemConsumer> consumer = mConsumer.promote();
+                    if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
+                        consumer->releaseBuffer(buffer.mGraphicBuffer, buffer.mFence);
+                    }
+                }
+
+                wp<BufferItemConsumer> mConsumer;
+            };
+            mFrameAvailableListener = sp<FrameAvailableListener>::make(mConsumer);
+            mConsumer->setFrameAvailableListener(mFrameAvailableListener);
+            mConsumer->setName(String8{"MediaCodec.release"});
+        }
+
+        const sp<Surface> &getSurface() {
+            return mSurface;
+        }
+
+    private:
+        sp<BufferItemConsumer> mConsumer;
+        sp<Surface> mSurface;
+        sp<BufferItemConsumer::FrameAvailableListener> mFrameAvailableListener;
+    };
+#else
 class MediaCodec::ReleaseSurface {
 public:
     explicit ReleaseSurface(uint64_t usage) {
         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-        mSurface = new Surface(mProducer, false /* controlledByApp */);
-        struct ConsumerListener : public BnConsumerListener {
+        mSurface = sp<Surface>::make(mProducer, false /* controlledByApp */);
+        struct ConsumerListener : public IConsumerListener {
             ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                 mConsumer = consumer;
             }
@@ -778,6 +849,7 @@
     sp<IGraphicBufferConsumer> mConsumer;
     sp<Surface> mSurface;
 };
+#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
 
 ////////////////////////////////////////////////////////////////////////////////
 
@@ -803,6 +875,7 @@
     kWhatFirstTunnelFrameReady = 'ftfR',
     kWhatPollForRenderedBuffers = 'plrb',
     kWhatMetricsUpdated      = 'mtru',
+    kWhatRequiredResourcesChanged = 'reqR',
 };
 
 class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
@@ -950,6 +1023,7 @@
     virtual void onOutputBuffersChanged() override;
     virtual void onFirstTunnelFrameReady() override;
     virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
+    virtual void onRequiredResourcesChanged() override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -1083,6 +1157,12 @@
     notify->post();
 }
 
+void CodecCallback::onRequiredResourcesChanged() {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatRequiredResourcesChanged);
+    notify->post();
+}
+
 static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
     switch (domain) {
     case MediaCodec::DOMAIN_VIDEO:
@@ -1191,16 +1271,225 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+inline MediaResourceType getResourceType(const std::string& resourceName) {
+    // Extract id from the resource name ==> resource name = "componentStoreName-id"
+    std::size_t pos = resourceName.rfind("-");
+    if (pos != std::string::npos) {
+        return static_cast<MediaResourceType>(std::atoi(resourceName.substr(pos + 1).c_str()));
+    }
+
+    ALOGE("Resource ID missing in resource Name: [%s]!", resourceName.c_str());
+    return MediaResourceType::kUnspecified;
+}
+
+/**
+ * Get the float/integer value associated with the given key.
+ *
+ * If no such key is found, it will return false without updating
+ * the value.
+ */
+static bool getValueFor(const sp<AMessage>& msg,
+                        const char* key,
+                        float* value) {
+    if (msg->findFloat(key, value)) {
+        return true;
+    }
+
+    int32_t intValue = 0;
+    if (msg->findInt32(key, &intValue)) {
+        *value = (float)intValue;
+        return true;
+    }
+
+    return false;
+}
+
+/*
+ * Use operating frame rate for per frame resource calculation as below:
+ * - Check if operating-rate is available. If so, use it.
+ * - If its encoder and if we have capture-rate, use that as frame rate.
+ * - Else, check if frame-rate is available. If so, use it.
+ * - Else, use the default value.
+ *
+ * NOTE: This function is called with format that could be:
+ *   - format used to configure the codec
+ *   - codec's input format
+ *   - codec's output format
+ *
+ * Some of the keys may not be present in either input or output format or
+ * both.
+ * For example, "capture-rate", this is currently only used in configure format.
+ *
+ * For encoders, in rare cases, we would expect "operating-rate" to be set
+ * for high-speed capture and it's only used during configuration.
+ */
+static float getOperatingFrameRate(const sp<AMessage>& format,
+                                   float defaultFrameRate,
+                                   bool isEncoder) {
+    float operatingRate = 0;
+    if (getValueFor(format, "operating-rate", &operatingRate)) {
+        // Use operating rate to convert per-frame resources into a whole.
+        return operatingRate;
+    }
+
+    float captureRate = 0;
+    if (isEncoder && getValueFor(format, "capture-rate", &captureRate)) {
+        // Use capture rate to convert per-frame resources into a whole.
+        return captureRate;
+    }
+
+    // Otherwise use frame-rate (or fallback to the default framerate passed)
+    float frameRate = defaultFrameRate;
+    getValueFor(format, "frame-rate", &frameRate);
+    return frameRate;
+}
+
+inline MediaResourceParcel getMediaResourceParcel(const InstanceResourceInfo& resourceInfo) {
+    MediaResourceParcel resource;
+    resource.type = getResourceType(resourceInfo.mName);
+    resource.value = resourceInfo.mStaticCount;
+    return resource;
+}
+
+void MediaCodec::updateResourceUsage(
+        const std::vector<InstanceResourceInfo>& oldResources,
+        const std::vector<InstanceResourceInfo>& newResources) {
+    std::vector<MediaResourceParcel> resources;
+
+    // Add all the new resources first.
+    for (const InstanceResourceInfo& resource : newResources) {
+        resources.push_back(getMediaResourceParcel(resource));
+    }
+
+    // Look for resources that aren't required anymore.
+    for (const InstanceResourceInfo& oldRes : oldResources) {
+        auto found = std::find_if(newResources.begin(),
+                                  newResources.end(),
+                                  [oldRes](const InstanceResourceInfo& newRes) {
+                                      return oldRes.mName == newRes.mName; });
+
+        // If this old resource isn't found in updated resources, that means it's
+        // not required anymore.
+        // Set the count to 0, so that it will be removed from the RM.
+        if (found == newResources.end()) {
+            MediaResourceParcel res = getMediaResourceParcel(oldRes);
+            res.value = 0;
+            resources.push_back(res);
+        }
+    }
+
+    // update/notify the RM about change in resource usage.
+    if (!resources.empty()) {
+        mResourceManagerProxy->updateResource(resources);
+    }
+}
+
+bool MediaCodec::getRequiredSystemResources() {
+    bool success = false;
+    std::vector<InstanceResourceInfo> oldResources;
+    std::vector<InstanceResourceInfo> newResources;
+
+    if (android::media::codec::codec_availability() &&
+        android::media::codec::codec_availability_support()) {
+        Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
+                mRequiredResourceInfo);
+        // Make a copy of the previous required resources, if there were any.
+        oldResources = *resourcesLocked;
+        // Get the required system resources now.
+        newResources = mCodec->getRequiredSystemResources();
+        // Update the dynamic resource usage with the current operating frame-rate.
+        newResources = computeDynamicResources(newResources);
+        *resourcesLocked = newResources;
+        success  = !newResources.empty();
+    }
+
+    // Since the required resources have been updated/changed,
+    // we should update/notify the RM with the updated usage.
+    if (!oldResources.empty()) {
+        updateResourceUsage(oldResources, newResources);
+    }
+    return success;
+}
+
+/**
+ * Convert per frame/input/output resources into static_count
+ *
+ * TODO: (girishshetty): In the future, change InstanceResourceInfo to hold:
+ * - resource type (const, per frame, per input/output)
+ * - resource count
+ */
+std::vector<InstanceResourceInfo> MediaCodec::computeDynamicResources(
+        const std::vector<InstanceResourceInfo>& inResources) {
+    std::vector<InstanceResourceInfo> dynamicResources;
+    for (const InstanceResourceInfo& resource : inResources) {
+        // If mStaticCount isn't 0, nothing to be changed because effectively this is a union.
+        if (resource.mStaticCount != 0) {
+            dynamicResources.push_back(resource);
+            continue;
+        }
+        if (resource.mPerFrameCount != 0) {
+            uint64_t staticCount = resource.mPerFrameCount * mFrameRate;
+            // We are tracking everything as static count here. So set per frame count to 0.
+            dynamicResources.emplace_back(resource.mName, staticCount, 0);
+        }
+        // TODO: (girishshetty): Add per input/output resource conversion here.
+    }
+
+    return dynamicResources;
+}
+
 //static
 status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
     resources.clear();
     // Make sure codec availability feature is on.
-    if (!android::media::codec::codec_availability()) {
+    if (!android::media::codec::codec_availability() ||
+        !android::media::codec::codec_availability_support()) {
         return ERROR_UNSUPPORTED;
     }
-    // TODO: For now this is just an empty function.
-    // The actual implementation should use component store to query the
-    // available resources from hal, and fill in resources with the same.
+
+    // Get binder interface to resource manager.
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
+    std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
+    if (service == nullptr) {
+        ALOGE("Failed to get ResourceManagerService");
+        return INVALID_OPERATION;
+    }
+
+    // Since Codec availability is implemented only through C2 hal,
+    // we aren't tracking (accounting for) any resources for omx based codecs.
+    // But:
+    // - if c2 component store is implemented, then we can query for the system resource
+    // - if no such hal, resources will be empty and this function returns with
+    // ERROR_UNSUPPORTED
+    resources = CCodec::GetGloballyAvailableResources();
+    if (!resources.empty()) {
+        // Get the current resource usage.
+        std::vector<MediaResourceParcel> currentResourceUsage;
+        service->getMediaResourceUsageReport(&currentResourceUsage);
+
+        // Subtract the current usage from the globally available resources.
+        for (GlobalResourceInfo& res : resources) {
+            MediaResourceType type = getResourceType(res.mName);
+            auto used = std::find_if(currentResourceUsage.begin(),
+                                     currentResourceUsage.end(),
+                                     [type](const MediaResourceParcel& item) {
+                                         return item.type == type; });
+
+            if (used != currentResourceUsage.end() && used->value > 0) {
+                // Exclude the used resources.
+                if (res.mAvailable < used->value) {
+                    ALOGW("%s: Resources used (%jd) is more than the Resource Capacity (%jd)!",
+                          __func__, used->value, res.mAvailable);
+                    res.mAvailable = 0;
+                } else {
+                    res.mAvailable -= used->value;
+                }
+            }
+        }
+
+        return OK;
+    }
+
     return ERROR_UNSUPPORTED;
 }
 
@@ -1802,6 +2091,66 @@
     }
 }
 
+void MediaCodec::updatePictureProfile(const sp<AMessage>& msg, bool applyDefaultProfile) {
+    if (!(msg->contains(KEY_PICTURE_PROFILE_HANDLE) || msg->contains(KEY_PICTURE_PROFILE_ID) ||
+          applyDefaultProfile)) {
+        return;
+    }
+
+    sp<IMediaQualityManager> mediaQualityMgr =
+            waitForDeclaredService<IMediaQualityManager>(String16("media_quality"));
+    if (mediaQualityMgr == nullptr) {
+        ALOGE("Media Quality Service not found.");
+        return;
+    }
+
+    int64_t pictureProfileHandle;
+    AString pictureProfileId;
+
+    if (msg->findInt64(KEY_PICTURE_PROFILE_HANDLE, &pictureProfileHandle)) {
+        binder::Status status =
+                mediaQualityMgr->notifyPictureProfileHandleSelection(pictureProfileHandle, 0);
+        if (!status.isOk()) {
+            ALOGE("unexpected status when calling "
+                  "MediaQualityManager.notifyPictureProfileHandleSelection(): %s",
+                  status.toString8().c_str());
+        }
+        msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
+        return;
+    } else if (msg->findString(KEY_PICTURE_PROFILE_ID, &pictureProfileId)) {
+        binder::Status status = mediaQualityMgr->getPictureProfileHandleValue(
+                String16(pictureProfileId.c_str()), 0, &pictureProfileHandle);
+        if (status.isOk()) {
+            if (pictureProfileHandle != -1) {
+                msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
+            } else {
+                ALOGW("PictureProfileHandle not found for pictureProfileId %s",
+                      pictureProfileId.c_str());
+            }
+        } else {
+            ALOGE("unexpected status when calling "
+                  "MediaQualityManager.getPictureProfileHandleValue(): %s",
+                  status.toString8().c_str());
+        }
+        return;
+    } else {  // applyDefaultProfile
+        binder::Status status =
+                mediaQualityMgr->getDefaultPictureProfileHandleValue(0, &pictureProfileHandle);
+        if (status.isOk()) {
+            if (pictureProfileHandle != -1) {
+                msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
+            } else {
+                ALOGW("Default PictureProfileHandle not found");
+            }
+        } else {
+            ALOGE("unexpected status when calling "
+                  "MediaQualityManager.getDefaultPictureProfileHandleValue(): %s",
+                  status.toString8().c_str());
+        }
+        return;
+    }
+}
+
 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
     switch(state) {
         case TunnelPeekState::kLegacyMode:
@@ -2151,6 +2500,7 @@
 }
 
 status_t MediaCodec::init(const AString &name) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::Init#native");
     status_t err = mResourceManagerProxy->init();
     if (err != OK) {
         mErrorLog.log(LOG_TAG, base::StringPrintf(
@@ -2480,10 +2830,14 @@
         const sp<ICrypto> &crypto,
         const sp<IDescrambler> &descrambler,
         uint32_t flags) {
-
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::configure#native");
     // Update the codec importance.
     updateCodecImportance(format);
 
+    if (android::media::tv::flags::apply_picture_profiles()) {
+        updatePictureProfile(format, true /* applyDefaultProfile */);
+    }
+
     // Create and set up metrics for this codec.
     status_t err = OK;
     mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
@@ -2525,6 +2879,14 @@
 
     sp<AMessage> callback = mCallback;
 
+    if (mDomain == DOMAIN_VIDEO) {
+        // Use format to compute initial operating frame rate.
+        // After the successful configuration (and also possibly when output
+        // format change notification), this value will be recalculated.
+        bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
+        mFrameRate = getOperatingFrameRate(format, mFrameRate, isEncoder);
+    }
+
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
             toMediaResourceSubType(mIsHardware, mDomain)));
@@ -2571,7 +2933,8 @@
 status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
     resources.clear();
     // Make sure codec availability feature is on.
-    if (!android::media::codec::codec_availability()) {
+    if (!android::media::codec::codec_availability() ||
+        !android::media::codec::codec_availability_support()) {
         return ERROR_UNSUPPORTED;
     }
     // Make sure that the codec was configured already.
@@ -2581,15 +2944,12 @@
         return INVALID_OPERATION;
     }
 
-    if (!mRequiredResourceInfo.empty()) {
-        resources = mRequiredResourceInfo;
+    Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(mRequiredResourceInfo);
+    if (!(*resourcesLocked).empty()) {
+        resources = *resourcesLocked;
         return OK;
     }
 
-    // TODO: For now this is just an empty function.
-    // The actual implementation should use component interface
-    // (for example, through mCodec->getRequiredDeviceResources) to query the
-    // the required resources for this configuration, and fill in resources with the same.
     return ERROR_UNSUPPORTED;
 }
 
@@ -3157,6 +3517,7 @@
 }
 
 status_t MediaCodec::start() {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::start#native");
     sp<AMessage> msg = new AMessage(kWhatStart, this);
 
     sp<AMessage> callback;
@@ -3214,6 +3575,7 @@
 }
 
 status_t MediaCodec::stop() {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::stop#native");
     sp<AMessage> msg = new AMessage(kWhatStop, this);
 
     sp<AMessage> response;
@@ -3262,7 +3624,7 @@
     /* When external-facing MediaCodec object is created,
        it is already initialized.  Thus, reset is essentially
        release() followed by init(), plus clearing the state */
-
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::reset#native");
     status_t err = release();
 
     // unregister handlers
@@ -3299,6 +3661,7 @@
         int64_t presentationTimeUs,
         uint32_t flags,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueInputBuffer#native");
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
@@ -3320,6 +3683,7 @@
         size_t size,
         const sp<BufferInfosWrapper> &infos,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueInputBuffers#native");
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
     uint32_t bufferFlags = 0;
     uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
@@ -3369,6 +3733,7 @@
         int64_t presentationTimeUs,
         uint32_t flags,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueSecureInputBuffer#native");
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
@@ -3400,6 +3765,7 @@
         const sp<BufferInfosWrapper> &auInfo,
         const sp<CryptoInfosWrapper> &cryptoInfos,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueSecureInputBuffers#native");
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
@@ -3451,6 +3817,7 @@
         const sp<BufferInfosWrapper> &bufferInfos,
         const sp<AMessage> &tunings,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueBuffer#native");
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
@@ -3486,6 +3853,7 @@
         const sp<CryptoInfosWrapper> &cryptoInfos,
         const sp<AMessage> &tunings,
         AString *errorDetailMsg) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueEncryptedBuffer#native");
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
@@ -3542,6 +3910,7 @@
         int64_t *presentationTimeUs,
         uint32_t *flags,
         int64_t timeoutUs) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::dequeueOutputBuffer#native");
     sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
     msg->setInt64("timeoutUs", timeoutUs);
 
@@ -3561,6 +3930,7 @@
 }
 
 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::renderOutputBufferAndRelease#native");
     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
     msg->setSize("index", index);
     msg->setInt32("render", true);
@@ -3570,6 +3940,7 @@
 }
 
 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::renderOutputBufferAndRelease#native");
     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
     msg->setSize("index", index);
     msg->setInt32("render", true);
@@ -3580,6 +3951,7 @@
 }
 
 status_t MediaCodec::releaseOutputBuffer(size_t index) {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::releaseOutputBuffer#native");
     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
     msg->setSize("index", index);
 
@@ -3787,6 +4159,7 @@
 }
 
 status_t MediaCodec::flush() {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::flush#native");
     sp<AMessage> msg = new AMessage(kWhatFlush, this);
 
     sp<AMessage> response;
@@ -4032,6 +4405,21 @@
     clientConfig.id = mCodecId;
 }
 
+void MediaCodec::stopCryptoAsync() {
+    if (mCryptoAsync) {
+        sp<RefBase> obj;
+        sp<MediaCodecBuffer> buffer;
+        std::list<sp<AMessage>> stalebuffers;
+        mCryptoAsync->stop(&stalebuffers);
+        for (sp<AMessage> &msg : stalebuffers) {
+            if (msg->findObject("buffer", &obj)) {
+                buffer = decltype(buffer.get())(obj.get());
+                mBufferChannel->discardBuffer(buffer);
+            }
+        }
+    }
+}
+
 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatCodecNotify:
@@ -4064,10 +4452,7 @@
                     }
                     codecErrorState = kCodecErrorState;
                     origin += stateString(mState);
-                    if (mCryptoAsync) {
-                        //TODO: do some book keeping on the buffers
-                        mCryptoAsync->stop();
-                    }
+                    stopCryptoAsync();
                     switch (mState) {
                         case INITIALIZING:
                         {
@@ -4331,6 +4716,13 @@
                             && usingSwRenderer) {
                         mFlags |= kFlagUsesSoftwareRenderer;
                     }
+
+                    // Use input and output formats to get operating frame-rate.
+                    bool isEncoder = mFlags & kFlagIsEncoder;
+                    mFrameRate = getOperatingFrameRate(mInputFormat, mFrameRate, isEncoder);
+                    mFrameRate = getOperatingFrameRate(mOutputFormat, mFrameRate, isEncoder);
+                    getRequiredSystemResources();
+
                     setState(CONFIGURED);
                     postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
 
@@ -4469,10 +4861,24 @@
                     }
 
                     CHECK_EQ(mState, STARTING);
+
+                    // Add the codec resources upon start.
+                    std::vector<MediaResourceParcel> resources;
                     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
-                        mResourceManagerProxy->addResource(
+                        resources.push_back(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
+                    if (android::media::codec::codec_availability() &&
+                        android::media::codec::codec_availability_support()) {
+                        Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
+                                mRequiredResourceInfo);
+                        for (const InstanceResourceInfo& resource : *resourcesLocked) {
+                            resources.push_back(getMediaResourceParcel(resource));
+                        }
+                    }
+                    if (!resources.empty()) {
+                        mResourceManagerProxy->addResource(resources);
+                    }
                     // Notify the RM that the codec is in use (has been started).
                     ClientConfigParcel clientConfig;
                     initClientConfigParcel(clientConfig);
@@ -4751,6 +5157,16 @@
                     break;
                 }
 
+                case kWhatRequiredResourcesChanged:
+                {
+                    // Get the updated required system resources.
+                    if (getRequiredSystemResources()) {
+                        onRequiredResourcesChanged();
+                    }
+
+                    break;
+                }
+
                 case kWhatEOS:
                 {
                     // We already notify the client of this by using the
@@ -4770,6 +5186,22 @@
                         mVideoRenderQualityTracker.resetForDiscontinuity();
                     }
 
+                    // Remove the codec resources upon stop.
+                    std::vector<MediaResourceParcel> resources;
+                    if (android::media::codec::codec_availability() &&
+                        android::media::codec::codec_availability_support()) {
+                        Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
+                                mRequiredResourceInfo);
+                        for (const InstanceResourceInfo& resource : *resourcesLocked) {
+                            resources.push_back(getMediaResourceParcel(resource));
+                        }
+                        (*resourcesLocked).clear();
+                    }
+                    // Notify the RM to remove those resources.
+                    if (!resources.empty()) {
+                        mResourceManagerProxy->removeResource(resources);
+                    }
+
                     // Notify the RM that the codec has been stopped.
                     ClientConfigParcel clientConfig;
                     initClientConfigParcel(clientConfig);
@@ -5317,9 +5749,7 @@
 
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
-            if (mCryptoAsync) {
-                mCryptoAsync->stop();
-            }
+            stopCryptoAsync();
             sp<AMessage> asyncNotify;
             (void)msg->findMessage("async", &asyncNotify);
             // post asyncNotify if going out of scope.
@@ -5787,11 +6217,7 @@
             mReplyID = replyID;
             // TODO: skip flushing if already FLUSHED
             setState(FLUSHING);
-            if (mCryptoAsync) {
-                std::list<sp<AMessage>> pendingBuffers;
-                mCryptoAsync->stop(&pendingBuffers);
-                //TODO: do something with these buffers
-            }
+            stopCryptoAsync();
             mCodec->signalFlush();
             returnBuffersToCodec();
             TunnelPeekState previousState = mTunnelPeekState;
@@ -6005,12 +6431,24 @@
     // Update the width and the height.
     int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
     bool newSubsession = false;
-    if (android::media::codec::provider_->subsession_metrics()
-            && mOutputFormat->findInt32("width", &width)
-            && mOutputFormat->findInt32("height", &height)
-            && (width != mWidth || height != mHeight)) {
-        // consider a new subsession if the width or height changes.
-        newSubsession = true;
+    if (android::media::codec::provider_->subsession_metrics()) {
+        // consider a new subsession if the actual video size changes
+        // TODO: if the resolution of the clip changes "mid-stream" and crop params did not change
+        // or changed in such a way that the actual video size did not change then new subsession is
+        // not detected.
+        // TODO: although rare, the buffer attributes (rect(...), width, height) need not be a true
+        // representation of actual stream attributes (rect(...), width, height). It is only
+        // required that actual video frame is correctly presented in the rect() region of the
+        // buffer making this approach of detecting subsession less reliable.
+        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+            if ((right - left + 1) != mWidth || (bottom - top + 1) != mHeight) {
+                newSubsession = true;
+            }
+        } else if (mOutputFormat->findInt32("width", &width) &&
+                   mOutputFormat->findInt32("height", &height) &&
+                   (width != mWidth || height != mHeight)) {
+            newSubsession = true;
+        }
     }
     // TODO: properly detect new audio subsession
 
@@ -6050,6 +6488,21 @@
     }
 
     updateHdrMetrics(false /* isConfig */);
+
+    if (mDomain == DOMAIN_VIDEO) {
+        bool isEncoder = mFlags & kFlagIsEncoder;
+        // Since the output format has changed, see if we need to update
+        // operating frame-rate.
+        float frameRate = getOperatingFrameRate(mOutputFormat, mFrameRate, isEncoder);
+        // if the operating frame-rate has changed, we need to recalibrate the
+        // required system resources again and notify the caller.
+        if (frameRate != mFrameRate) {
+            mFrameRate = frameRate;
+            if (getRequiredSystemResources()) {
+                onRequiredResourcesChanged();
+            }
+        }
+    }
 }
 
 // always called from the looper thread (and therefore not mutexed)
@@ -6304,6 +6757,7 @@
     size_t size = 0;
     int64_t timeUs = 0;
     uint32_t flags = 0;
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::onQueueInputBuffer#native");
     CHECK(msg->findSize("index", &index));
     CHECK(msg->findInt64("timeUs", &timeUs));
     CHECK(msg->findInt32("flags", (int32_t *)&flags));
@@ -6514,7 +6968,11 @@
                     && (mFlags & kFlagUseCryptoAsync)) {
                 // create error detail
                 sp<AMessage> cryptoErrorInfo = new AMessage();
-                buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
+                if (msg->findObject("cryptoInfos", &obj)) {
+                    cryptoErrorInfo->setObject("cryptoInfos", obj);
+                } else {
+                    buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
+                }
                 cryptoErrorInfo->setInt32("err", err);
                 cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
                 cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
@@ -6587,7 +7045,7 @@
             // prepare a message and enqueue
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
-            mCryptoAsync->decrypt(cryptoInfo);
+            err = mCryptoAsync->decrypt(cryptoInfo);
         } else if (msg->findObject("cryptoInfos", &obj)) {
                 buffer->meta()->setObject("cryptoInfos", obj);
                 err = mBufferChannel->queueSecureInputBuffers(
@@ -7011,6 +7469,7 @@
 }
 
 void MediaCodec::onOutputBufferAvailable() {
+    ScopedTrace trace(ATRACE_TAG, "MediaCodec::onOutputBufferAvailable#native");
     int32_t index;
     while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
         if (discardDecodeOnlyOutputBuffer(index)) {
@@ -7081,11 +7540,8 @@
     }
 }
 
-void MediaCodec::onRequiredResourcesChanged(
-        const std::vector<InstanceResourceInfo>& resourceInfo) {
-    mRequiredResourceInfo = resourceInfo;
-    // Make sure codec availability feature is on.
-    if (mCallback != nullptr && android::media::codec::codec_availability()) {
+void MediaCodec::onRequiredResourcesChanged() {
+    if (mCallback != nullptr) {
         // Post the callback
         sp<AMessage> msg = mCallback->dup();
         msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
@@ -7135,6 +7591,9 @@
     }
     updateLowLatency(params);
     updateCodecImportance(params);
+    if (android::media::tv::flags::apply_picture_profiles()) {
+        updatePictureProfile(params, false /* applyDefaultProfile */);
+    }
     mapFormat(mComponentName, params, nullptr, false);
     updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 4ad3276..daad2c6 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -20,6 +20,11 @@
 
 #include <binder/IServiceManager.h>
 
+#include <android_media_codec.h>
+
+#include <android-base/properties.h>
+#include <android-base/no_destructor.h>
+
 #include <media/IMediaCodecList.h>
 #include <media/IMediaPlayerService.h>
 #include <media/MediaCodecInfo.h>
@@ -50,10 +55,6 @@
 
 namespace {
 
-Mutex sInitMutex;
-
-Mutex sRemoteInitMutex;
-
 constexpr const char* kProfilingResults =
         MediaCodecsXmlParser::defaultProfilingResultsXmlPath;
 
@@ -112,8 +113,98 @@
 
 }  // unnamed namespace
 
-// static
-sp<IMediaCodecList> MediaCodecList::sCodecList;
+class MediaCodecList::InstanceCache {
+public:
+    static InstanceCache &Get() {
+        static base::NoDestructor<InstanceCache> sCache;
+        return *sCache;
+    }
+
+    InstanceCache() : mBootCompleted(false), mBootCompletedRemote(false) {}
+
+    sp<IMediaCodecList> getLocalInstance() {
+        std::unique_lock l(mLocalMutex);
+
+        if (android::media::codec::provider_->in_process_sw_audio_codec_support()
+                && !mBootCompleted) {
+            mBootCompleted = base::GetBoolProperty("sys.boot_completed", false);
+            if (mLocalInstance != nullptr && mBootCompleted) {
+                ALOGI("Boot completed, will reset local instance.");
+                mLocalInstance = nullptr;
+            }
+        }
+        if (mLocalInstance == nullptr) {
+            MediaCodecList *codecList = new MediaCodecList(GetBuilders());
+            if (codecList->initCheck() == OK) {
+                mLocalInstance = codecList;
+
+                if (isProfilingNeeded()) {
+                    ALOGV("Codec profiling needed, will be run in separated thread.");
+                    pthread_t profiler;
+                    if (pthread_create(&profiler, nullptr, profilerThreadWrapper, nullptr) != 0) {
+                        ALOGW("Failed to create thread for codec profiling.");
+                    }
+                }
+            } else {
+                // failure to initialize may be temporary. retry on next call.
+                delete codecList;
+            }
+        }
+
+        return mLocalInstance;
+    }
+
+    void setLocalInstance(const sp<IMediaCodecList> &instance) {
+        std::unique_lock l(mLocalMutex);
+        mLocalInstance = instance;
+    }
+
+    sp<IMediaCodecList> getRemoteInstance() {
+        std::unique_lock l(mRemoteMutex);
+        if (android::media::codec::provider_->in_process_sw_audio_codec_support()
+                && !mBootCompletedRemote) {
+            mBootCompletedRemote = base::GetBoolProperty("sys.boot_completed", false);
+            if (mRemoteInstance != nullptr && mBootCompletedRemote) {
+                ALOGI("Boot completed, will reset remote instance.");
+                mRemoteInstance = nullptr;
+            }
+        }
+        if (mRemoteInstance == nullptr) {
+            mMediaPlayer = defaultServiceManager()->getService(String16("media.player"));
+            sp<IMediaPlayerService> service =
+                interface_cast<IMediaPlayerService>(mMediaPlayer);
+            if (service.get() != nullptr) {
+                mRemoteInstance = service->getCodecList();
+                if (mRemoteInstance != nullptr) {
+                    mBinderDeathObserver = new BinderDeathObserver();
+                    mMediaPlayer->linkToDeath(mBinderDeathObserver.get());
+                }
+            }
+            if (mRemoteInstance == nullptr) {
+                // if failed to get remote list, create local list
+                mRemoteInstance = getLocalInstance();
+            }
+        }
+        return mRemoteInstance;
+    }
+
+    void binderDied() {
+        std::unique_lock l(mRemoteMutex);
+        mRemoteInstance.clear();
+        mBinderDeathObserver.clear();
+    }
+
+private:
+    std::mutex mLocalMutex;
+    bool mBootCompleted                 GUARDED_BY(mLocalMutex);
+    sp<IMediaCodecList> mLocalInstance  GUARDED_BY(mLocalMutex);
+
+    std::mutex mRemoteMutex;
+    bool mBootCompletedRemote                       GUARDED_BY(mRemoteMutex);
+    sp<IMediaCodecList> mRemoteInstance             GUARDED_BY(mRemoteMutex);
+    sp<BinderDeathObserver> mBinderDeathObserver    GUARDED_BY(mRemoteMutex);
+    sp<IBinder> mMediaPlayer                        GUARDED_BY(mRemoteMutex);
+};
 
 // static
 void *MediaCodecList::profilerThreadWrapper(void * /*arg*/) {
@@ -136,69 +227,22 @@
         return nullptr;
     }
 
-    {
-        Mutex::Autolock autoLock(sInitMutex);
-        sCodecList = codecList;
-    }
+    InstanceCache::Get().setLocalInstance(codecList);
     return nullptr;
 }
 
 // static
 sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
-    Mutex::Autolock autoLock(sInitMutex);
-
-    if (sCodecList == nullptr) {
-        MediaCodecList *codecList = new MediaCodecList(GetBuilders());
-        if (codecList->initCheck() == OK) {
-            sCodecList = codecList;
-
-            if (isProfilingNeeded()) {
-                ALOGV("Codec profiling needed, will be run in separated thread.");
-                pthread_t profiler;
-                if (pthread_create(&profiler, nullptr, profilerThreadWrapper, nullptr) != 0) {
-                    ALOGW("Failed to create thread for codec profiling.");
-                }
-            }
-        } else {
-            // failure to initialize may be temporary. retry on next call.
-            delete codecList;
-        }
-    }
-
-    return sCodecList;
+    return InstanceCache::Get().getLocalInstance();
 }
 
-sp<IMediaCodecList> MediaCodecList::sRemoteList;
-
-sp<MediaCodecList::BinderDeathObserver> MediaCodecList::sBinderDeathObserver;
-sp<IBinder> MediaCodecList::sMediaPlayer;  // kept since linked to death
-
 void MediaCodecList::BinderDeathObserver::binderDied(const wp<IBinder> &who __unused) {
-    Mutex::Autolock _l(sRemoteInitMutex);
-    sRemoteList.clear();
-    sBinderDeathObserver.clear();
+    InstanceCache::Get().binderDied();
 }
 
 // static
 sp<IMediaCodecList> MediaCodecList::getInstance() {
-    Mutex::Autolock _l(sRemoteInitMutex);
-    if (sRemoteList == nullptr) {
-        sMediaPlayer = defaultServiceManager()->getService(String16("media.player"));
-        sp<IMediaPlayerService> service =
-            interface_cast<IMediaPlayerService>(sMediaPlayer);
-        if (service.get() != nullptr) {
-            sRemoteList = service->getCodecList();
-            if (sRemoteList != nullptr) {
-                sBinderDeathObserver = new BinderDeathObserver();
-                sMediaPlayer->linkToDeath(sBinderDeathObserver.get());
-            }
-        }
-        if (sRemoteList == nullptr) {
-            // if failed to get remote list, create local list
-            sRemoteList = getLocalInstance();
-        }
-    }
-    return sRemoteList;
+    return InstanceCache::Get().getRemoteInstance();
 }
 
 MediaCodecList::MediaCodecList(std::vector<MediaCodecListBuilderBase*> builders) {
diff --git a/media/libstagefright/MediaCodecListWriter.cpp b/media/libstagefright/MediaCodecListWriter.cpp
index c4fb199..2048f3b 100644
--- a/media/libstagefright/MediaCodecListWriter.cpp
+++ b/media/libstagefright/MediaCodecListWriter.cpp
@@ -19,9 +19,12 @@
 #include <utils/Log.h>
 
 #include <media/stagefright/foundation/AMessage.h>
+#include "media/stagefright/foundation/AString.h"
 #include <media/stagefright/MediaCodecListWriter.h>
 #include <media/MediaCodecInfo.h>
 
+#include <string>
+
 namespace android {
 
 void MediaCodecListWriter::addGlobalSetting(
@@ -56,8 +59,52 @@
 
 void MediaCodecListWriter::writeCodecInfos(
         std::vector<sp<MediaCodecInfo>> *codecInfos) const {
+    // Since the introduction of the NDK MediaCodecList API, each
+    // MediaCodecInfo object can only support a single media type, so infos that
+    // support multiple media types are split into multiple infos.
+    // This process may result in name collisions that are handled here.
+
+    // Prefer codec names that already support a single media type
+    // and also any existing aliases. If an alias matches an existing
+    // codec name, it is ignored, which is the right behavior.
+    std::set<std::string> reservedNames;
     for (const sp<MediaCodecInfo> &info : mCodecInfos) {
-        codecInfos->push_back(info);
+        Vector<AString> mediaTypes;
+        info->getSupportedMediaTypes(&mediaTypes);
+        if (mediaTypes.size() == 1) {
+            reservedNames.insert(info->getCodecName());
+        }
+        Vector<AString> aliases;
+        info->getAliases(&aliases);
+        for (const AString &alias : aliases) {
+            reservedNames.insert(alias.c_str());
+        }
+    }
+
+    for (const sp<MediaCodecInfo> &info : mCodecInfos) {
+        Vector<AString> mediaTypes;
+        info->getSupportedMediaTypes(&mediaTypes);
+        if (mediaTypes.size() == 1) {
+            codecInfos->push_back(info);
+        } else {
+            // disambiguate each type
+            for (const AString &mediaType : mediaTypes) {
+                // get the type name after the first slash (if exists)
+                ssize_t slashPosition = mediaType.find("/");
+                const char *typeName = mediaType.c_str() + (slashPosition + 1);
+
+                // find a unique name for the split codec info starting with "<name>.<type>"
+                AString newName = AStringPrintf("%s.%s", info->getCodecName(), typeName);
+                std::string newNameStr = newName.c_str();
+                // append increasing suffix of the form ".<number>" until a unique name is found
+                for (size_t ix = 1; reservedNames.count(newNameStr) > 0; ++ix) {
+                    newNameStr = AStringPrintf("%s.%zu", newName.c_str(), ix).c_str();
+                }
+
+                codecInfos->push_back(info->splitOutType(mediaType.c_str(), newNameStr.c_str()));
+                reservedNames.insert(newNameStr);
+            }
+        }
     }
 }
 
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index b07f8f7..2a1fc81 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -1030,6 +1030,10 @@
             }
             signalEOS();
        }
+       // MediaCodec::CB_CRYPTO_ERROR is unexpected as we are not using crypto
+       // MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE is unexpected as we are not using large frames
+       // MediaCodec::CB_METRICS_FLUSHED is safe to ignore as it is informational only
+       // MediaCodec::CB_REQUIRED_RESOURCES_CHANGED is safe to ignore as it is informational only
        break;
     }
     case kWhatStart:
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index b640040..1891954 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -18,8 +18,15 @@
 #define LOG_TAG "MediaSync"
 #include <inttypes.h>
 
-#include <gui/BufferQueue.h>
+#include <com_android_graphics_libgui_flags.h>
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+#include <gui/BufferItemConsumer.h>
+#include <gui/Surface.h>
+#else
 #include <gui/IGraphicBufferConsumer.h>
+#endif
+#include <gui/BufferQueue.h>
 #include <gui/IGraphicBufferProducer.h>
 
 #include <media/AudioTrack.h>
@@ -74,7 +81,11 @@
 
 MediaSync::~MediaSync() {
     if (mInput != NULL) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+        mInput->abandon();
+#else
         mInput->consumerDisconnect();
+#endif
     }
     if (mOutput != NULL) {
         mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
@@ -204,12 +215,44 @@
         return INVALID_OPERATION;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    int usageFlags = 0;
+    mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usageFlags);
+
+    auto [newInput, surface] = BufferItemConsumer::create(usageFlags);
+
+    sp<InputListener> listener(new InputListener(this));
+    newInput->setFrameAvailableListener(listener);
+    newInput->setName(String8("MediaSync"));
+    // propagate usage bits from output surface
+    status_t status = newInput->setConsumerUsageBits(usageFlags);
+    if (status != OK) {
+        ALOGE("%s: Unable to set usage bits to %d", __FUNCTION__, usageFlags);
+        return status;
+    }
+
+    // set undequeued buffer count
+    int minUndequeuedBuffers;
+    mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+    status = newInput->setMaxAcquiredBufferCount(minUndequeuedBuffers);
+    if (status != OK) {
+        ALOGE("%s: Unable to set setMaxAcquiredBufferCount to %d", __FUNCTION__,
+              minUndequeuedBuffers);
+        return status;
+    }
+
+    mMaxAcquiredBufferCount = minUndequeuedBuffers;
+    mUsageFlagsFromOutput = usageFlags;
+    mInput = newInput;
+    mListener = listener;
+    *outBufferProducer = surface->getIGraphicBufferProducer();
+    return OK;
+#else
     sp<IGraphicBufferProducer> bufferProducer;
     sp<IGraphicBufferConsumer> bufferConsumer;
     BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer);
 
     sp<InputListener> listener(new InputListener(this));
-    IInterface::asBinder(bufferConsumer)->linkToDeath(listener);
     status_t status =
         bufferConsumer->consumerConnect(listener, false /* controlledByApp */);
     if (status == NO_ERROR) {
@@ -228,6 +271,7 @@
         bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
     }
     return status;
+#endif
 }
 
 void MediaSync::resync_l() {
@@ -340,7 +384,15 @@
 
 void MediaSync::setName(const AString &name) {
     Mutex::Autolock lock(mMutex);
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    if (mInput) {
+        mInput->setName(String8(name.c_str()));
+    } else {
+        ALOGE("%s with name %s called without an mInput set", __FUNCTION__, name.c_str());
+    }
+#else
     mInput->setConsumerName(String8(name.c_str()));
+#endif
 }
 
 void MediaSync::flush() {
@@ -622,7 +674,11 @@
 
     ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId());
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    status = mInput->detachBuffer(bufferItem.mGraphicBuffer);
+#else
     status = mInput->detachBuffer(bufferItem.mSlot);
+#endif
     if (status != NO_ERROR) {
         ALOGE("detaching buffer from input failed (%d)", status);
         if (status == NO_INIT) {
@@ -635,7 +691,11 @@
     if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
         // Something is wrong since this buffer should be at our hands, bail.
         ALOGE("received buffer multiple times from input");
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+        mInput->abandon();
+#else
         mInput->consumerDisconnect();
+#endif
         onAbandoned_l(true /* isInput */);
         return;
     }
@@ -688,7 +748,11 @@
 
     if (mBuffersSentToOutput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
         // Something is wrong since this buffer should be held by output now, bail.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+        mInput->abandon();
+#else
         mInput->consumerDisconnect();
+#endif
         onAbandoned_l(true /* isInput */);
         return;
     }
@@ -749,10 +813,18 @@
 
     // Attach and release the buffer back to the input.
     int consumerSlot;
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    status_t status = mInput->attachBuffer(oldBuffer);
+#else
     status_t status = mInput->attachBuffer(&consumerSlot, oldBuffer);
+#endif
     ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
     if (status == NO_ERROR) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+        mInput->releaseBuffer(oldBuffer, fence);
+#else
         status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */, fence);
+#endif
         ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
     }
 
@@ -771,7 +843,11 @@
         if (isInput) {
             mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
         } else {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+            mInput->abandon();
+#else
             mInput->consumerDisconnect();
+#endif
         }
         mIsAbandoned = true;
     }
@@ -816,6 +892,7 @@
     mSync->onFrameAvailableFromInput();
 }
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
 // We don't care about sideband streams, since we won't relay them.
 void MediaSync::InputListener::onSidebandStreamChanged() {
     ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly.");
@@ -826,6 +903,7 @@
     Mutex::Autolock lock(mSync->mMutex);
     mSync->onAbandoned_l(true /* isInput */);
 }
+#endif
 
 MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync,
         const sp<IGraphicBufferProducer> &output)
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index df61c29..000ba11 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -8,5 +8,7 @@
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
 
-per-file Camera*.cpp = file:/camera/OWNERS
-per-file Camera*.h = file:/camera/OWNERS
+per-file Camera*.cpp=file:/camera/OWNERS
+per-file Camera*.h=file:/camera/OWNERS
+per-file FrameDecoder.cpp,FrameDecoder.h=set noparent
+per-file FrameDecoder.cpp,FrameDecoder.h=file:platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 354fab0..b3bb865 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -79,11 +79,7 @@
     },
     {
       "name": "CtsMediaTranscodingTestCases"
-    }
-  ],
-  "postsubmit": [
-    // writerTest fails about 5 out of 66
-    // { "name": "writerTest" },
+    },
     {
         "name": "BatteryChecker_test"
     },
@@ -94,6 +90,14 @@
         "name": "HEVCUtilsUnitTest"
     },
     {
+      "name": "MctsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
+    },
+    {
       "name": "MctsMediaDecoderTestCases",
       "options": [
         {
@@ -108,14 +112,10 @@
           "include-annotation": "android.platform.test.annotations.Presubmit"
         }
       ]
-    },
-    {
-      "name": "MctsMediaCodecTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        }
-      ]
     }
   ]
+  // "postsubmit": [
+  //   writerTest fails about 5 out of 66
+  //   { "name": "writerTest" },
+  // ]
 }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index db6d46a..2dec600 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -736,6 +736,41 @@
     }
 }
 
+static void parseAPVProfileLevelFromCsd(const sp<ABuffer>& csd, sp<AMessage>& format) {
+    // Parse CSD structure to extract profile level information
+    // https://github.com/openapv/openapv/blob/main/readme/apv_isobmff.md#syntax-1
+    const uint8_t* data = csd->data();
+    size_t csdSize = csd->size();
+    if (csdSize < 17 || data[0] != 0x01) {  // configurationVersion == 1
+        ALOGE("CSD is not according APV Configuration Standard");
+        return;
+    }
+    uint8_t profileData = data[5];             // profile_idc
+    uint8_t levelData = data[6];               // level_idc
+    uint8_t band = data[7];                    // band_idc
+    uint8_t bitDepth = (data[16] & 0x0F) + 8;  // bit_depth_minus8
+
+    const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> profiles{
+            {{33, 10}, APVProfile422_10},
+            {{44, 12}, APVProfile422_10HDR10Plus},
+    };
+    int32_t profile;
+    if (profiles.map(std::make_pair(profileData, bitDepth), &profile)) {
+        // bump to HDR profile
+        if (isHdr10or10Plus(format) && profile == APVProfile422_10) {
+            if (format->contains("hdr-static-info")) {
+                profile = APVProfile422_10HDR10;
+            }
+        }
+        format->setInt32("profile", profile);
+    }
+    int level_num = (levelData / 30) * 2;
+    if (levelData % 30 == 0) {
+        level_num -= 1;
+    }
+    int32_t level = ((0x100 << (level_num - 1)) | (1 << band));
+    format->setInt32("level", level);
+}
 
 static std::vector<std::pair<const char *, uint32_t>> stringMappings {
     {
@@ -1456,6 +1491,7 @@
         buffer->meta()->setInt32("csd", true);
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
+        parseAPVProfileLevelFromCsd(buffer, msg);
     } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
         if (esds.InitCheck() != (status_t)OK) {
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
deleted file mode 100644
index 5ab49a7..0000000
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ /dev/null
@@ -1,44 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_aacdec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_aacdec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_aacdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: [
-        "SoftAAC2.cpp",
-        "DrcPresModeWrap.cpp",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libFraunhoferAAC"],
-
-    shared_libs: [
-        "libcutils",
-    ],
-}
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
deleted file mode 100644
index 157cab6..0000000
--- a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
+++ /dev/null
@@ -1,371 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "DrcPresModeWrap.h"
-
-#include <assert.h>
-
-#define LOG_TAG "SoftAAC2_DrcWrapper"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-//#define DRC_PRES_MODE_WRAP_DEBUG
-
-#define GPM_ENCODER_TARGET_LEVEL 64
-#define MAX_TARGET_LEVEL 40
-
-CDrcPresModeWrapper::CDrcPresModeWrapper()
-{
-    mDataUpdate = true;
-
-    /* Data from streamInfo. */
-    /* Initialized to the same values as in the aac decoder */
-    mStreamPRL = -1;
-    mStreamDRCPresMode = -1;
-    mStreamNrAACChan = 0;
-    mStreamNrOutChan = 0;
-
-    /* Desired values (set by user). */
-    /* Initialized to the same values as in the aac decoder */
-    mDesTarget = -1;
-    mDesAttFactor = 0;
-    mDesBoostFactor = 0;
-    mDesHeavy = 0;
-
-    mEncoderTarget = -1;
-
-    /* Values from last time. */
-    mLastTarget = -2;
-    mLastAttFactor = -1;
-    mLastBoostFactor = -1;
-    mLastHeavy = 0;
-}
-
-CDrcPresModeWrapper::~CDrcPresModeWrapper()
-{
-}
-
-void
-CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle)
-{
-    mHandleDecoder = handle;
-}
-
-void
-CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo)
-{
-    assert(pStreamInfo);
-
-    if (mStreamPRL != pStreamInfo->drcProgRefLev) {
-        mStreamPRL = pStreamInfo->drcProgRefLev;
-        mDataUpdate = true;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-        ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL);
-#endif
-    }
-
-    if (mStreamDRCPresMode != pStreamInfo->drcPresMode) {
-        mStreamDRCPresMode = pStreamInfo->drcPresMode;
-        mDataUpdate = true;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-        ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode);
-#endif
-    }
-
-    if (mStreamNrAACChan != pStreamInfo->aacNumChannels) {
-        mStreamNrAACChan = pStreamInfo->aacNumChannels;
-        mDataUpdate = true;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-        ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan);
-#endif
-    }
-
-    if (mStreamNrOutChan != pStreamInfo->numChannels) {
-        mStreamNrOutChan = pStreamInfo->numChannels;
-        mDataUpdate = true;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-        ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan);
-#endif
-    }
-
-
-
-    if (mStreamNrOutChan<mStreamNrAACChan) {
-        mIsDownmix = true;
-    } else {
-        mIsDownmix = false;
-    }
-
-    if (mIsDownmix && (mStreamNrOutChan == 1)) {
-        mIsMonoDownmix = true;
-    } else {
-        mIsMonoDownmix = false;
-    }
-
-    if (mIsDownmix && mStreamNrOutChan == 2){
-        mIsStereoDownmix = true;
-    } else {
-        mIsStereoDownmix = false;
-    }
-
-}
-
-void
-CDrcPresModeWrapper::setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value)
-{
-    switch (param) {
-    case DRC_PRES_MODE_WRAP_DESIRED_TARGET:
-        mDesTarget = value;
-        break;
-    case DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR:
-        mDesAttFactor = value;
-        break;
-    case DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR:
-        mDesBoostFactor = value;
-        break;
-    case DRC_PRES_MODE_WRAP_DESIRED_HEAVY:
-        mDesHeavy = value;
-        break;
-    case DRC_PRES_MODE_WRAP_ENCODER_TARGET:
-        mEncoderTarget = value;
-        break;
-    default:
-        break;
-    }
-    mDataUpdate = true;
-}
-
-void
-CDrcPresModeWrapper::update()
-{
-    // Get Data from Decoder
-    int progRefLevel = mStreamPRL;
-    int drcPresMode = mStreamDRCPresMode;
-
-    // by default, do as desired
-    int newTarget         = mDesTarget;
-    int newAttFactor      = mDesAttFactor;
-    int newBoostFactor    = mDesBoostFactor;
-    int newHeavy          = mDesHeavy;
-
-    if (mDataUpdate) {
-        // sanity check
-        if ((mDesTarget < MAX_TARGET_LEVEL) && (mDesTarget != -1)){
-            mDesTarget = MAX_TARGET_LEVEL;  // limit target level to -10 dB or below
-            newTarget = MAX_TARGET_LEVEL;
-        }
-
-        if (mEncoderTarget != -1) {
-            if (mDesTarget<124) { // if target level > -31 dB
-                if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
-                    // no stereo or mono downmixing, calculated scaling of light DRC
-                    /* use as little compression as possible */
-                    newAttFactor = 0;
-                    newBoostFactor = 0;
-                    if (mDesTarget<progRefLevel) { // if target level > PRL
-                        if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level
-                            // mEncoderTarget > target level > PRL
-                            int calcFactor;
-                            float calcFactor_norm;
-                            // 0.0f < calcFactor_norm < 1.0f
-                            calcFactor_norm = (float)(mDesTarget - progRefLevel) /
-                                    (float)(mEncoderTarget - progRefLevel);
-                            calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127
-                            // calcFactor is the lower limit
-                            newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor;
-                            // new AttFactor will be always = calcFactor, as it is set to 0 before.
-                            newBoostFactor = newAttFactor;
-                        } else {
-                            /* target level > mEncoderTarget > PRL */
-                            // newTDLimiterEnable = 1;
-                            // the time domain limiter must always be active in this case.
-                            //     It is assumed that the framework activates it by default
-                            newAttFactor = 127;
-                            newBoostFactor = 127;
-                        }
-                    } else { // target level <= PRL
-                        // no restrictions required
-                        // newAttFactor = newAttFactor;
-                    }
-                } else { // downmixing
-                    // if target level > -23 dB or mono downmix
-                    if ( (mDesTarget<92) || mIsMonoDownmix ) {
-                        newHeavy = 1;
-                    } else {
-                        // we perform a downmix, so, we need at least full light DRC
-                        newAttFactor = 127;
-                    }
-                }
-            } else { // target level <= -31 dB
-                // playback -31 dB: light DRC only needed if we perform downmixing
-                if (mIsDownmix) {   // we do downmixing
-                    newAttFactor = 127;
-                }
-            }
-        }
-        else { // handle other used encoder target levels
-
-            // Validation check: DRC presentation mode is only specified for max. 5.1 channels
-            if (mStreamNrAACChan > 6) {
-                drcPresMode = 0;
-            }
-
-            switch (drcPresMode) {
-            case 0:
-            default: // presentation mode not indicated
-            {
-
-                if (mDesTarget<124) { // if target level > -31 dB
-                    // no stereo or mono downmixing
-                    if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
-                        if (mDesTarget<progRefLevel) { // if target level > PRL
-                            // newTDLimiterEnable = 1;
-                            // the time domain limiter must always be active in this case.
-                            //    It is assumed that the framework activates it by default
-                            newAttFactor = 127; // at least, use light compression
-                        } else { // target level <= PRL
-                            // no restrictions required
-                            // newAttFactor = newAttFactor;
-                        }
-                    } else { // downmixing
-                        // newTDLimiterEnable = 1;
-                        // the time domain limiter must always be active in this case.
-                        //    It is assumed that the framework activates it by default
-
-                        // if target level > -23 dB or mono downmix
-                        if ( (mDesTarget < 92) || mIsMonoDownmix ) {
-                            newHeavy = 1;
-                        } else{
-                            // we perform a downmix, so, we need at least full light DRC
-                            newAttFactor = 127;
-                        }
-                    }
-                } else { // target level <= -31 dB
-                    if (mIsDownmix) {   // we do downmixing.
-                        // newTDLimiterEnable = 1;
-                        // the time domain limiter must always be active in this case.
-                        //    It is assumed that the framework activates it by default
-                        newAttFactor = 127;
-                    }
-                }
-            }
-            break;
-
-            // Presentation mode 1 and 2 according to ETSI TS 101 154:
-            // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding
-            // in Broadcasting Applications based on the MPEG-2 Transport Stream,
-            // section C.5.4., "Decoding", and Table C.33
-            // ISO DRC            -> newHeavy = 0  (Use light compression, MPEG-style)
-            // Compression_value  -> newHeavy = 1  (Use heavy compression, DVB-style)
-            // scaling restricted -> newAttFactor = 127
-
-            case 1: // presentation mode 1, Light:-31/Heavy:-23
-            {
-                if (mDesTarget < 124) { // if target level > -31 dB
-                    // playback up to -23 dB
-                    newHeavy = 1;
-                } else { // target level <= -31 dB
-                    // playback -31 dB
-                    if (mIsDownmix) {   // we do downmixing.
-                        newAttFactor = 127;
-                    }
-                }
-            }
-            break;
-
-            case 2: // presentation mode 2, Light:-23/Heavy:-23
-            {
-                if (mDesTarget < 124) { // if target level > -31 dB
-                    // playback up to -23 dB
-                    if (mIsMonoDownmix) { // if mono downmix
-                        newHeavy = 1;
-                    } else {
-                        newHeavy = 0;
-                        newAttFactor = 127;
-                    }
-                } else { // target level <= -31 dB
-                    // playback -31 dB
-                    newHeavy = 0;
-                    if (mIsDownmix) {   // we do downmixing.
-                        newAttFactor = 127;
-                    }
-                }
-            }
-            break;
-
-            } // switch()
-        } // if (mEncoderTarget  == GPM_ENCODER_TARGET_LEVEL)
-
-        // validation check again
-        if (newHeavy == 1) {
-            newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
-            newAttFactor = 127;
-        }
-
-        // update the decoder
-        if (newTarget != mLastTarget) {
-            aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget);
-            mLastTarget = newTarget;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-            if (newTarget != mDesTarget)
-                ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget);
-            else
-                ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget);
-#endif
-        }
-
-        if (newAttFactor != mLastAttFactor) {
-            aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor);
-            mLastAttFactor = newAttFactor;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-            if (newAttFactor != mDesAttFactor)
-                ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor);
-            else
-                ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor);
-#endif
-        }
-
-        if (newBoostFactor != mLastBoostFactor) {
-            aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor);
-            mLastBoostFactor = newBoostFactor;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-            if (newBoostFactor != mDesBoostFactor)
-                ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n",
-                        newBoostFactor, mDesBoostFactor);
-            else
-                ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor);
-#endif
-        }
-
-        if (newHeavy != mLastHeavy) {
-            aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy);
-            mLastHeavy = newHeavy;
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-            if (newHeavy != mDesHeavy)
-                ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n",
-                        newHeavy, mDesHeavy);
-            else
-                ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy);
-#endif
-        }
-
-#ifdef DRC_PRES_MODE_WRAP_DEBUG
-        ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget,
-                newAttFactor, newBoostFactor, newHeavy);
-#endif
-        mDataUpdate = false;
-
-    } // if (mDataUpdate)
-}
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
deleted file mode 100644
index f0b6cf2..0000000
--- a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#pragma once
-#include "aacdecoder_lib.h"
-
-typedef enum
-{
-    DRC_PRES_MODE_WRAP_DESIRED_TARGET         = 0x0000,
-    DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR     = 0x0001,
-    DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR   = 0x0002,
-    DRC_PRES_MODE_WRAP_DESIRED_HEAVY          = 0x0003,
-    DRC_PRES_MODE_WRAP_ENCODER_TARGET         = 0x0004
-} DRC_PRES_MODE_WRAP_PARAM;
-
-
-class CDrcPresModeWrapper {
-public:
-    CDrcPresModeWrapper();
-    ~CDrcPresModeWrapper();
-    void setDecoderHandle(const HANDLE_AACDECODER handle);
-    void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value);
-    void submitStreamData(CStreamInfo*);
-    void update();
-
-protected:
-    HANDLE_AACDECODER mHandleDecoder;
-    int mDesTarget;
-    int mDesAttFactor;
-    int mDesBoostFactor;
-    int mDesHeavy;
-
-    int mEncoderTarget;
-
-    int mLastTarget;
-    int mLastAttFactor;
-    int mLastBoostFactor;
-    int mLastHeavy;
-
-    SCHAR mStreamPRL;
-    SCHAR mStreamDRCPresMode;
-    INT mStreamNrAACChan;
-    INT mStreamNrOutChan;
-
-    bool mIsDownmix;
-    bool mIsMonoDownmix;
-    bool mIsStereoDownmix;
-
-    bool mDataUpdate;
-};
diff --git a/media/libstagefright/codecs/aacdec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/aacdec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/aacdec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/aacdec/NOTICE b/media/libstagefright/codecs/aacdec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/aacdec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
deleted file mode 100644
index 92ec94f..0000000
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ /dev/null
@@ -1,1255 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAAC2"
-#include <utils/Log.h>
-
-#include "SoftAAC2.h"
-#include <OMX_AudioExt.h>
-#include <OMX_IndexExt.h>
-
-#include <cutils/properties.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <utils/misc.h>
-
-#include <math.h>
-
-#define FILEREAD_MAX_LAYERS 2
-
-#define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3  /* MPEG-D DRC effect type; 3 => Limited playback range */
-#define DRC_DEFAULT_MOBILE_DRC_ALBUM 0  /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
-#define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS -1 /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
-#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
-#define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
-// names of properties that can be used to override the default DRC settings
-#define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
-#define PROP_DRC_OVERRIDE_CUT        "aac_drc_cut"
-#define PROP_DRC_OVERRIDE_BOOST      "aac_drc_boost"
-#define PROP_DRC_OVERRIDE_HEAVY      "aac_drc_heavy"
-#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
-#define PROP_DRC_OVERRIDE_EFFECT     "ro.aac_drc_effect_type"
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-static const OMX_U32 kSupportedProfiles[] = {
-    OMX_AUDIO_AACObjectLC,
-    OMX_AUDIO_AACObjectHE,
-    OMX_AUDIO_AACObjectHE_PS,
-    OMX_AUDIO_AACObjectLD,
-    OMX_AUDIO_AACObjectELD,
-    OMX_AUDIO_AACObjectER_Scalable,
-    OMX_AUDIO_AACObjectXHE,
-};
-
-SoftAAC2::SoftAAC2(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mAACDecoder(NULL),
-      mStreamInfo(NULL),
-      mIsADTS(false),
-      mInputBufferCount(0),
-      mOutputBufferCount(0),
-      mSignalledError(false),
-      mLastInHeader(NULL),
-      mOutputPortSettingsChange(NONE) {
-    initPorts();
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftAAC2::~SoftAAC2() {
-    aacDecoder_Close(mAACDecoder);
-    delete[] mOutputDelayRingBuffer;
-}
-
-void SoftAAC2::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumInputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/aac");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumOutputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 4096 * MAX_CHANNEL_COUNT;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-status_t SoftAAC2::initDecoder() {
-    ALOGV("initDecoder()");
-    status_t status = UNKNOWN_ERROR;
-    mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
-    if (mAACDecoder != NULL) {
-        mStreamInfo = aacDecoder_GetStreamInfo(mAACDecoder);
-        if (mStreamInfo != NULL) {
-            status = OK;
-        }
-    }
-
-    mEndOfInput = false;
-    mEndOfOutput = false;
-    mOutputDelayCompensated = 0;
-    mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
-    mOutputDelayRingBuffer = new int16_t[mOutputDelayRingBufferSize];
-    mOutputDelayRingBufferWritePos = 0;
-    mOutputDelayRingBufferReadPos = 0;
-    mOutputDelayRingBufferFilled = 0;
-
-    if (mAACDecoder == NULL) {
-        ALOGE("AAC decoder is null. TODO: Can not call aacDecoder_SetParam in the following code");
-    }
-
-    //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
-
-    //init DRC wrapper
-    mDrcWrap.setDecoderHandle(mAACDecoder);
-    mDrcWrap.submitStreamData(mStreamInfo);
-
-    // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
-    // TODO: change the DRC settings depending on audio output device type (HDMI, loadspeaker, headphone)
-    char value[PROPERTY_VALUE_MAX];
-    //  DRC_PRES_MODE_WRAP_DESIRED_TARGET
-    if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
-        unsigned refLevel = atoi(value);
-        ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
-                DRC_DEFAULT_MOBILE_REF_LEVEL);
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
-    } else {
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
-    }
-    //  DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
-    if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
-        unsigned cut = atoi(value);
-        ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
-                DRC_DEFAULT_MOBILE_DRC_CUT);
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
-    } else {
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
-    }
-    //  DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
-    if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
-        unsigned boost = atoi(value);
-        ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
-                DRC_DEFAULT_MOBILE_DRC_BOOST);
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
-    } else {
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
-    }
-    //  DRC_PRES_MODE_WRAP_DESIRED_HEAVY
-    if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
-        unsigned heavy = atoi(value);
-        ALOGV("AAC decoder using desried DRC heavy compression switch of %d instead of %d", heavy,
-                DRC_DEFAULT_MOBILE_DRC_HEAVY);
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
-    } else {
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
-    }
-    // DRC_PRES_MODE_WRAP_ENCODER_TARGET
-    if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
-        unsigned encoderRefLevel = atoi(value);
-        ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
-                encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
-    } else {
-        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
-    }
-    // AAC_UNIDRC_SET_EFFECT
-    int32_t effectType =
-            property_get_int32(PROP_DRC_OVERRIDE_EFFECT, DRC_DEFAULT_MOBILE_DRC_EFFECT);
-    if (effectType < -1 || effectType > 8) {
-        effectType = DRC_DEFAULT_MOBILE_DRC_EFFECT;
-    }
-    ALOGV("AAC decoder using MPEG-D DRC effect type %d (default=%d)",
-            effectType, DRC_DEFAULT_MOBILE_DRC_EFFECT);
-    aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, effectType);
-    // AAC_UNIDRC_ALBUM_MODE
-    int32_t albumMode = DRC_DEFAULT_MOBILE_DRC_ALBUM;
-    ALOGV("AAC decoder using MPEG-D Album mode value %d (default=%d)", albumMode,
-            DRC_DEFAULT_MOBILE_DRC_ALBUM);
-    aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE, albumMode);
-
-    // By default, the decoder creates a 5.1 channel downmix signal.
-    // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
-    aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
-
-    mDrcCompressMode = DRC_DEFAULT_MOBILE_DRC_HEAVY;
-    mDrcTargetRefLevel = DRC_DEFAULT_MOBILE_REF_LEVEL;
-    mDrcEncTargetLevel = DRC_DEFAULT_MOBILE_ENC_LEVEL;
-    mDrcBoostFactor = DRC_DEFAULT_MOBILE_DRC_BOOST;
-    mDrcAttenuationFactor = DRC_DEFAULT_MOBILE_DRC_CUT;
-    mDrcEffectType = DRC_DEFAULT_MOBILE_DRC_EFFECT;
-    mDrcAlbumMode = DRC_DEFAULT_MOBILE_DRC_ALBUM;
-    mDrcOutputLoudness = DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS;
-
-    return status;
-}
-
-OMX_ERRORTYPE SoftAAC2::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch ((OMX_U32) index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingAAC : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac:
-        {
-            OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
-                (OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            aacParams->nBitRate = 0;
-            aacParams->nAudioBandWidth = 0;
-            aacParams->nAACtools = 0;
-            aacParams->nAACERtools = 0;
-            aacParams->eAACProfile = OMX_AUDIO_AACObjectMain;
-
-            aacParams->eAACStreamFormat =
-                mIsADTS
-                    ? OMX_AUDIO_AACStreamFormatMP4ADTS
-                    : OMX_AUDIO_AACStreamFormatMP4FF;
-
-            aacParams->eChannelMode = OMX_AUDIO_ChannelModeStereo;
-
-            if (!isConfigured()) {
-                aacParams->nChannels = 1;
-                aacParams->nSampleRate = 44100;
-                aacParams->nFrameLength = 0;
-            } else {
-                aacParams->nChannels = mStreamInfo->numChannels;
-                aacParams->nSampleRate = mStreamInfo->sampleRate;
-                aacParams->nFrameLength = mStreamInfo->frameSize;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-            pcmParams->eChannelMapping[2] = OMX_AUDIO_ChannelCF;
-            pcmParams->eChannelMapping[3] = OMX_AUDIO_ChannelLFE;
-            pcmParams->eChannelMapping[4] = OMX_AUDIO_ChannelLS;
-            pcmParams->eChannelMapping[5] = OMX_AUDIO_ChannelRS;
-
-            if (!isConfigured()) {
-                pcmParams->nChannels = 1;
-                pcmParams->nSamplingRate = 44100;
-            } else {
-                pcmParams->nChannels = mStreamInfo->numChannels;
-                pcmParams->nSamplingRate = mStreamInfo->sampleRate;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioProfileQuerySupported:
-        {
-            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
-                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
-
-            if (!isValidOMXParam(profileParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (profileParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
-                return OMX_ErrorNoMore;
-            }
-
-            profileParams->eProfile =
-                kSupportedProfiles[profileParams->nProfileIndex];
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAndroidAacDrcPresentation:
-        {
-             OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
-                    (OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
-
-            ALOGD("get OMX_IndexParamAudioAndroidAacDrcPresentation");
-
-            if (!isValidOMXParam(aacPresParams)) {
-                return OMX_ErrorBadParameter;
-            }
-            aacPresParams->nDrcEffectType = mDrcEffectType;
-            aacPresParams->nDrcAlbumMode = mDrcAlbumMode;
-            aacPresParams->nDrcBoost =  mDrcBoostFactor;
-            aacPresParams->nDrcCut = mDrcAttenuationFactor;
-            aacPresParams->nHeavyCompression = mDrcCompressMode;
-            aacPresParams->nTargetReferenceLevel = mDrcTargetRefLevel;
-            aacPresParams->nEncodedTargetLevel = mDrcEncTargetLevel;
-            aacPresParams ->nDrcOutputLoudness = mDrcOutputLoudness;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAAC2::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch ((int)index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.aac",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingAAC)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac:
-        {
-            const OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
-                (const OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (aacParams->eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4FF) {
-                mIsADTS = false;
-            } else if (aacParams->eAACStreamFormat
-                        == OMX_AUDIO_AACStreamFormatMP4ADTS) {
-                mIsADTS = true;
-            } else {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAndroidAacDrcPresentation:
-        {
-            const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
-                    (const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
-
-            if (!isValidOMXParam(aacPresParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            // for the following parameters of the OMX_AUDIO_PARAM_AACPROFILETYPE structure,
-            // a value of -1 implies the parameter is not set by the application:
-            //   nMaxOutputChannels     -1 by default 
-            //   nDrcCut                uses default platform properties, see initDecoder()
-            //   nDrcBoost                idem
-            //   nHeavyCompression        idem
-            //   nTargetReferenceLevel    idem
-            //   nEncodedTargetLevel      idem
-            if (aacPresParams->nMaxOutputChannels >= 0) {
-                int max;
-                if (aacPresParams->nMaxOutputChannels >= 8) { max = 8; }
-                else if (aacPresParams->nMaxOutputChannels >= 6) { max = 6; }
-                else if (aacPresParams->nMaxOutputChannels >= 2) { max = 2; }
-                else {
-                    // -1 or 0: disable downmix,  1: mono
-                    max = aacPresParams->nMaxOutputChannels;
-                }
-                ALOGV("set nMaxOutputChannels=%d", max);
-                aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, max);
-            }
-            if (aacPresParams->nDrcEffectType >= -1) {
-                ALOGV("set nDrcEffectType=%d", aacPresParams->nDrcEffectType);
-                aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, aacPresParams->nDrcEffectType);
-                mDrcEffectType = aacPresParams->nDrcEffectType;
-            }
-            if (aacPresParams->nDrcAlbumMode >= -1) {
-                ALOGV("set nDrcAlbumMode=%d", aacPresParams->nDrcAlbumMode);
-                aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE,
-                        aacPresParams->nDrcAlbumMode);
-                mDrcAlbumMode = aacPresParams->nDrcAlbumMode;
-            }
-            bool updateDrcWrapper = false;
-            if (aacPresParams->nDrcBoost >= 0) {
-                ALOGV("set nDrcBoost=%d", aacPresParams->nDrcBoost);
-                mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
-                        aacPresParams->nDrcBoost);
-                updateDrcWrapper = true;
-                mDrcBoostFactor = aacPresParams->nDrcBoost;
-            }
-            if (aacPresParams->nDrcCut >= 0) {
-                ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
-                mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
-                updateDrcWrapper = true;
-                mDrcAttenuationFactor = aacPresParams->nDrcCut;
-            }
-            if (aacPresParams->nHeavyCompression >= 0) {
-                ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
-                mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
-                        aacPresParams->nHeavyCompression);
-                updateDrcWrapper = true;
-                mDrcCompressMode = aacPresParams->nHeavyCompression;
-            }
-            if (aacPresParams->nTargetReferenceLevel >= -1) {
-                ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
-                mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
-                        aacPresParams->nTargetReferenceLevel);
-                updateDrcWrapper = true;
-                mDrcTargetRefLevel = aacPresParams->nTargetReferenceLevel;
-            }
-            if (aacPresParams->nEncodedTargetLevel >= 0) {
-                ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
-                mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
-                        aacPresParams->nEncodedTargetLevel);
-                updateDrcWrapper = true;
-                mDrcEncTargetLevel = aacPresParams->nEncodedTargetLevel;
-            }
-            if (aacPresParams->nPCMLimiterEnable >= 0) {
-                aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE,
-                        (aacPresParams->nPCMLimiterEnable != 0));
-            }
-            if (aacPresParams ->nDrcOutputLoudness != DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS) {
-                mDrcOutputLoudness = aacPresParams ->nDrcOutputLoudness;
-            }
-            if (updateDrcWrapper) {
-                mDrcWrap.update();
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftAAC2::isConfigured() const {
-    return mInputBufferCount > 0;
-}
-
-bool SoftAAC2::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
-    if (numSamples == 0) {
-        return true;
-    }
-    if (outputDelayRingBufferSpaceLeft() < numSamples) {
-        ALOGE("RING BUFFER WOULD OVERFLOW");
-        return false;
-    }
-    if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
-            && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
-                    || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
-        // faster memcopy loop without checks, if the preconditions allow this
-        for (int32_t i = 0; i < numSamples; i++) {
-            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
-        }
-
-        if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
-            mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
-        }
-    } else {
-        ALOGV("slow SoftAAC2::outputDelayRingBufferPutSamples()");
-
-        for (int32_t i = 0; i < numSamples; i++) {
-            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
-            mOutputDelayRingBufferWritePos++;
-            if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
-                mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
-            }
-        }
-    }
-    mOutputDelayRingBufferFilled += numSamples;
-    return true;
-}
-
-int32_t SoftAAC2::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
-
-    if (numSamples > mOutputDelayRingBufferFilled) {
-        ALOGE("RING BUFFER WOULD UNDERRUN");
-        return -1;
-    }
-
-    if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
-            && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
-                    || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
-        // faster memcopy loop without checks, if the preconditions allow this
-        if (samples != 0) {
-            for (int32_t i = 0; i < numSamples; i++) {
-                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
-            }
-        } else {
-            mOutputDelayRingBufferReadPos += numSamples;
-        }
-        if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
-            mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
-        }
-    } else {
-        ALOGV("slow SoftAAC2::outputDelayRingBufferGetSamples()");
-
-        for (int32_t i = 0; i < numSamples; i++) {
-            if (samples != 0) {
-                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
-            }
-            mOutputDelayRingBufferReadPos++;
-            if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
-                mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
-            }
-        }
-    }
-    mOutputDelayRingBufferFilled -= numSamples;
-    return numSamples;
-}
-
-int32_t SoftAAC2::outputDelayRingBufferSamplesAvailable() {
-    return mOutputDelayRingBufferFilled;
-}
-
-int32_t SoftAAC2::outputDelayRingBufferSpaceLeft() {
-    return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
-}
-
-
-void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
-    UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
-    UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
-        if (!inQueue.empty()) {
-            INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-            mEndOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
-
-            if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
-                ALOGE("first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
-                inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
-            }
-            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
-                BufferInfo *inInfo = *inQueue.begin();
-                OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-                inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
-                inBufferLength[0] = inHeader->nFilledLen;
-
-                AAC_DECODER_ERROR decoderErr =
-                    aacDecoder_ConfigRaw(mAACDecoder,
-                                         inBuffer,
-                                         inBufferLength);
-
-                if (decoderErr != AAC_DEC_OK) {
-                    ALOGW("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
-                    return;
-                }
-
-                mInputBufferCount++;
-                mOutputBufferCount++; // fake increase of outputBufferCount to keep the counters aligned
-
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                mLastInHeader = NULL;
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-
-                // Only send out port settings changed event if both sample rate
-                // and numChannels are valid.
-                if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
-                    ALOGI("Initially configuring decoder: %d Hz, %d channels",
-                        mStreamInfo->sampleRate,
-                        mStreamInfo->numChannels);
-
-                    notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                    mOutputPortSettingsChange = AWAITING_DISABLED;
-                }
-                return;
-            }
-
-            if (inHeader->nFilledLen == 0) {
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                mLastInHeader = NULL;
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-                continue;
-            }
-
-            if (mIsADTS) {
-                size_t adtsHeaderSize = 0;
-                // skip 30 bits, aac_frame_length follows.
-                // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
-
-                const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
-
-                bool signalError = false;
-                if (inHeader->nFilledLen < 7) {
-                    ALOGE("Audio data too short to contain even the ADTS header. "
-                            "Got %d bytes.", inHeader->nFilledLen);
-                    hexdump(adtsHeader, inHeader->nFilledLen);
-                    signalError = true;
-                } else {
-                    bool protectionAbsent = (adtsHeader[1] & 1);
-
-                    unsigned aac_frame_length =
-                        ((adtsHeader[3] & 3) << 11)
-                        | (adtsHeader[4] << 3)
-                        | (adtsHeader[5] >> 5);
-
-                    if (inHeader->nFilledLen < aac_frame_length) {
-                        ALOGE("Not enough audio data for the complete frame. "
-                                "Got %d bytes, frame size according to the ADTS "
-                                "header is %u bytes.",
-                                inHeader->nFilledLen, aac_frame_length);
-                        hexdump(adtsHeader, inHeader->nFilledLen);
-                        signalError = true;
-                    } else {
-                        adtsHeaderSize = (protectionAbsent ? 7 : 9);
-                        if (aac_frame_length < adtsHeaderSize) {
-                            signalError = true;
-                        } else {
-                            inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
-                            inBufferLength[0] = aac_frame_length - adtsHeaderSize;
-
-                            inHeader->nOffset += adtsHeaderSize;
-                            inHeader->nFilledLen -= adtsHeaderSize;
-                        }
-                    }
-                }
-
-                if (signalError) {
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
-                    return;
-                }
-
-                // insert buffer size and time stamp
-                mBufferSizes.add(inBufferLength[0]);
-                if (mLastInHeader != inHeader) {
-                    mBufferTimestamps.add(inHeader->nTimeStamp);
-                    mLastInHeader = inHeader;
-                } else {
-                    int64_t currentTime = mBufferTimestamps.top();
-                    currentTime += mStreamInfo->aacSamplesPerFrame *
-                            1000000LL / mStreamInfo->aacSampleRate;
-                    mBufferTimestamps.add(currentTime);
-                }
-            } else {
-                inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
-                inBufferLength[0] = inHeader->nFilledLen;
-                mLastInHeader = inHeader;
-                mBufferTimestamps.add(inHeader->nTimeStamp);
-                mBufferSizes.add(inHeader->nFilledLen);
-            }
-
-            // Fill and decode
-            bytesValid[0] = inBufferLength[0];
-
-            INT prevSampleRate = mStreamInfo->sampleRate;
-            INT prevNumChannels = mStreamInfo->numChannels;
-
-            aacDecoder_Fill(mAACDecoder,
-                            inBuffer,
-                            inBufferLength,
-                            bytesValid);
-
-            // run DRC check
-            mDrcWrap.submitStreamData(mStreamInfo);
-            mDrcWrap.update();
-
-            UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
-            inHeader->nFilledLen -= inBufferUsedLength;
-            inHeader->nOffset += inBufferUsedLength;
-
-            AAC_DECODER_ERROR decoderErr;
-            int numLoops = 0;
-            do {
-                if (outputDelayRingBufferSpaceLeft() <
-                        (mStreamInfo->frameSize * mStreamInfo->numChannels)) {
-                    ALOGV("skipping decode: not enough space left in ringbuffer");
-                    break;
-                }
-
-                int numConsumed = mStreamInfo->numTotalBytes;
-                decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
-                                           tmpOutBuffer,
-                                           2048 * MAX_CHANNEL_COUNT,
-                                           0 /* flags */);
-
-                numConsumed = mStreamInfo->numTotalBytes - numConsumed;
-                numLoops++;
-
-                if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
-                    break;
-                }
-                mDecodedSizes.add(numConsumed);
-
-                if (decoderErr != AAC_DEC_OK) {
-                    ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
-                }
-
-                if (bytesValid[0] != 0) {
-                    ALOGE("bytesValid[0] != 0 should never happen");
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-
-                size_t numOutBytes =
-                    mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
-
-                if (decoderErr == AAC_DEC_OK) {
-                    if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
-                            mStreamInfo->frameSize * mStreamInfo->numChannels)) {
-                        mSignalledError = true;
-                        notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
-                        return;
-                    }
-                } else {
-                    ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
-
-                    memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
-
-                    if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
-                            mStreamInfo->frameSize * mStreamInfo->numChannels)) {
-                        mSignalledError = true;
-                        notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
-                        return;
-                    }
-
-                    // Discard input buffer.
-                    if (inHeader) {
-                        inHeader->nFilledLen = 0;
-                    }
-
-                    aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
-
-                    // After an error, replace the last entry in mBufferSizes with the sum of the
-                    // last <numLoops> entries from mDecodedSizes to resynchronize the in/out lists.
-                    mBufferSizes.pop();
-                    int n = 0;
-                    for (int i = 0; i < numLoops; i++) {
-                        n += mDecodedSizes.itemAt(mDecodedSizes.size() - numLoops + i);
-                    }
-                    mBufferSizes.add(n);
-
-                    // fall through
-                }
-
-                if ( mDrcOutputLoudness != mStreamInfo->outputLoudness) {
-                    ALOGD("update Loudness, before = %d, now = %d", mDrcOutputLoudness, mStreamInfo->outputLoudness);
-                    mDrcOutputLoudness = mStreamInfo->outputLoudness;
-                }
-
-                /*
-                 * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
-                 * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
-                 * rate system and the sampling rate in the final output is actually
-                 * doubled compared with the core AAC decoder sampling rate.
-                 *
-                 * Explicit signalling is done by explicitly defining SBR audio object
-                 * type in the bitstream. Implicit signalling is done by embedding
-                 * SBR content in AAC extension payload specific to SBR, and hence
-                 * requires an AAC decoder to perform pre-checks on actual audio frames.
-                 *
-                 * Thus, we could not say for sure whether a stream is
-                 * AAC+/eAAC+ until the first data frame is decoded.
-                 */
-                if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
-                    if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
-                        ALOGW("Invalid AAC stream");
-                        mSignalledError = true;
-                        notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
-                        return;
-                    }
-                } else if ((mStreamInfo->sampleRate != prevSampleRate) ||
-                           (mStreamInfo->numChannels != prevNumChannels)) {
-                    ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
-                          prevSampleRate, mStreamInfo->sampleRate,
-                          prevNumChannels, mStreamInfo->numChannels);
-
-                    notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                    mOutputPortSettingsChange = AWAITING_DISABLED;
-
-                    if (inHeader && inHeader->nFilledLen == 0) {
-                        inInfo->mOwnedByUs = false;
-                        mInputBufferCount++;
-                        inQueue.erase(inQueue.begin());
-                        mLastInHeader = NULL;
-                        inInfo = NULL;
-                        notifyEmptyBufferDone(inHeader);
-                        inHeader = NULL;
-                    }
-                    return;
-                }
-                if (inHeader && inHeader->nFilledLen == 0) {
-                    inInfo->mOwnedByUs = false;
-                    mInputBufferCount++;
-                    inQueue.erase(inQueue.begin());
-                    mLastInHeader = NULL;
-                    inInfo = NULL;
-                    notifyEmptyBufferDone(inHeader);
-                    inHeader = NULL;
-                } else {
-                    ALOGV("inHeader->nFilledLen = %d", inHeader ? inHeader->nFilledLen : 0);
-                }
-            } while (decoderErr == AAC_DEC_OK);
-        }
-
-        int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
-
-        if (!mEndOfInput && mOutputDelayCompensated < outputDelay) {
-            // discard outputDelay at the beginning
-            int32_t toCompensate = outputDelay - mOutputDelayCompensated;
-            int32_t discard = outputDelayRingBufferSamplesAvailable();
-            if (discard > toCompensate) {
-                discard = toCompensate;
-            }
-            int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
-            mOutputDelayCompensated += discarded;
-            continue;
-        }
-
-        if (mEndOfInput) {
-            while (mOutputDelayCompensated > 0) {
-                // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
-                INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
-
-                // run DRC check
-                mDrcWrap.submitStreamData(mStreamInfo);
-                mDrcWrap.update();
-
-                AAC_DECODER_ERROR decoderErr =
-                    aacDecoder_DecodeFrame(mAACDecoder,
-                                           tmpOutBuffer,
-                                           2048 * MAX_CHANNEL_COUNT,
-                                           AACDEC_FLUSH);
-                if (decoderErr != AAC_DEC_OK) {
-                    ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
-                }
-
-                int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
-                if (tmpOutBufferSamples > mOutputDelayCompensated) {
-                    tmpOutBufferSamples = mOutputDelayCompensated;
-                }
-                outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
-                mOutputDelayCompensated -= tmpOutBufferSamples;
-            }
-        }
-
-        while (!outQueue.empty()
-                && outputDelayRingBufferSamplesAvailable()
-                        >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
-            BufferInfo *outInfo = *outQueue.begin();
-            OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-            if (outHeader->nOffset != 0) {
-                ALOGE("outHeader->nOffset != 0 is not handled");
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-
-            INT_PCM *outBuffer =
-                    reinterpret_cast<INT_PCM *>(outHeader->pBuffer + outHeader->nOffset);
-            int samplesize = mStreamInfo->numChannels * sizeof(int16_t);
-            if (outHeader->nOffset
-                    + mStreamInfo->frameSize * samplesize
-                    > outHeader->nAllocLen) {
-                ALOGE("buffer overflow");
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-
-            }
-
-            int available = outputDelayRingBufferSamplesAvailable();
-            int numSamples = outHeader->nAllocLen / sizeof(int16_t);
-            if (numSamples > available) {
-                numSamples = available;
-            }
-            int64_t currentTime = 0;
-            if (available) {
-
-                int numFrames = numSamples / (mStreamInfo->frameSize * mStreamInfo->numChannels);
-                numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels);
-
-                ALOGV("%d samples available (%d), or %d frames",
-                        numSamples, available, numFrames);
-                int64_t *nextTimeStamp = &mBufferTimestamps.editItemAt(0);
-                currentTime = *nextTimeStamp;
-                int32_t *currentBufLeft = &mBufferSizes.editItemAt(0);
-                for (int i = 0; i < numFrames; i++) {
-                    int32_t decodedSize = mDecodedSizes.itemAt(0);
-                    mDecodedSizes.removeAt(0);
-                    ALOGV("decoded %d of %d", decodedSize, *currentBufLeft);
-                    if (*currentBufLeft > decodedSize) {
-                        // adjust/interpolate next time stamp
-                        *currentBufLeft -= decodedSize;
-                        *nextTimeStamp += mStreamInfo->aacSamplesPerFrame *
-                                1000000LL / mStreamInfo->aacSampleRate;
-                        ALOGV("adjusted nextTimeStamp/size to %lld/%d",
-                                (long long) *nextTimeStamp, *currentBufLeft);
-                    } else {
-                        // move to next timestamp in list
-                        if (mBufferTimestamps.size() > 0) {
-                            mBufferTimestamps.removeAt(0);
-                            nextTimeStamp = &mBufferTimestamps.editItemAt(0);
-                            mBufferSizes.removeAt(0);
-                            currentBufLeft = &mBufferSizes.editItemAt(0);
-                            ALOGV("moved to next time/size: %lld/%d",
-                                    (long long) *nextTimeStamp, *currentBufLeft);
-                        }
-                        // try to limit output buffer size to match input buffers
-                        // (e.g when an input buffer contained 4 "sub" frames, output
-                        // at most 4 decoded units in the corresponding output buffer)
-                        // This is optional. Remove the next three lines to fill the output
-                        // buffer with as many units as available.
-                        numFrames = i + 1;
-                        numSamples = numFrames * mStreamInfo->frameSize * mStreamInfo->numChannels;
-                        break;
-                    }
-                }
-
-                ALOGV("getting %d from ringbuffer", numSamples);
-                int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples);
-                if (ns != numSamples) {
-                    ALOGE("not a complete frame of samples available");
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-            }
-
-            outHeader->nFilledLen = numSamples * sizeof(int16_t);
-
-            if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
-                outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                mEndOfOutput = true;
-            } else {
-                outHeader->nFlags = 0;
-            }
-
-            outHeader->nTimeStamp = currentTime;
-
-            mOutputBufferCount++;
-            outInfo->mOwnedByUs = false;
-            outQueue.erase(outQueue.begin());
-            outInfo = NULL;
-            ALOGV("out timestamp %lld / %d", outHeader->nTimeStamp, outHeader->nFilledLen);
-            notifyFillBufferDone(outHeader);
-            outHeader = NULL;
-        }
-
-        if (mEndOfInput) {
-            int ringBufAvail = outputDelayRingBufferSamplesAvailable();
-            if (!outQueue.empty()
-                    && ringBufAvail < mStreamInfo->frameSize * mStreamInfo->numChannels) {
-                if (!mEndOfOutput) {
-                    // send partial or empty block signaling EOS
-                    mEndOfOutput = true;
-                    BufferInfo *outInfo = *outQueue.begin();
-                    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-                    INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
-                            + outHeader->nOffset);
-                    int32_t ns = outputDelayRingBufferGetSamples(outBuffer, ringBufAvail);
-                    if (ns < 0) {
-                        ns = 0;
-                    }
-                    outHeader->nFilledLen = ns;
-                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-                    outHeader->nTimeStamp = mBufferTimestamps.itemAt(0);
-                    mBufferTimestamps.clear();
-                    mBufferSizes.clear();
-                    mDecodedSizes.clear();
-
-                    mOutputBufferCount++;
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                }
-                break; // if outQueue not empty but no more output
-            }
-        }
-    }
-}
-
-void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0) {
-        // Make sure that the next buffer output does not still
-        // depend on fragments from the last one decoded.
-        // drain all existing data
-        drainDecoder();
-        mBufferTimestamps.clear();
-        mBufferSizes.clear();
-        mDecodedSizes.clear();
-        mLastInHeader = NULL;
-        mEndOfInput = false;
-    } else {
-        int avail;
-        while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
-            if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) {
-                avail = mStreamInfo->frameSize * mStreamInfo->numChannels;
-            }
-            int32_t ns = outputDelayRingBufferGetSamples(0, avail);
-            if (ns != avail) {
-                ALOGW("not a complete frame of samples available");
-                break;
-            }
-            mOutputBufferCount++;
-        }
-        mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
-        mEndOfOutput = false;
-    }
-}
-
-void SoftAAC2::drainDecoder() {
-    // flush decoder until outputDelay is compensated
-    while (mOutputDelayCompensated > 0) {
-        // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
-        INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
-
-        // run DRC check
-        mDrcWrap.submitStreamData(mStreamInfo);
-        mDrcWrap.update();
-
-        AAC_DECODER_ERROR decoderErr =
-            aacDecoder_DecodeFrame(mAACDecoder,
-                                   tmpOutBuffer,
-                                   2048 * MAX_CHANNEL_COUNT,
-                                   AACDEC_FLUSH);
-        if (decoderErr != AAC_DEC_OK) {
-            ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
-        }
-
-        int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
-        if (tmpOutBufferSamples > mOutputDelayCompensated) {
-            tmpOutBufferSamples = mOutputDelayCompensated;
-        }
-        outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
-
-        mOutputDelayCompensated -= tmpOutBufferSamples;
-    }
-}
-
-void SoftAAC2::onReset() {
-    drainDecoder();
-    // reset the "configured" state
-    mInputBufferCount = 0;
-    mOutputBufferCount = 0;
-    mOutputDelayCompensated = 0;
-    mOutputDelayRingBufferWritePos = 0;
-    mOutputDelayRingBufferReadPos = 0;
-    mOutputDelayRingBufferFilled = 0;
-    mEndOfInput = false;
-    mEndOfOutput = false;
-    mBufferTimestamps.clear();
-    mBufferSizes.clear();
-    mDecodedSizes.clear();
-    mLastInHeader = NULL;
-
-    // To make the codec behave the same before and after a reset, we need to invalidate the
-    // streaminfo struct. This does that:
-    mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
-
-    mSignalledError = false;
-    mOutputPortSettingsChange = NONE;
-}
-
-void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAAC2(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
deleted file mode 100644
index 9f98aa1..0000000
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AAC_2_H_
-#define SOFT_AAC_2_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include "aacdecoder_lib.h"
-#include "DrcPresModeWrap.h"
-
-namespace android {
-
-struct SoftAAC2 : public SimpleSoftOMXComponent {
-    SoftAAC2(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAAC2();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumInputBuffers        = 4,
-        kNumOutputBuffers       = 4,
-        kNumDelayBlocksMax      = 8,
-    };
-
-    HANDLE_AACDECODER mAACDecoder;
-    CStreamInfo *mStreamInfo;
-    bool mIsADTS;
-    bool mIsFirst;
-    size_t mInputBufferCount;
-    size_t mOutputBufferCount;
-    bool mSignalledError;
-    OMX_BUFFERHEADERTYPE *mLastInHeader;
-    Vector<int32_t> mBufferSizes;
-    Vector<int32_t> mDecodedSizes;
-    Vector<int64_t> mBufferTimestamps;
-
-    CDrcPresModeWrapper mDrcWrap;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    void initPorts();
-    status_t initDecoder();
-    bool isConfigured() const;
-    void drainDecoder();
-
-//      delay compensation
-    bool mEndOfInput;
-    bool mEndOfOutput;
-    int32_t mOutputDelayCompensated;
-    int32_t mOutputDelayRingBufferSize;
-    int16_t *mOutputDelayRingBuffer;
-    int32_t mOutputDelayRingBufferWritePos;
-    int32_t mOutputDelayRingBufferReadPos;
-    int32_t mOutputDelayRingBufferFilled;
-
-    //drc
-    int32_t mDrcCompressMode;
-    int32_t mDrcTargetRefLevel;
-    int32_t mDrcEncTargetLevel;
-    int32_t mDrcBoostFactor;
-    int32_t mDrcAttenuationFactor;
-    int32_t mDrcEffectType;
-    int32_t mDrcAlbumMode;
-    int32_t mDrcOutputLoudness;
-
-    bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
-    int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
-    int32_t outputDelayRingBufferSamplesAvailable();
-    int32_t outputDelayRingBufferSpaceLeft();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AAC_2_H_
diff --git a/media/libstagefright/codecs/aacdec/exports.lds b/media/libstagefright/codecs/aacdec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/aacdec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/aacenc/Android.bp b/media/libstagefright/codecs/aacenc/Android.bp
deleted file mode 100644
index 793125f..0000000
--- a/media/libstagefright/codecs/aacenc/Android.bp
+++ /dev/null
@@ -1,37 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_aacenc_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_aacenc_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_aacenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftAACEncoder2.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libFraunhoferAAC"],
-}
diff --git a/media/libstagefright/codecs/aacenc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/aacenc/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/aacenc/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/aacenc/NOTICE b/media/libstagefright/codecs/aacenc/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/aacenc/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
deleted file mode 100644
index 90421b9..0000000
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ /dev/null
@@ -1,740 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAACEncoder2"
-#include <log/log.h>
-#include <utils/Log.h>
-
-#include "SoftAACEncoder2.h"
-#include <OMX_AudioExt.h>
-#include <OMX_IndexExt.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <utils/misc.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-static const OMX_U32 kSupportedProfiles[] = {
-    OMX_AUDIO_AACObjectLC,
-    OMX_AUDIO_AACObjectHE,
-    OMX_AUDIO_AACObjectHE_PS,
-    OMX_AUDIO_AACObjectLD,
-    OMX_AUDIO_AACObjectELD,
-};
-
-SoftAACEncoder2::SoftAACEncoder2(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mAACEncoder(NULL),
-      mNumChannels(1),
-      mSampleRate(44100),
-      mBitRate(0),
-      mSBRMode(-1),
-      mSBRRatio(0),
-      mAACProfile(OMX_AUDIO_AACObjectLC),
-      mSentCodecSpecificData(false),
-      mInputSize(0),
-      mInputFrame(NULL),
-      mAllocatedFrameSize(0),
-      mInputTimeUs(-1LL),
-      mSawInputEOS(false),
-      mSignalledError(false) {
-    initPorts();
-    CHECK_EQ(initEncoder(), (status_t)OK);
-    setAudioParams();
-}
-
-SoftAACEncoder2::~SoftAACEncoder2() {
-    aacEncClose(&mAACEncoder);
-
-    onReset();
-}
-
-void SoftAACEncoder2::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kNumSamplesPerFrame * sizeof(int16_t) * 2;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/aac");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
-
-    addPort(def);
-}
-
-status_t SoftAACEncoder2::initEncoder() {
-    if (AACENC_OK != aacEncOpen(&mAACEncoder, 0, 0)) {
-        ALOGE("Failed to init AAC encoder");
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-OMX_ERRORTYPE SoftAACEncoder2::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch ((OMX_U32) index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingPCM : OMX_AUDIO_CodingAAC;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac:
-        {
-            OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
-                (OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            aacParams->nBitRate = mBitRate;
-            aacParams->nAudioBandWidth = 0;
-            aacParams->nAACtools = 0;
-            aacParams->nAACERtools = 0;
-            aacParams->eAACProfile = (OMX_AUDIO_AACPROFILETYPE) mAACProfile;
-            aacParams->eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
-            aacParams->eChannelMode = OMX_AUDIO_ChannelModeStereo;
-
-            aacParams->nChannels = mNumChannels;
-            aacParams->nSampleRate = mSampleRate;
-            aacParams->nFrameLength = 0;
-
-            switch (mSBRMode) {
-            case 1: // sbr on
-                switch (mSBRRatio) {
-                case 0:
-                    // set both OMX AAC tool flags
-                    aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
-                    aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
-                    break;
-                case 1:
-                    // set single-rate SBR active
-                    aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
-                    aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
-                    break;
-                case 2:
-                    // set dual-rate SBR active
-                    aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
-                    aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
-                    break;
-                default:
-                    ALOGE("invalid SBR ratio %d", mSBRRatio);
-                    TRESPASS();
-                }
-                break;
-            case 0:  // sbr off
-            case -1: // sbr undefined
-                aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
-                aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
-                break;
-            default:
-                ALOGE("invalid SBR mode %d", mSBRMode);
-                TRESPASS();
-            }
-
-
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = mNumChannels;
-            pcmParams->nSamplingRate = mSampleRate;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioProfileQuerySupported:
-        {
-            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
-                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
-
-            if (!isValidOMXParam(profileParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (profileParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
-                return OMX_ErrorNoMore;
-            }
-
-            profileParams->eProfile =
-                kSupportedProfiles[profileParams->nProfileIndex];
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAACEncoder2::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_encoder.aac",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingAAC)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac:
-        {
-            OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
-                (OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mBitRate = aacParams->nBitRate;
-            mNumChannels = aacParams->nChannels;
-            mSampleRate = aacParams->nSampleRate;
-            if (aacParams->eAACProfile != OMX_AUDIO_AACObjectNull) {
-                mAACProfile = aacParams->eAACProfile;
-            }
-
-            if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
-                    && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
-                mSBRMode = 0;
-                mSBRRatio = 0;
-            } else if ((aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
-                    && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
-                mSBRMode = 1;
-                mSBRRatio = 1;
-            } else if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
-                    && (aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
-                mSBRMode = 1;
-                mSBRRatio = 2;
-            } else {
-                mSBRMode = -1; // codec default sbr mode
-                mSBRRatio = 0;
-            }
-
-            if (setAudioParams() != OK) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            mNumChannels = pcmParams->nChannels;
-            mSampleRate = pcmParams->nSamplingRate;
-            if (setAudioParams() != OK) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-static CHANNEL_MODE getChannelMode(OMX_U32 nChannels) {
-    CHANNEL_MODE chMode = MODE_INVALID;
-    switch (nChannels) {
-        case 1: chMode = MODE_1; break;
-        case 2: chMode = MODE_2; break;
-        case 3: chMode = MODE_1_2; break;
-        case 4: chMode = MODE_1_2_1; break;
-        case 5: chMode = MODE_1_2_2; break;
-        case 6: chMode = MODE_1_2_2_1; break;
-        default: chMode = MODE_INVALID;
-    }
-    return chMode;
-}
-
-static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) {
-    if (profile == OMX_AUDIO_AACObjectLC) {
-        return AOT_AAC_LC;
-    } else if (profile == OMX_AUDIO_AACObjectHE) {
-        return AOT_SBR;
-    } else if (profile == OMX_AUDIO_AACObjectHE_PS) {
-        return AOT_PS;
-    } else if (profile == OMX_AUDIO_AACObjectLD) {
-        return AOT_ER_AAC_LD;
-    } else if (profile == OMX_AUDIO_AACObjectELD) {
-        return AOT_ER_AAC_ELD;
-    } else {
-        ALOGW("Unsupported AAC profile - defaulting to AAC-LC");
-        return AOT_AAC_LC;
-    }
-}
-
-status_t SoftAACEncoder2::setAudioParams() {
-    // We call this whenever sample rate, number of channels, bitrate or SBR mode change
-    // in reponse to setParameter calls.
-
-    ALOGV("setAudioParams: %u Hz, %u channels, %u bps, %i sbr mode, %i sbr ratio",
-         mSampleRate, mNumChannels, mBitRate, mSBRMode, mSBRRatio);
-
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_AOT,
-            getAOTFromProfile(mAACProfile))) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SAMPLERATE, mSampleRate)) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_BITRATE, mBitRate)) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CHANNELMODE,
-            getChannelMode(mNumChannels))) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_TRANSMUX, TT_MP4_RAW)) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-
-    if (mSBRMode != -1 && mAACProfile == OMX_AUDIO_AACObjectELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, mSBRMode)) {
-            ALOGE("Failed to set AAC encoder parameters");
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    /* SBR ratio parameter configurations:
-       0: Default configuration wherein SBR ratio is configured depending on audio object type by
-          the FDK.
-       1: Downsampled SBR (default for ELD)
-       2: Dualrate SBR (default for HE-AAC)
-     */
-    if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_RATIO, mSBRRatio)) {
-        ALOGE("Failed to set AAC encoder parameters");
-        return UNKNOWN_ERROR;
-    }
-
-    return OK;
-}
-
-void SoftAACEncoder2::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    if (!mSentCodecSpecificData) {
-        // The very first thing we want to output is the codec specific
-        // data. It does not require any input data but we will need an
-        // output buffer to store it in.
-
-        if (outQueue.empty()) {
-            return;
-        }
-
-        if (AACENC_OK != aacEncEncode(mAACEncoder, NULL, NULL, NULL, NULL)) {
-            ALOGE("Unable to initialize encoder for profile / sample-rate / bit-rate / channels");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        OMX_U32 actualBitRate  = aacEncoder_GetParam(mAACEncoder, AACENC_BITRATE);
-        if (mBitRate != actualBitRate) {
-            ALOGW("Requested bitrate %u unsupported, using %u", mBitRate, actualBitRate);
-        }
-
-        AACENC_InfoStruct encInfo;
-        if (AACENC_OK != aacEncInfo(mAACEncoder, &encInfo)) {
-            ALOGE("Failed to get AAC encoder info");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if (outHeader->nOffset + encInfo.confSize > outHeader->nAllocLen) {
-            ALOGE("b/34617444");
-            android_errorWriteLog(0x534e4554,"34617444");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        outHeader->nFilledLen = encInfo.confSize;
-        outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
-
-        uint8_t *out = outHeader->pBuffer + outHeader->nOffset;
-        memcpy(out, encInfo.confBuf, encInfo.confSize);
-
-        outQueue.erase(outQueue.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-
-        mSentCodecSpecificData = true;
-    }
-
-    size_t numBytesPerInputFrame =
-        mNumChannels * kNumSamplesPerFrame * sizeof(int16_t);
-
-    // Limit input size so we only get one ELD frame
-    if (mAACProfile == OMX_AUDIO_AACObjectELD && numBytesPerInputFrame > 512) {
-        numBytesPerInputFrame = 512;
-    }
-
-    for (;;) {
-        // We do the following until we run out of buffers.
-
-        while (mInputSize < numBytesPerInputFrame) {
-            // As long as there's still input data to be read we
-            // will drain "kNumSamplesPerFrame * mNumChannels" samples
-            // into the "mInputFrame" buffer and then encode those
-            // as a unit into an output buffer.
-
-            if (mSawInputEOS || inQueue.empty()) {
-                return;
-            }
-
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-            const void *inData = inHeader->pBuffer + inHeader->nOffset;
-
-            size_t copy = numBytesPerInputFrame - mInputSize;
-            if (copy > inHeader->nFilledLen) {
-                copy = inHeader->nFilledLen;
-            }
-
-            if (mInputFrame == NULL) {
-                mInputFrame = new int16_t[numBytesPerInputFrame / sizeof(int16_t)];
-                mAllocatedFrameSize = numBytesPerInputFrame;
-            } else if (mAllocatedFrameSize != numBytesPerInputFrame) {
-                ALOGE("b/34621073: changed size from %d to %d",
-                        (int)mAllocatedFrameSize, (int)numBytesPerInputFrame);
-                android_errorWriteLog(0x534e4554,"34621073");
-                delete mInputFrame;
-                mInputFrame = new int16_t[numBytesPerInputFrame / sizeof(int16_t)];
-                mAllocatedFrameSize = numBytesPerInputFrame;
-
-            }
-
-            if (mInputSize == 0) {
-                mInputTimeUs = inHeader->nTimeStamp;
-            }
-
-            memcpy((uint8_t *)mInputFrame + mInputSize, inData, copy);
-            mInputSize += copy;
-
-            inHeader->nOffset += copy;
-            inHeader->nFilledLen -= copy;
-
-            // "Time" on the input buffer has in effect advanced by the
-            // number of audio frames we just advanced nOffset by.
-            inHeader->nTimeStamp +=
-                (copy * 1000000LL / mSampleRate)
-                    / (mNumChannels * sizeof(int16_t));
-
-            if (inHeader->nFilledLen == 0) {
-                if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                    mSawInputEOS = true;
-
-                    // Pad any remaining data with zeroes.
-                    memset((uint8_t *)mInputFrame + mInputSize,
-                           0,
-                           numBytesPerInputFrame - mInputSize);
-
-                    mInputSize = numBytesPerInputFrame;
-                }
-
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-
-                inData = NULL;
-                inHeader = NULL;
-                inInfo = NULL;
-            }
-        }
-
-        // At this  point we have all the input data necessary to encode
-        // a single frame, all we need is an output buffer to store the result
-        // in.
-
-        if (outQueue.empty()) {
-            return;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        uint8_t *outPtr = (uint8_t *)outHeader->pBuffer + outHeader->nOffset;
-        size_t outAvailable = outHeader->nAllocLen - outHeader->nOffset;
-
-        AACENC_InArgs inargs;
-        AACENC_OutArgs outargs;
-        memset(&inargs, 0, sizeof(inargs));
-        memset(&outargs, 0, sizeof(outargs));
-        inargs.numInSamples = numBytesPerInputFrame / sizeof(int16_t);
-
-        void* inBuffer[]        = { (unsigned char *)mInputFrame };
-        INT   inBufferIds[]     = { IN_AUDIO_DATA };
-        INT   inBufferSize[]    = { (INT)numBytesPerInputFrame };
-        INT   inBufferElSize[]  = { sizeof(int16_t) };
-
-        AACENC_BufDesc inBufDesc;
-        inBufDesc.numBufs           = sizeof(inBuffer) / sizeof(void*);
-        inBufDesc.bufs              = (void**)&inBuffer;
-        inBufDesc.bufferIdentifiers = inBufferIds;
-        inBufDesc.bufSizes          = inBufferSize;
-        inBufDesc.bufElSizes        = inBufferElSize;
-
-        void* outBuffer[]       = { outPtr };
-        INT   outBufferIds[]    = { OUT_BITSTREAM_DATA };
-        INT   outBufferSize[]   = { 0 };
-        INT   outBufferElSize[] = { sizeof(UCHAR) };
-
-        AACENC_BufDesc outBufDesc;
-        outBufDesc.numBufs           = sizeof(outBuffer) / sizeof(void*);
-        outBufDesc.bufs              = (void**)&outBuffer;
-        outBufDesc.bufferIdentifiers = outBufferIds;
-        outBufDesc.bufSizes          = outBufferSize;
-        outBufDesc.bufElSizes        = outBufferElSize;
-
-        // Encode the mInputFrame, which is treated as a modulo buffer
-        AACENC_ERROR encoderErr = AACENC_OK;
-        size_t nOutputBytes = 0;
-
-        do {
-            memset(&outargs, 0, sizeof(outargs));
-
-            outBuffer[0] = outPtr;
-            outBufferSize[0] = outAvailable - nOutputBytes;
-
-            encoderErr = aacEncEncode(mAACEncoder,
-                                      &inBufDesc,
-                                      &outBufDesc,
-                                      &inargs,
-                                      &outargs);
-
-            if (encoderErr == AACENC_OK) {
-                outPtr += outargs.numOutBytes;
-                nOutputBytes += outargs.numOutBytes;
-
-                if (outargs.numInSamples > 0) {
-                    int numRemainingSamples = inargs.numInSamples - outargs.numInSamples;
-                    if (numRemainingSamples > 0) {
-                        memmove(mInputFrame,
-                                &mInputFrame[outargs.numInSamples],
-                                sizeof(int16_t) * numRemainingSamples);
-                    }
-                    inargs.numInSamples -= outargs.numInSamples;
-                }
-            }
-        } while (encoderErr == AACENC_OK && inargs.numInSamples > 0);
-
-        outHeader->nFilledLen = nOutputBytes;
-
-        outHeader->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
-
-        if (mSawInputEOS) {
-            // We also tag this output buffer with EOS if it corresponds
-            // to the final input buffer.
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        }
-
-        outHeader->nTimeStamp = mInputTimeUs;
-
-#if 0
-        ALOGI("sending %d bytes of data (time = %lld us, flags = 0x%08lx)",
-              nOutputBytes, mInputTimeUs, outHeader->nFlags);
-
-        hexdump(outHeader->pBuffer + outHeader->nOffset, outHeader->nFilledLen);
-#endif
-
-        outQueue.erase(outQueue.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-
-        outHeader = NULL;
-        outInfo = NULL;
-
-        mInputSize = 0;
-    }
-}
-
-void SoftAACEncoder2::onReset() {
-    delete[] mInputFrame;
-    mInputFrame = NULL;
-    mInputSize = 0;
-    mAllocatedFrameSize = 0;
-
-    mSentCodecSpecificData = false;
-    mInputTimeUs = -1LL;
-    mSawInputEOS = false;
-    mSignalledError = false;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAACEncoder2(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
deleted file mode 100644
index 681dcf2..0000000
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AAC_ENCODER_2_H_
-
-#define SOFT_AAC_ENCODER_2_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include "aacenc_lib.h"
-
-namespace android {
-
-struct SoftAACEncoder2 : public SimpleSoftOMXComponent {
-    SoftAACEncoder2(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAACEncoder2();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers             = 4,
-        kNumSamplesPerFrame     = 1024
-    };
-
-    HANDLE_AACENCODER mAACEncoder;
-
-    OMX_U32 mNumChannels;
-    OMX_U32 mSampleRate;
-    OMX_U32 mBitRate;
-    OMX_S32 mSBRMode;
-    OMX_S32 mSBRRatio;
-    OMX_U32 mAACProfile;
-
-    bool mSentCodecSpecificData;
-    size_t mInputSize;
-    int16_t *mInputFrame;
-    size_t mAllocatedFrameSize;
-    int64_t mInputTimeUs;
-
-    bool mSawInputEOS;
-
-    bool mSignalledError;
-
-    void initPorts();
-    status_t initEncoder();
-
-    status_t setAudioParams();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAACEncoder2);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AAC_ENCODER_2_H_
diff --git a/media/libstagefright/codecs/aacenc/exports.lds b/media/libstagefright/codecs/aacenc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/aacenc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/aacenc/patent_disclaimer.txt b/media/libstagefright/codecs/aacenc/patent_disclaimer.txt
deleted file mode 100644
index b4bf11d..0000000
--- a/media/libstagefright/codecs/aacenc/patent_disclaimer.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-
-THIS IS NOT A GRANT OF PATENT RIGHTS.
-
-Google makes no representation or warranty that the codecs for which
-source code is made available hereunder are unencumbered by
-third-party patents.  Those intending to use this source code in
-hardware or software products are advised that implementations of
-these codecs, including in open source software or shareware, may
-require patent licenses from the relevant patent holders.
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.bp b/media/libstagefright/codecs/amrnb/dec/Android.bp
deleted file mode 100644
index 2c0954d..0000000
--- a/media/libstagefright/codecs/amrnb/dec/Android.bp
+++ /dev/null
@@ -1,39 +0,0 @@
-//###############################################################################
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_amrdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftAMR.cpp"],
-
-    cflags: [
-        "-DOSCL_IMPORT_REF=",
-    ],
-
-    version_script: "exports.lds",
-
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-    //LOCAL_SANITIZE := signed-integer-overflow
-
-    static_libs: [
-        "libstagefright_amrnbdec",
-        "libstagefright_amrwbdec",
-    ],
-
-    shared_libs: [
-        "libstagefright_amrnb_common",
-    ],
-}
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
deleted file mode 100644
index 01da3f8..0000000
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
+++ /dev/null
@@ -1,585 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAMR"
-#include <utils/Log.h>
-
-#include "SoftAMR.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftAMR::SoftAMR(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mMode(MODE_NARROW),
-      mState(NULL),
-      mDecoderBuf(NULL),
-      mDecoderCookie(NULL),
-      mInputBufferCount(0),
-      mAnchorTimeUs(0),
-      mNumSamplesOutput(0),
-      mSignalledError(false),
-      mOutputPortSettingsChange(NONE) {
-    if (!strcmp(name, "OMX.google.amrwb.decoder")) {
-        mMode = MODE_WIDE;
-    } else {
-        CHECK(!strcmp(name, "OMX.google.amrnb.decoder"));
-    }
-
-    initPorts();
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftAMR::~SoftAMR() {
-    if (mMode == MODE_NARROW) {
-        GSMDecodeFrameExit(&mState);
-        mState = NULL;
-    } else {
-        free(mDecoderBuf);
-        mDecoderBuf = NULL;
-
-        mState = NULL;
-        mDecoderCookie = NULL;
-    }
-}
-
-void SoftAMR::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        mMode == MODE_NARROW
-            ? const_cast<char *>("audio/amr")
-            : const_cast<char *>("audio/amrwb");
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAMR;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-
-    def.nBufferSize =
-        (mMode == MODE_NARROW ? kNumSamplesPerFrameNB : kNumSamplesPerFrameWB)
-            * sizeof(int16_t);
-
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-status_t SoftAMR::initDecoder() {
-    if (mMode == MODE_NARROW) {
-        Word16 err = GSMInitDecode(&mState, (Word8 *)"AMRNBDecoder");
-
-        if (err != 0) {
-            return UNKNOWN_ERROR;
-        }
-    } else {
-        int32_t memReq = pvDecoder_AmrWbMemRequirements();
-        mDecoderBuf = malloc(memReq);
-
-        pvDecoder_AmrWb_Init(&mState, mDecoderBuf, &mDecoderCookie);
-    }
-
-    return OK;
-}
-
-OMX_ERRORTYPE SoftAMR::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingAMR : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            OMX_AUDIO_PARAM_AMRTYPE *amrParams =
-                (OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(amrParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (amrParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            amrParams->nChannels = 1;
-            amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
-            amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
-
-            if (!isConfigured()) {
-                amrParams->nBitRate = 0;
-                amrParams->eAMRBandMode = OMX_AUDIO_AMRBandModeUnused;
-            } else {
-                amrParams->nBitRate = 0;
-                amrParams->eAMRBandMode =
-                    mMode == MODE_NARROW
-                        ? OMX_AUDIO_AMRBandModeNB0 : OMX_AUDIO_AMRBandModeWB0;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->nChannels = 1;
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-
-            pcmParams->nSamplingRate =
-                (mMode == MODE_NARROW) ? kSampleRateNB : kSampleRateWB;
-
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAMR::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (mMode == MODE_NARROW) {
-                if (strncmp((const char *)roleParams->cRole,
-                            "audio_decoder.amrnb",
-                            OMX_MAX_STRINGNAME_SIZE - 1)) {
-                    return OMX_ErrorUndefined;
-                }
-            } else {
-                if (strncmp((const char *)roleParams->cRole,
-                            "audio_decoder.amrwb",
-                            OMX_MAX_STRINGNAME_SIZE - 1)) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingAMR)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            const OMX_AUDIO_PARAM_AMRTYPE *aacParams =
-                (const OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftAMR::isConfigured() const {
-    return mInputBufferCount > 0;
-}
-
-static size_t getFrameSize(unsigned FT) {
-    static const size_t kFrameSizeWB[10] = {
-        132, 177, 253, 285, 317, 365, 397, 461, 477, 40
-    };
-
-    if (FT >= 10) {
-        return 1;
-    }
-
-    size_t frameSize = kFrameSizeWB[FT];
-
-    // Round up bits to bytes and add 1 for the header byte.
-    frameSize = (frameSize + 7) / 8 + 1;
-
-    return frameSize;
-}
-
-void SoftAMR::onQueueFilled(OMX_U32 /* portIndex */) {
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            outHeader->nFilledLen = 0;
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
-        }
-
-        if (inHeader->nFilledLen == 0) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            notifyEmptyBufferDone(inHeader);
-            continue;
-        }
-
-        if (inHeader->nOffset == 0) {
-            mAnchorTimeUs = inHeader->nTimeStamp;
-            mNumSamplesOutput = 0;
-        }
-
-        const uint8_t *inputPtr = inHeader->pBuffer + inHeader->nOffset;
-        int32_t numBytesRead;
-
-        if (mMode == MODE_NARROW) {
-            if (outHeader->nAllocLen < kNumSamplesPerFrameNB * sizeof(int16_t)) {
-                ALOGE("b/27662364: NB expected output buffer %zu bytes vs %u",
-                       kNumSamplesPerFrameNB * sizeof(int16_t), outHeader->nAllocLen);
-                android_errorWriteLog(0x534e4554, "27662364");
-                notify(OMX_EventError, OMX_ErrorOverflow, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            int16 mode = ((inputPtr[0] >> 3) & 0x0f);
-            // for WMF since MIME_IETF is used when calling AMRDecode.
-            size_t frameSize = WmfDecBytesPerFrame[mode] + 1;
-
-            if (inHeader->nFilledLen < frameSize) {
-                ALOGE("b/27662364: expected %zu bytes vs %u", frameSize, inHeader->nFilledLen);
-                notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            numBytesRead =
-                AMRDecode(mState,
-                  (Frame_Type_3GPP)((inputPtr[0] >> 3) & 0x0f),
-                  (UWord8 *)&inputPtr[1],
-                  reinterpret_cast<int16_t *>(outHeader->pBuffer),
-                  MIME_IETF);
-
-            if (numBytesRead == -1) {
-                ALOGE("PV AMR decoder AMRDecode() call failed");
-
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-
-                return;
-            }
-
-            ++numBytesRead;  // Include the frame type header byte.
-
-            if (static_cast<size_t>(numBytesRead) > inHeader->nFilledLen) {
-                // This is bad, should never have happened, but did. Abort now.
-
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-
-                return;
-            }
-        } else {
-            if (outHeader->nAllocLen < kNumSamplesPerFrameWB * sizeof(int16_t)) {
-                ALOGE("b/27662364: WB expected output buffer %zu bytes vs %u",
-                       kNumSamplesPerFrameWB * sizeof(int16_t), outHeader->nAllocLen);
-                android_errorWriteLog(0x534e4554, "27662364");
-                notify(OMX_EventError, OMX_ErrorOverflow, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            int16 mode = ((inputPtr[0] >> 3) & 0x0f);
-
-            if (mode >= 10 && mode <= 13) {
-                ALOGE("encountered illegal frame type %d in AMR WB content.",
-                      mode);
-
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-
-                return;
-            }
-
-            size_t frameSize = getFrameSize(mode);
-            if (inHeader->nFilledLen < frameSize) {
-                ALOGE("b/27662364: expected %zu bytes vs %u", frameSize, inHeader->nFilledLen);
-                notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            int16_t *outPtr = (int16_t *)outHeader->pBuffer;
-
-            if (mode >= 9) {
-                // Produce silence instead of comfort noise and for
-                // speech lost/no data.
-                memset(outPtr, 0, kNumSamplesPerFrameWB * sizeof(int16_t));
-            } else if (mode < 9) {
-                int16 frameType;
-                mime_unsorting(
-                        const_cast<uint8_t *>(&inputPtr[1]),
-                        mInputSampleBuffer,
-                        &frameType, &mode, 1, &mRxState);
-
-                int16_t numSamplesOutput;
-                pvDecoder_AmrWb(
-                        mode, mInputSampleBuffer,
-                        outPtr,
-                        &numSamplesOutput,
-                        mDecoderBuf, frameType, mDecoderCookie);
-
-                CHECK_EQ((int)numSamplesOutput, (int)kNumSamplesPerFrameWB);
-
-                for (int i = 0; i < kNumSamplesPerFrameWB; ++i) {
-                    /* Delete the 2 LSBs (14-bit output) */
-                    outPtr[i] &= 0xfffC;
-                }
-            }
-
-            numBytesRead = frameSize;
-        }
-
-        inHeader->nOffset += numBytesRead;
-        inHeader->nFilledLen -= numBytesRead;
-
-        outHeader->nFlags = 0;
-        outHeader->nOffset = 0;
-
-        if (mMode == MODE_NARROW) {
-            outHeader->nFilledLen = kNumSamplesPerFrameNB * sizeof(int16_t);
-
-            outHeader->nTimeStamp =
-                mAnchorTimeUs
-                    + (mNumSamplesOutput * 1000000LL) / kSampleRateNB;
-
-            mNumSamplesOutput += kNumSamplesPerFrameNB;
-        } else {
-            outHeader->nFilledLen = kNumSamplesPerFrameWB * sizeof(int16_t);
-
-            outHeader->nTimeStamp =
-                mAnchorTimeUs
-                    + (mNumSamplesOutput * 1000000LL) / kSampleRateWB;
-
-            mNumSamplesOutput += kNumSamplesPerFrameWB;
-        }
-
-        if (inHeader->nFilledLen == 0 && (inHeader->nFlags & OMX_BUFFERFLAG_EOS) == 0) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outInfo = NULL;
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-
-        ++mInputBufferCount;
-    }
-}
-
-void SoftAMR::onPortFlushCompleted(OMX_U32 portIndex) {
-    ALOGV("onPortFlushCompleted portindex %d, resetting frame ", portIndex);
-    if (portIndex == 0) {
-        if (mMode == MODE_NARROW) {
-           Speech_Decode_Frame_reset(mState);
-        } else {
-           pvDecoder_AmrWb_Reset(mState, 0 /* reset_all */);
-        }
-    }
-}
-
-void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-void SoftAMR::onReset() {
-    mSignalledError = false;
-    mOutputPortSettingsChange = NONE;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAMR(name, callbacks, appData, component);
-}
-
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
deleted file mode 100644
index d5aaed3..0000000
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AMR_H_
-
-#define SOFT_AMR_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-#include "gsmamr_dec.h"
-#include "pvamrwbdecoder.h"
-
-namespace android {
-
-struct SoftAMR : public SimpleSoftOMXComponent {
-    SoftAMR(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAMR();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers             = 4,
-        kSampleRateNB           = 8000,
-        kSampleRateWB           = 16000,
-        kNumSamplesPerFrameNB   = 160,
-        kNumSamplesPerFrameWB   = 320,
-    };
-
-    enum {
-        MODE_NARROW,
-        MODE_WIDE
-
-    } mMode;
-
-    void *mState;
-    void *mDecoderBuf;
-    int16_t *mDecoderCookie;
-    RX_State_wb mRxState{};
-
-    size_t mInputBufferCount;
-    int64_t mAnchorTimeUs;
-    int64_t mNumSamplesOutput;
-
-    bool mSignalledError;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    int16_t mInputSampleBuffer[477];
-
-    void initPorts();
-    status_t initDecoder();
-    bool isConfigured() const;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAMR);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AMR_H_
-
diff --git a/media/libstagefright/codecs/amrnb/dec/exports.lds b/media/libstagefright/codecs/amrnb/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/amrnb/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
deleted file mode 100644
index 6bf2d39..0000000
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ /dev/null
@@ -1,31 +0,0 @@
-
-//###############################################################################
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_amrnbenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftAMRNBEncoder.cpp"],
-
-    //addressing b/25409744
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-
-    static_libs: ["libstagefright_amrnbenc"],
-
-    shared_libs: [
-        "libstagefright_amrnb_common",
-    ],
-}
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
deleted file mode 100644
index a1f6686..0000000
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
+++ /dev/null
@@ -1,429 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAMRNBEncoder"
-#include <utils/Log.h>
-
-#include "SoftAMRNBEncoder.h"
-
-#include "gsmamr_enc.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-namespace android {
-
-static const int32_t kSampleRate = 8000;
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftAMRNBEncoder::SoftAMRNBEncoder(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mEncState(NULL),
-      mSidState(NULL),
-      mBitRate(0),
-      mMode(MR475),
-      mInputSize(0),
-      mInputTimeUs(-1LL),
-      mSawInputEOS(false),
-      mSignalledError(false) {
-    initPorts();
-    CHECK_EQ(initEncoder(), (status_t)OK);
-}
-
-SoftAMRNBEncoder::~SoftAMRNBEncoder() {
-    if (mEncState != NULL) {
-        AMREncodeExit(&mEncState, &mSidState);
-        mEncState = mSidState = NULL;
-    }
-}
-
-void SoftAMRNBEncoder::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kNumSamplesPerFrame * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/3gpp");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAMR;
-
-    addPort(def);
-}
-
-status_t SoftAMRNBEncoder::initEncoder() {
-    if (AMREncodeInit(&mEncState, &mSidState, false /* dtx_enable */) != 0) {
-        return UNKNOWN_ERROR;
-    }
-
-    return OK;
-}
-
-OMX_ERRORTYPE SoftAMRNBEncoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingPCM : OMX_AUDIO_CodingAMR;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            OMX_AUDIO_PARAM_AMRTYPE *amrParams =
-                (OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(amrParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (amrParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            amrParams->nChannels = 1;
-            amrParams->nBitRate = mBitRate;
-            amrParams->eAMRBandMode = (OMX_AUDIO_AMRBANDMODETYPE)(mMode + 1);
-            amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
-            amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelCF;
-
-            pcmParams->nChannels = 1;
-            pcmParams->nSamplingRate = kSampleRate;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAMRNBEncoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_encoder.amrnb",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingAMR)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            OMX_AUDIO_PARAM_AMRTYPE *amrParams =
-                (OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(amrParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (amrParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (amrParams->nChannels != 1
-                    || amrParams->eAMRDTXMode != OMX_AUDIO_AMRDTXModeOff
-                    || amrParams->eAMRFrameFormat
-                            != OMX_AUDIO_AMRFrameFormatFSF
-                    || amrParams->eAMRBandMode < OMX_AUDIO_AMRBandModeNB0
-                    || amrParams->eAMRBandMode > OMX_AUDIO_AMRBandModeNB7) {
-                return OMX_ErrorUndefined;
-            }
-
-            mBitRate = amrParams->nBitRate;
-            mMode = amrParams->eAMRBandMode - 1;
-
-            amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
-            amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nChannels != 1
-                    || pcmParams->nSamplingRate != (OMX_U32)kSampleRate) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftAMRNBEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    size_t numBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t);
-
-    for (;;) {
-        // We do the following until we run out of buffers.
-
-        while (mInputSize < numBytesPerInputFrame) {
-            // As long as there's still input data to be read we
-            // will drain "kNumSamplesPerFrame" samples
-            // into the "mInputFrame" buffer and then encode those
-            // as a unit into an output buffer.
-
-            if (mSawInputEOS || inQueue.empty()) {
-                return;
-            }
-
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-            const void *inData = inHeader->pBuffer + inHeader->nOffset;
-
-            size_t copy = numBytesPerInputFrame - mInputSize;
-            if (copy > inHeader->nFilledLen) {
-                copy = inHeader->nFilledLen;
-            }
-
-            if (mInputSize == 0) {
-                mInputTimeUs = inHeader->nTimeStamp;
-            }
-
-            memcpy((uint8_t *)mInputFrame + mInputSize, inData, copy);
-            mInputSize += copy;
-
-            inHeader->nOffset += copy;
-            inHeader->nFilledLen -= copy;
-
-            // "Time" on the input buffer has in effect advanced by the
-            // number of audio frames we just advanced nOffset by.
-            inHeader->nTimeStamp +=
-                (copy * 1000000LL / kSampleRate) / sizeof(int16_t);
-
-            if (inHeader->nFilledLen == 0) {
-                if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                    ALOGV("saw input EOS");
-                    mSawInputEOS = true;
-
-                    // Pad any remaining data with zeroes.
-                    memset((uint8_t *)mInputFrame + mInputSize,
-                           0,
-                           numBytesPerInputFrame - mInputSize);
-
-                    mInputSize = numBytesPerInputFrame;
-                }
-
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-
-                inData = NULL;
-                inHeader = NULL;
-                inInfo = NULL;
-            }
-        }
-
-        // At this  point we have all the input data necessary to encode
-        // a single frame, all we need is an output buffer to store the result
-        // in.
-
-        if (outQueue.empty()) {
-            return;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        uint8_t *outPtr = outHeader->pBuffer + outHeader->nOffset;
-        size_t outAvailable = outHeader->nAllocLen - outHeader->nOffset;
-
-        Frame_Type_3GPP frameType;
-        int res = AMREncode(
-                mEncState, mSidState, (Mode)mMode,
-                mInputFrame, outPtr, &frameType, AMR_TX_WMF);
-
-        CHECK_GE(res, 0);
-        CHECK_LE((size_t)res, outAvailable);
-
-        // Convert header byte from WMF to IETF format.
-        outPtr[0] = ((outPtr[0] << 3) | 4) & 0x7c;
-
-        outHeader->nFilledLen = res;
-        outHeader->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
-
-        if (mSawInputEOS) {
-            // We also tag this output buffer with EOS if it corresponds
-            // to the final input buffer.
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        }
-
-        outHeader->nTimeStamp = mInputTimeUs;
-
-#if 0
-        ALOGI("sending %d bytes of data (time = %lld us, flags = 0x%08lx)",
-              nOutputBytes, mInputTimeUs, outHeader->nFlags);
-
-        hexdump(outHeader->pBuffer + outHeader->nOffset, outHeader->nFilledLen);
-#endif
-
-        outQueue.erase(outQueue.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-
-        outHeader = NULL;
-        outInfo = NULL;
-
-        mInputSize = 0;
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAMRNBEncoder(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
deleted file mode 100644
index c73e4dd..0000000
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AMRNB_ENCODER_H_
-
-#define SOFT_AMRNB_ENCODER_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-namespace android {
-
-struct SoftAMRNBEncoder : public SimpleSoftOMXComponent {
-    SoftAMRNBEncoder(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAMRNBEncoder();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-private:
-    enum {
-        kNumBuffers             = 4,
-        kNumSamplesPerFrame     = 160,
-    };
-
-    void *mEncState;
-    void *mSidState;
-
-    OMX_U32 mBitRate;
-    int mMode;
-
-    size_t mInputSize;
-    int16_t mInputFrame[kNumSamplesPerFrame];
-    int64_t mInputTimeUs;
-
-    bool mSawInputEOS;
-    bool mSignalledError;
-
-    void initPorts();
-    status_t initEncoder();
-
-    status_t setAudioParams();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAMRNBEncoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AMRNB_ENCODER_H_
diff --git a/media/libstagefright/codecs/amrnb/enc/exports.lds b/media/libstagefright/codecs/amrnb/enc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/amrnb/enc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
deleted file mode 100644
index 00e7bc9..0000000
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ /dev/null
@@ -1,31 +0,0 @@
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_amrwbenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftAMRWBEncoder.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libstagefright_amrwbenc"],
-
-    shared_libs: [
-        "libstagefright_enc_common",
-    ],
-}
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
deleted file mode 100644
index 657a5ce..0000000
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
+++ /dev/null
@@ -1,484 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAMRWBEncoder"
-#include <utils/Log.h>
-
-#include "SoftAMRWBEncoder.h"
-
-#include "cmnMemory.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-namespace android {
-
-static const int32_t kSampleRate = 16000;
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftAMRWBEncoder::SoftAMRWBEncoder(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mEncoderHandle(NULL),
-      mApiHandle(NULL),
-      mMemOperator(NULL),
-      mBitRate(0),
-      mMode(VOAMRWB_MD66),
-      mInputSize(0),
-      mInputTimeUs(-1LL),
-      mSawInputEOS(false),
-      mSignalledError(false) {
-    initPorts();
-    CHECK_EQ(initEncoder(), (status_t)OK);
-}
-
-SoftAMRWBEncoder::~SoftAMRWBEncoder() {
-    if (mEncoderHandle != NULL) {
-        CHECK_EQ((VO_U32)VO_ERR_NONE, mApiHandle->Uninit(mEncoderHandle));
-        mEncoderHandle = NULL;
-    }
-
-    delete mApiHandle;
-    mApiHandle = NULL;
-
-    delete mMemOperator;
-    mMemOperator = NULL;
-}
-
-void SoftAMRWBEncoder::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kNumSamplesPerFrame * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/amr-wb");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAMR;
-
-    addPort(def);
-}
-
-status_t SoftAMRWBEncoder::initEncoder() {
-    mApiHandle = new VO_AUDIO_CODECAPI;
-
-    if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) {
-        ALOGE("Failed to get api handle");
-        return UNKNOWN_ERROR;
-    }
-
-    mMemOperator = new VO_MEM_OPERATOR;
-    mMemOperator->Alloc = cmnMemAlloc;
-    mMemOperator->Copy = cmnMemCopy;
-    mMemOperator->Free = cmnMemFree;
-    mMemOperator->Set = cmnMemSet;
-    mMemOperator->Check = cmnMemCheck;
-
-    VO_CODEC_INIT_USERDATA userData;
-    memset(&userData, 0, sizeof(userData));
-    userData.memflag = VO_IMF_USERMEMOPERATOR;
-    userData.memData = (VO_PTR) mMemOperator;
-
-    if (VO_ERR_NONE != mApiHandle->Init(
-                &mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) {
-        ALOGE("Failed to init AMRWB encoder");
-        return UNKNOWN_ERROR;
-    }
-
-    VOAMRWBFRAMETYPE type = VOAMRWB_RFC3267;
-    if (VO_ERR_NONE != mApiHandle->SetParam(
-                mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &type)) {
-        ALOGE("Failed to set AMRWB encoder frame type to %d", type);
-        return UNKNOWN_ERROR;
-    }
-
-    return OK;
-}
-
-OMX_ERRORTYPE SoftAMRWBEncoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingPCM : OMX_AUDIO_CodingAMR;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            OMX_AUDIO_PARAM_AMRTYPE *amrParams =
-                (OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(amrParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (amrParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            amrParams->nChannels = 1;
-            amrParams->nBitRate = mBitRate;
-
-            amrParams->eAMRBandMode =
-                (OMX_AUDIO_AMRBANDMODETYPE)(mMode + OMX_AUDIO_AMRBandModeWB0);
-
-            amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
-            amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelCF;
-
-            pcmParams->nChannels = 1;
-            pcmParams->nSamplingRate = kSampleRate;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAMRWBEncoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_encoder.amrwb",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingAMR)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAmr:
-        {
-            OMX_AUDIO_PARAM_AMRTYPE *amrParams =
-                (OMX_AUDIO_PARAM_AMRTYPE *)params;
-
-            if (!isValidOMXParam(amrParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (amrParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (amrParams->nChannels != 1
-                    || amrParams->eAMRDTXMode != OMX_AUDIO_AMRDTXModeOff
-                    || amrParams->eAMRFrameFormat
-                            != OMX_AUDIO_AMRFrameFormatFSF
-                    || amrParams->eAMRBandMode < OMX_AUDIO_AMRBandModeWB0
-                    || amrParams->eAMRBandMode > OMX_AUDIO_AMRBandModeWB8) {
-                return OMX_ErrorUndefined;
-            }
-
-            mBitRate = amrParams->nBitRate;
-
-            mMode = (VOAMRWBMODE)(
-                    amrParams->eAMRBandMode - OMX_AUDIO_AMRBandModeWB0);
-
-            amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
-            amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
-
-            if (VO_ERR_NONE !=
-                    mApiHandle->SetParam(
-                        mEncoderHandle, VO_PID_AMRWB_MODE,  &mMode)) {
-                ALOGE("Failed to set AMRWB encoder mode to %d", mMode);
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nChannels != 1
-                    || pcmParams->nSamplingRate != (OMX_U32)kSampleRate) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftAMRWBEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    size_t numBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t);
-
-    for (;;) {
-        // We do the following until we run out of buffers.
-
-        while (mInputSize < numBytesPerInputFrame) {
-            // As long as there's still input data to be read we
-            // will drain "kNumSamplesPerFrame" samples
-            // into the "mInputFrame" buffer and then encode those
-            // as a unit into an output buffer.
-
-            if (mSawInputEOS || inQueue.empty()) {
-                return;
-            }
-
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-            const void *inData = inHeader->pBuffer + inHeader->nOffset;
-
-            size_t copy = numBytesPerInputFrame - mInputSize;
-            if (copy > inHeader->nFilledLen) {
-                copy = inHeader->nFilledLen;
-            }
-
-            if (mInputSize == 0) {
-                mInputTimeUs = inHeader->nTimeStamp;
-            }
-
-            memcpy((uint8_t *)mInputFrame + mInputSize, inData, copy);
-            mInputSize += copy;
-
-            inHeader->nOffset += copy;
-            inHeader->nFilledLen -= copy;
-
-            // "Time" on the input buffer has in effect advanced by the
-            // number of audio frames we just advanced nOffset by.
-            inHeader->nTimeStamp +=
-                (copy * 1000000LL / kSampleRate) / sizeof(int16_t);
-
-            if (inHeader->nFilledLen == 0) {
-                if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                    ALOGV("saw input EOS");
-                    mSawInputEOS = true;
-
-                    // Pad any remaining data with zeroes.
-                    memset((uint8_t *)mInputFrame + mInputSize,
-                           0,
-                           numBytesPerInputFrame - mInputSize);
-
-                    mInputSize = numBytesPerInputFrame;
-                }
-
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-
-                inData = NULL;
-                inHeader = NULL;
-                inInfo = NULL;
-            }
-        }
-
-        // At this  point we have all the input data necessary to encode
-        // a single frame, all we need is an output buffer to store the result
-        // in.
-
-        if (outQueue.empty()) {
-            return;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        uint8_t *outPtr = outHeader->pBuffer + outHeader->nOffset;
-        size_t outAvailable = outHeader->nAllocLen - outHeader->nOffset;
-
-        VO_CODECBUFFER inputData;
-        memset(&inputData, 0, sizeof(inputData));
-        inputData.Buffer = (unsigned char *) mInputFrame;
-        inputData.Length = mInputSize;
-
-        CHECK_EQ((VO_U32)VO_ERR_NONE,
-                 mApiHandle->SetInputData(mEncoderHandle, &inputData));
-
-        VO_CODECBUFFER outputData;
-        memset(&outputData, 0, sizeof(outputData));
-        VO_AUDIO_OUTPUTINFO outputInfo;
-        memset(&outputInfo, 0, sizeof(outputInfo));
-
-        outputData.Buffer = outPtr;
-        outputData.Length = outAvailable;
-        VO_U32 ret = mApiHandle->GetOutputData(
-                mEncoderHandle, &outputData, &outputInfo);
-        CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL);
-
-        outHeader->nFilledLen = outputData.Length;
-        outHeader->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
-
-        if (mSawInputEOS) {
-            // We also tag this output buffer with EOS if it corresponds
-            // to the final input buffer.
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        }
-
-        outHeader->nTimeStamp = mInputTimeUs;
-
-#if 0
-        ALOGI("sending %ld bytes of data (time = %lld us, flags = 0x%08lx)",
-              outHeader->nFilledLen, mInputTimeUs, outHeader->nFlags);
-
-        hexdump(outHeader->pBuffer + outHeader->nOffset, outHeader->nFilledLen);
-#endif
-
-        outQueue.erase(outQueue.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-
-        outHeader = NULL;
-        outInfo = NULL;
-
-        mInputSize = 0;
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAMRWBEncoder(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
deleted file mode 100644
index 8950a8c..0000000
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AMRWB_ENCODER_H_
-
-#define SOFT_AMRWB_ENCODER_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include "voAMRWB.h"
-
-struct VO_AUDIO_CODECAPI;
-struct VO_MEM_OPERATOR;
-
-namespace android {
-
-struct SoftAMRWBEncoder : public SimpleSoftOMXComponent {
-    SoftAMRWBEncoder(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAMRWBEncoder();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-private:
-    enum {
-        kNumBuffers             = 4,
-        kNumSamplesPerFrame     = 320,
-    };
-
-    void *mEncoderHandle;
-    VO_AUDIO_CODECAPI *mApiHandle;
-    VO_MEM_OPERATOR *mMemOperator;
-
-    OMX_U32 mBitRate;
-    VOAMRWBMODE mMode;
-
-    size_t mInputSize;
-    int16_t mInputFrame[kNumSamplesPerFrame];
-    int64_t mInputTimeUs;
-
-    bool mSawInputEOS;
-    bool mSignalledError;
-
-    void initPorts();
-    status_t initEncoder();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAMRWBEncoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AMRWB_ENCODER_H_
diff --git a/media/libstagefright/codecs/amrwbenc/exports.lds b/media/libstagefright/codecs/amrwbenc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/amrwbenc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
deleted file mode 100644
index 1c2f9be..0000000
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ /dev/null
@@ -1,34 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_avcdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    static_libs: ["libavcdec"],
-    srcs: ["SoftAVCDec.cpp"],
-
-    cflags: [
-        "-Wall",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-        config: {
-            cfi_assembly_support: true,
-        },
-    },
-
-    ldflags: ["-Wl,-Bsymbolic"],
-}
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
deleted file mode 100644
index 3891f23..0000000
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ /dev/null
@@ -1,732 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAVCDec"
-#include <utils/Log.h>
-
-#include "ih264_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
-#include "ih264d.h"
-#include "SoftAVCDec.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <OMX_VideoExt.h>
-#include <inttypes.h>
-
-namespace android {
-
-#define componentName                   "video_decoder.avc"
-#define codingType                      OMX_VIDEO_CodingAVC
-#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC
-
-/** Function and structure definitions to keep code similar for each codec */
-#define ivdec_api_function              ih264d_api_function
-#define ivdext_create_ip_t              ih264d_create_ip_t
-#define ivdext_create_op_t              ih264d_create_op_t
-#define ivdext_delete_ip_t              ih264d_delete_ip_t
-#define ivdext_delete_op_t              ih264d_delete_op_t
-#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
-#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t
-
-#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
-
-static const CodecProfileLevel kProfileLevels[] = {
-    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel52 },
-
-    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel52 },
-
-    { OMX_VIDEO_AVCProfileMain,     OMX_VIDEO_AVCLevel52 },
-
-    { OMX_VIDEO_AVCProfileConstrainedHigh,     OMX_VIDEO_AVCLevel52 },
-
-    { OMX_VIDEO_AVCProfileHigh,     OMX_VIDEO_AVCLevel52 },
-};
-
-SoftAVC::SoftAVC(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoDecoderOMXComponent(
-            name, componentName, codingType,
-            kProfileLevels, ARRAY_SIZE(kProfileLevels),
-            320 /* width */, 240 /* height */, callbacks,
-            appData, component),
-      mCodecCtx(NULL),
-      mFlushOutBuffer(NULL),
-      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
-      mIvColorFormat(IV_YUV_420P),
-      mChangingResolution(false),
-      mSignalledError(false),
-      mStride(mWidth),
-      mInputOffset(0){
-    initPorts(
-            1 /* numMinInputBuffers */, kNumBuffers, INPUT_BUF_SIZE,
-            1 /* numMinOutputBuffers */, kNumBuffers, CODEC_MIME_TYPE);
-
-    mTimeStart = mTimeEnd = systemTime();
-
-    // If input dump is enabled, then open create an empty file
-    GENERATE_FILE_NAMES();
-    CREATE_DUMP_FILE(mInFile);
-}
-
-SoftAVC::~SoftAVC() {
-    CHECK_EQ(deInitDecoder(), (status_t)OK);
-}
-
-static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
-    UNUSED(ctxt);
-    return memalign(alignment, size);
-}
-
-static void ivd_aligned_free(void *ctxt, void *buf) {
-    UNUSED(ctxt);
-    free(buf);
-    return;
-}
-
-static size_t GetCPUCoreCount() {
-    long cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK(cpuCoreCount >= 1);
-    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
-    return (size_t)cpuCoreCount;
-}
-
-void SoftAVC::logVersion() {
-    ivd_ctl_getversioninfo_ip_t s_ctl_ip;
-    ivd_ctl_getversioninfo_op_t s_ctl_op;
-    UWORD8 au1_buf[512];
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
-    s_ctl_ip.pv_version_buffer = au1_buf;
-    s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
-
-    status =
-        ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in getting version number: 0x%x",
-                s_ctl_op.u4_error_code);
-    } else {
-        ALOGV("Ittiam decoder version number: %s",
-                (char *)s_ctl_ip.pv_version_buffer);
-    }
-    return;
-}
-
-status_t SoftAVC::setParams(size_t stride) {
-    ivd_ctl_set_config_ip_t s_ctl_ip;
-    ivd_ctl_set_config_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-    s_ctl_ip.u4_disp_wd = (UWORD32)stride;
-    s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-
-    s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
-
-    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the run-time parameters: 0x%x",
-                s_ctl_op.u4_error_code);
-
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftAVC::resetPlugin() {
-    mIsInFlush = false;
-    mReceivedEOS = false;
-
-    memset(mTimeStamps, 0, sizeof(mTimeStamps));
-    memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
-
-    /* Initialize both start and end times */
-    mTimeStart = mTimeEnd = systemTime();
-
-    return OK;
-}
-
-status_t SoftAVC::resetDecoder() {
-    ivd_ctl_reset_ip_t s_ctl_ip;
-    ivd_ctl_reset_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-    mSignalledError = false;
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    mStride = 0;
-    return OK;
-}
-
-status_t SoftAVC::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_cores_op;
-    IV_API_CALL_STATUS_T status;
-    s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
-    s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
-    s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
-    s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
-    status = ivdec_api_function(
-            mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in setting number of cores: 0x%x",
-                s_set_cores_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftAVC::setFlushMode() {
-    IV_API_CALL_STATUS_T status;
-    ivd_ctl_flush_ip_t s_video_flush_ip;
-    ivd_ctl_flush_op_t s_video_flush_op;
-
-    s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
-    s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
-    s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
-
-    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
-    status = ivdec_api_function(
-            mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
-                s_video_flush_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-
-    mIsInFlush = true;
-    return OK;
-}
-
-status_t SoftAVC::initDecoder() {
-    IV_API_CALL_STATUS_T status;
-
-    mNumCores = GetCPUCoreCount();
-    mCodecCtx = NULL;
-
-    mStride = outputBufferWidth();
-
-    /* Initialize the decoder */
-    {
-        ivdext_create_ip_t s_create_ip;
-        ivdext_create_op_t s_create_op;
-
-        void *dec_fxns = (void *)ivdec_api_function;
-
-        s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
-        s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
-        s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
-        s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
-        s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
-        s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
-        s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
-        s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = NULL;
-
-        status = ivdec_api_function(mCodecCtx, (void *)&s_create_ip, (void *)&s_create_op);
-
-        if (status != IV_SUCCESS) {
-            ALOGE("Error in create: 0x%x",
-                    s_create_op.s_ivd_create_op_t.u4_error_code);
-            deInitDecoder();
-            mCodecCtx = NULL;
-            return UNKNOWN_ERROR;
-        }
-
-        mCodecCtx = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
-        mCodecCtx->pv_fxns = dec_fxns;
-        mCodecCtx->u4_size = sizeof(iv_obj_t);
-    }
-
-    /* Reset the plugin state */
-    resetPlugin();
-
-    /* Set the run time (dynamic) parameters */
-    setParams(mStride);
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    /* Get codec version */
-    logVersion();
-
-    mFlushNeeded = false;
-    return OK;
-}
-
-status_t SoftAVC::deInitDecoder() {
-    IV_API_CALL_STATUS_T status;
-
-    if (mCodecCtx) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
-
-        s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
-        s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
-
-        s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
-
-        status = ivdec_api_function(mCodecCtx, (void *)&s_delete_ip, (void *)&s_delete_op);
-        if (status != IV_SUCCESS) {
-            ALOGE("Error in delete: 0x%x",
-                    s_delete_op.s_ivd_delete_op_t.u4_error_code);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-
-    mChangingResolution = false;
-
-    return OK;
-}
-
-void SoftAVC::onReset() {
-    SoftVideoDecoderOMXComponent::onReset();
-
-    mSignalledError = false;
-    mInputOffset = 0;
-    resetDecoder();
-    resetPlugin();
-}
-
-bool SoftAVC::getVUIParams() {
-    IV_API_CALL_STATUS_T status;
-    ih264d_ctl_get_vui_params_ip_t s_ctl_get_vui_params_ip;
-    ih264d_ctl_get_vui_params_op_t s_ctl_get_vui_params_op;
-
-    s_ctl_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_get_vui_params_ip.e_sub_cmd =
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_GET_VUI_PARAMS;
-
-    s_ctl_get_vui_params_ip.u4_size =
-        sizeof(ih264d_ctl_get_vui_params_ip_t);
-
-    s_ctl_get_vui_params_op.u4_size = sizeof(ih264d_ctl_get_vui_params_op_t);
-
-    status = ivdec_api_function(
-            (iv_obj_t *)mCodecCtx, (void *)&s_ctl_get_vui_params_ip,
-            (void *)&s_ctl_get_vui_params_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGW("Error in getting VUI params: 0x%x",
-                s_ctl_get_vui_params_op.u4_error_code);
-        return false;
-    }
-
-    int32_t primaries = s_ctl_get_vui_params_op.u1_colour_primaries;
-    int32_t transfer = s_ctl_get_vui_params_op.u1_tfr_chars;
-    int32_t coeffs = s_ctl_get_vui_params_op.u1_matrix_coeffs;
-    bool fullRange = s_ctl_get_vui_params_op.u1_video_full_range_flag;
-
-    ColorAspects colorAspects;
-    ColorUtils::convertIsoColorAspectsToCodecAspects(
-            primaries, transfer, coeffs, fullRange, colorAspects);
-
-    // Update color aspects if necessary.
-    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
-        mBitstreamColorAspects = colorAspects;
-        status_t err = handleColorAspectsChange();
-        CHECK(err == OK);
-    }
-    return true;
-}
-
-bool SoftAVC::setDecodeArgs(
-        ivd_video_decode_ip_t *ps_dec_ip,
-        ivd_video_decode_op_t *ps_dec_op,
-        OMX_BUFFERHEADERTYPE *inHeader,
-        OMX_BUFFERHEADERTYPE *outHeader,
-        size_t timeStampIx) {
-    size_t sizeY = outputBufferWidth() * outputBufferHeight();
-    size_t sizeUV;
-
-    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
-    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
-
-    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
-
-    /* When in flush and after EOS with zero byte input,
-     * inHeader is set to zero. Hence check for non-null */
-    if (inHeader) {
-        ps_dec_ip->u4_ts = timeStampIx;
-        ps_dec_ip->pv_stream_buffer =
-            inHeader->pBuffer + inHeader->nOffset + mInputOffset;
-        ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen - mInputOffset;
-    } else {
-        ps_dec_ip->u4_ts = 0;
-        ps_dec_ip->pv_stream_buffer = NULL;
-        ps_dec_ip->u4_num_Bytes = 0;
-    }
-
-    sizeUV = sizeY / 4;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
-
-    uint8_t *pBuf;
-    if (outHeader) {
-        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
-            android_errorWriteLog(0x534e4554, "27833616");
-            return false;
-        }
-        pBuf = outHeader->pBuffer;
-    } else {
-        // mFlushOutBuffer always has the right size.
-        pBuf = mFlushOutBuffer;
-    }
-
-    ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
-    ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
-    ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
-    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
-    return true;
-}
-void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
-    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
-    if (kOutputPortIndex == portIndex) {
-        setFlushMode();
-
-        /* Allocate a picture buffer to flushed data */
-        uint32_t displayStride = outputBufferWidth();
-        uint32_t displayHeight = outputBufferHeight();
-
-        uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
-        mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
-        if (NULL == mFlushOutBuffer) {
-            ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
-            return;
-        }
-
-        while (true) {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            IV_API_CALL_STATUS_T status;
-
-            setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
-
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-            if (0 == s_dec_op.u4_output_present) {
-                resetPlugin();
-                break;
-            }
-        }
-
-        if (mFlushOutBuffer) {
-            free(mFlushOutBuffer);
-            mFlushOutBuffer = NULL;
-        }
-    } else {
-        mInputOffset = 0;
-    }
-}
-
-void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
-    UNUSED(portIndex);
-    OMX_BUFFERHEADERTYPE *inHeader = NULL;
-    BufferInfo *inInfo = NULL;
-
-    if (mSignalledError) {
-        return;
-    }
-    if (mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    if (NULL == mCodecCtx) {
-        if (OK != initDecoder()) {
-            ALOGE("Failed to initialize decoder");
-            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-    }
-    if (outputBufferWidth() != mStride) {
-        /* Set the run-time (dynamic) parameters */
-        mStride = outputBufferWidth();
-        setParams(mStride);
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
-    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
-
-    while (!outQueue.empty()) {
-        BufferInfo *outInfo;
-        OMX_BUFFERHEADERTYPE *outHeader;
-        size_t timeStampIx = 0;
-
-        if (!mIsInFlush && (NULL == inHeader)) {
-            if (!inQueue.empty()) {
-                inInfo = *inQueue.begin();
-                inHeader = inInfo->mHeader;
-                if (inHeader == NULL) {
-                    inQueue.erase(inQueue.begin());
-                    inInfo->mOwnedByUs = false;
-                    continue;
-                }
-            } else {
-                break;
-            }
-        }
-
-        outInfo = *outQueue.begin();
-        outHeader = outInfo->mHeader;
-        outHeader->nFlags = 0;
-        outHeader->nTimeStamp = 0;
-        outHeader->nOffset = 0;
-
-        if (inHeader != NULL) {
-            if (inHeader->nFilledLen == 0) {
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-
-                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
-                    return;
-                }
-
-                mReceivedEOS = true;
-                inHeader = NULL;
-                setFlushMode();
-            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                mReceivedEOS = true;
-            }
-        }
-
-        /* Get a free slot in timestamp array to hold input timestamp */
-        {
-            size_t i;
-            timeStampIx = 0;
-            for (i = 0; i < MAX_TIME_STAMPS; i++) {
-                if (!mTimeStampsValid[i]) {
-                    timeStampIx = i;
-                    break;
-                }
-            }
-            if (inHeader != NULL) {
-                mTimeStampsValid[timeStampIx] = true;
-                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
-            }
-        }
-
-        {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            nsecs_t timeDelay, timeTaken;
-
-            if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
-                ALOGE("Decoder arg setup failed");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-            // If input dump is enabled, then write to file
-            DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes, mInputOffset);
-
-            mTimeStart = systemTime();
-            /* Compute time elapsed between end of previous decode()
-             * to start of current decode() */
-            timeDelay = mTimeStart - mTimeEnd;
-
-            IV_API_CALL_STATUS_T status;
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-
-            bool unsupportedResolution =
-                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-
-            /* Check for unsupported dimensions */
-            if (unsupportedResolution) {
-                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-            if (allocationFailed) {
-                ALOGE("Allocation failure in decoder");
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            if (IS_IVD_FATAL_ERROR(s_dec_op.u4_error_code)) {
-                ALOGE("Fatal Error : 0x%x", s_dec_op.u4_error_code);
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-
-            getVUIParams();
-
-            mTimeEnd = systemTime();
-            /* Compute time taken for decode() */
-            timeTaken = mTimeEnd - mTimeStart;
-
-            ALOGV("timeTaken=%6lldus delay=%6lldus numBytes=%6d",
-                    (long long) (timeTaken / 1000LL), (long long) (timeDelay / 1000LL),
-                   s_dec_op.u4_num_bytes_consumed);
-            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
-                mFlushNeeded = true;
-            }
-
-            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
-                /* If the input did not contain picture data, then ignore
-                 * the associated timestamp */
-                mTimeStampsValid[timeStampIx] = false;
-            }
-
-            // If the decoder is in the changing resolution mode and there is no output present,
-            // that means the switching is done and it's ready to reset the decoder and the plugin.
-            if (mChangingResolution && !s_dec_op.u4_output_present) {
-                mChangingResolution = false;
-                resetDecoder();
-                resetPlugin();
-                mStride = outputBufferWidth();
-                setParams(mStride);
-                continue;
-            }
-
-            if (resChanged) {
-                mChangingResolution = true;
-                if (mFlushNeeded) {
-                    setFlushMode();
-                }
-                continue;
-            }
-
-            // Combine the resolution change and coloraspects change in one PortSettingChange event
-            // if necessary.
-            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
-                uint32_t width = s_dec_op.u4_pic_wd;
-                uint32_t height = s_dec_op.u4_pic_ht;
-                bool portWillReset = false;
-                handlePortSettingsChange(&portWillReset, width, height);
-                if (portWillReset) {
-                    resetDecoder();
-                    resetPlugin();
-                    return;
-                }
-            } else if (mUpdateColorAspects) {
-                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
-                    kDescribeColorAspectsIndex, NULL);
-                mUpdateColorAspects = false;
-                return;
-            }
-
-            if (s_dec_op.u4_output_present) {
-                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
-
-                outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
-                mTimeStampsValid[s_dec_op.u4_ts] = false;
-
-                outInfo->mOwnedByUs = false;
-                outQueue.erase(outQueue.begin());
-                outInfo = NULL;
-                notifyFillBufferDone(outHeader);
-                outHeader = NULL;
-            } else if (mIsInFlush) {
-                /* If in flush mode and no output is returned by the codec,
-                 * then come out of flush mode */
-                mIsInFlush = false;
-
-                /* If EOS was recieved on input port and there is no output
-                 * from the codec, then signal EOS on output port */
-                if (mReceivedEOS) {
-                    outHeader->nFilledLen = 0;
-                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                    resetPlugin();
-                }
-            }
-            mInputOffset += s_dec_op.u4_num_bytes_consumed;
-        }
-        // If more than 4 bytes are remaining in input, then do not release it
-        if (inHeader != NULL && ((inHeader->nFilledLen - mInputOffset) <= 4)) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-            mInputOffset = 0;
-
-            /* If input EOS is seen and decoder is not in flush mode,
-             * set the decoder in flush mode.
-             * There can be a case where EOS is sent along with last picture data
-             * In that case, only after decoding that input data, decoder has to be
-             * put in flush. This case is handled here  */
-
-            if (mReceivedEOS && !mIsInFlush) {
-                setFlushMode();
-            }
-        }
-    }
-}
-
-int SoftAVC::getColorAspectPreference() {
-    return kPreferBitstream;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
-        OMX_COMPONENTTYPE **component) {
-    return new android::SoftAVC(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
deleted file mode 100644
index 679ed3e..0000000
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_H264_DEC_H_
-
-#define SOFT_H264_DEC_H_
-
-#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-#include <sys/time.h>
-
-namespace android {
-
-/** Number of entries in the time-stamp array */
-#define MAX_TIME_STAMPS 64
-
-/** Maximum number of cores supported by the codec */
-#define CODEC_MAX_NUM_CORES 4
-
-#define CODEC_MAX_WIDTH     1920
-
-#define CODEC_MAX_HEIGHT    1088
-
-/** Input buffer size */
-#define INPUT_BUF_SIZE (1024 * 1024)
-
-#define MIN(a, b) ((a) < (b)) ? (a) : (b)
-
-/** Used to remove warnings about unused parameters */
-#define UNUSED(x) ((void)(x))
-
-struct SoftAVC : public SoftVideoDecoderOMXComponent {
-    SoftAVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData, OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAVC();
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-    virtual int getColorAspectPreference();
-private:
-    // Number of input and output buffers
-    enum {
-        kNumBuffers = 8
-    };
-
-    iv_obj_t *mCodecCtx;         // Codec context
-
-    size_t mNumCores;            // Number of cores to be uesd by the codec
-
-    nsecs_t mTimeStart;   // Time at the start of decode()
-    nsecs_t mTimeEnd;     // Time at the end of decode()
-
-    // Internal buffer to be used to flush out the buffers from decoder
-    uint8_t *mFlushOutBuffer;
-
-    // Status of entries in the timestamp array
-    bool mTimeStampsValid[MAX_TIME_STAMPS];
-
-    // Timestamp array - Since codec does not take 64 bit timestamps,
-    // they are maintained in the plugin
-    OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
-
-#ifdef FILE_DUMP_ENABLE
-    char mInFile[200];
-#endif /* FILE_DUMP_ENABLE */
-
-    OMX_COLOR_FORMATTYPE mOmxColorFormat;    // OMX Color format
-    IV_COLOR_FORMAT_T mIvColorFormat;        // Ittiam Color format
-
-    bool mIsInFlush;        // codec is flush mode
-    bool mReceivedEOS;      // EOS is receieved on input port
-
-    // The input stream has changed to a different resolution, which is still supported by the
-    // codec. So the codec is switching to decode the new resolution.
-    bool mChangingResolution;
-    bool mFlushNeeded;
-    bool mSignalledError;
-    size_t mStride;
-    size_t mInputOffset;
-
-    status_t initDecoder();
-    status_t deInitDecoder();
-    status_t setFlushMode();
-    status_t setParams(size_t stride);
-    void logVersion();
-    status_t setNumCores();
-    status_t resetDecoder();
-    status_t resetPlugin();
-
-
-    bool setDecodeArgs(
-            ivd_video_decode_ip_t *ps_dec_ip,
-            ivd_video_decode_op_t *ps_dec_op,
-            OMX_BUFFERHEADERTYPE *inHeader,
-            OMX_BUFFERHEADERTYPE *outHeader,
-            size_t timeStampIx);
-
-    bool getVUIParams();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
-};
-
-#ifdef FILE_DUMP_ENABLE
-
-#define INPUT_DUMP_PATH     "/sdcard/media/avcd_input"
-#define INPUT_DUMP_EXT      "h264"
-
-#define GENERATE_FILE_NAMES() {                         \
-    strcpy(mInFile, "");                                \
-    sprintf(mInFile, "%s_%lld.%s", INPUT_DUMP_PATH,     \
-            (long long) mTimeStart,                     \
-            INPUT_DUMP_EXT);                            \
-}
-
-#define CREATE_DUMP_FILE(m_filename) {                  \
-    FILE *fp = fopen(m_filename, "wb");                 \
-    if (fp != NULL) {                                   \
-        fclose(fp);                                     \
-    } else {                                            \
-        ALOGD("Could not open file %s", m_filename);    \
-    }                                                   \
-}
-#define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)\
-{                                                       \
-    FILE *fp = fopen(m_filename, "ab");                 \
-    if (fp != NULL && m_buf != NULL && m_offset == 0) { \
-        int i;                                          \
-        i = fwrite(m_buf, 1, m_size, fp);               \
-        ALOGD("fwrite ret %d to write %d", i, m_size);  \
-        if (i != (int) m_size) {                        \
-            ALOGD("Error in fwrite, returned %d", i);   \
-            perror("Error in write to file");           \
-        }                                               \
-    } else if (fp == NULL) {                            \
-        ALOGD("Could not write to file %s", m_filename);\
-    }                                                   \
-    if (fp) {                                           \
-        fclose(fp);                                     \
-    }                                                   \
-}
-#else /* FILE_DUMP_ENABLE */
-#define INPUT_DUMP_PATH
-#define INPUT_DUMP_EXT
-#define OUTPUT_DUMP_PATH
-#define OUTPUT_DUMP_EXT
-#define GENERATE_FILE_NAMES()
-#define CREATE_DUMP_FILE(m_filename)
-#define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)
-#endif /* FILE_DUMP_ENABLE */
-
-} // namespace android
-
-#endif  // SOFT_H264_DEC_H_
diff --git a/media/libstagefright/codecs/avcdec/exports.lds b/media/libstagefright/codecs/avcdec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/avcdec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
deleted file mode 100644
index 586088c..0000000
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ /dev/null
@@ -1,34 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_avcenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    static_libs: ["libavcenc"],
-    srcs: ["SoftAVCEnc.cpp"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-        config: {
-            cfi_assembly_support: true,
-        },
-    },
-
-    cflags: [
-        "-Wall",
-        "-Wno-unused-variable",
-    ],
-    ldflags: ["-Wl,-Bsymbolic"],
-
-    version_script: "exports.lds",
-}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
deleted file mode 100644
index 01174c9..0000000
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ /dev/null
@@ -1,1515 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAVCEnc"
-#include <utils/Log.h>
-#include <utils/misc.h>
-
-#include "OMX_Video.h"
-
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <OMX_IndexExt.h>
-#include <OMX_VideoExt.h>
-
-#include "ih264_typedefs.h"
-#include "iv2.h"
-#include "ive2.h"
-#include "ih264e.h"
-#include "SoftAVCEnc.h"
-
-namespace android {
-
-    #define ive_api_function ih264e_api_function
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-struct LevelConversion {
-    OMX_VIDEO_AVCLEVELTYPE omxLevel;
-    WORD32 avcLevel;
-};
-
-static LevelConversion ConversionTable[] = {
-    { OMX_VIDEO_AVCLevel1,  10 },
-    { OMX_VIDEO_AVCLevel1b, 9  },
-    { OMX_VIDEO_AVCLevel11, 11 },
-    { OMX_VIDEO_AVCLevel12, 12 },
-    { OMX_VIDEO_AVCLevel13, 13 },
-    { OMX_VIDEO_AVCLevel2,  20 },
-    { OMX_VIDEO_AVCLevel21, 21 },
-    { OMX_VIDEO_AVCLevel22, 22 },
-    { OMX_VIDEO_AVCLevel3,  30 },
-    { OMX_VIDEO_AVCLevel31, 31 },
-    { OMX_VIDEO_AVCLevel32, 32 },
-    { OMX_VIDEO_AVCLevel4,  40 },
-    { OMX_VIDEO_AVCLevel41, 41 },
-    { OMX_VIDEO_AVCLevel42, 42 },
-    { OMX_VIDEO_AVCLevel5,  50 },
-    { OMX_VIDEO_AVCLevel51, 51 },
-};
-
-static const CodecProfileLevel kProfileLevels[] = {
-    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel41 },
-
-    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
-
-    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel41 },
-};
-
-static size_t GetCPUCoreCount() {
-    long cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK(cpuCoreCount >= 1);
-    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
-    return (size_t)cpuCoreCount;
-}
-
-static status_t ConvertOmxAvcLevelToAvcSpecLevel(
-        OMX_VIDEO_AVCLEVELTYPE omxLevel, WORD32 *avcLevel) {
-    for (size_t i = 0; i < NELEM(ConversionTable); ++i) {
-        if (omxLevel == ConversionTable[i].omxLevel) {
-            *avcLevel = ConversionTable[i].avcLevel;
-            return OK;
-        }
-    }
-
-    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
-            (int32_t)omxLevel);
-
-    return BAD_VALUE;
-}
-
-static status_t ConvertAvcSpecLevelToOmxAvcLevel(
-        WORD32 avcLevel, OMX_VIDEO_AVCLEVELTYPE *omxLevel) {
-    for (size_t i = 0; i < NELEM(ConversionTable); ++i) {
-        if (avcLevel == ConversionTable[i].avcLevel) {
-            *omxLevel = ConversionTable[i].omxLevel;
-            return OK;
-        }
-    }
-
-    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
-            (int32_t)avcLevel);
-
-    return BAD_VALUE;
-}
-
-
-SoftAVC::SoftAVC(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoEncoderOMXComponent(
-            name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
-            kProfileLevels, NELEM(kProfileLevels),
-            176 /* width */, 144 /* height */,
-            callbacks, appData, component),
-      mUpdateFlag(0),
-      mIvVideoColorFormat(IV_YUV_420P),
-      mAVCEncProfile(IV_PROFILE_BASE),
-      mAVCEncLevel(41),
-      mStarted(false),
-      mSawInputEOS(false),
-      mSawOutputEOS(false),
-      mSignalledError(false),
-      mCodecCtx(NULL) {
-
-    initPorts(kNumBuffers, kNumBuffers, ((mWidth * mHeight * 3) >> 1),
-            MEDIA_MIMETYPE_VIDEO_AVC, 2);
-
-    // If dump is enabled, then open create an empty file
-    GENERATE_FILE_NAMES();
-    CREATE_DUMP_FILE(mInFile);
-    CREATE_DUMP_FILE(mOutFile);
-    memset(mConversionBuffers, 0, sizeof(mConversionBuffers));
-    memset(mInputBufferInfo, 0, sizeof(mInputBufferInfo));
-
-    initEncParams();
-
-}
-
-SoftAVC::~SoftAVC() {
-    releaseEncoder();
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    CHECK(outQueue.empty());
-    CHECK(inQueue.empty());
-}
-
-void  SoftAVC::initEncParams() {
-    mCodecCtx = NULL;
-    mMemRecords = NULL;
-    mNumMemRecords = DEFAULT_MEM_REC_CNT;
-    mHeaderGenerated = 0;
-    mNumCores = GetCPUCoreCount();
-    mArch = DEFAULT_ARCH;
-    mSliceMode = DEFAULT_SLICE_MODE;
-    mSliceParam = DEFAULT_SLICE_PARAM;
-    mHalfPelEnable = DEFAULT_HPEL;
-    mIInterval = DEFAULT_I_INTERVAL;
-    mIDRInterval = DEFAULT_IDR_INTERVAL;
-    mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
-    mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
-    mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
-    mEncSpeed = DEFAULT_ENC_SPEED;
-    mIntra4x4 = DEFAULT_INTRA4x4;
-    mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
-    mAIRMode = DEFAULT_AIR;
-    mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;
-    mPSNREnable = DEFAULT_PSNR_ENABLE;
-    mReconEnable = DEFAULT_RECON_ENABLE;
-    mEntropyMode = DEFAULT_ENTROPY_MODE;
-    mBframes = DEFAULT_B_FRAMES;
-
-    gettimeofday(&mTimeStart, NULL);
-    gettimeofday(&mTimeEnd, NULL);
-
-}
-
-
-OMX_ERRORTYPE SoftAVC::setDimensions() {
-    ive_ctl_set_dimensions_ip_t s_dimensions_ip;
-    ive_ctl_set_dimensions_op_t s_dimensions_op;
-    IV_STATUS_T status;
-
-    s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
-    s_dimensions_ip.u4_ht = mHeight;
-    s_dimensions_ip.u4_wd = mWidth;
-
-    s_dimensions_ip.u4_timestamp_high = -1;
-    s_dimensions_ip.u4_timestamp_low = -1;
-
-    s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
-    s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set frame dimensions = 0x%x\n",
-                s_dimensions_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setNumCores() {
-    IV_STATUS_T status;
-    ive_ctl_set_num_cores_ip_t s_num_cores_ip;
-    ive_ctl_set_num_cores_op_t s_num_cores_op;
-    s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
-    s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
-    s_num_cores_ip.u4_timestamp_high = -1;
-    s_num_cores_ip.u4_timestamp_low = -1;
-    s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
-
-    s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
-
-    status = ive_api_function(
-            mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set processor params = 0x%x\n",
-                s_num_cores_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setFrameRate() {
-    ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
-    ive_ctl_set_frame_rate_op_t s_frame_rate_op;
-    IV_STATUS_T status;
-
-    s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
-
-    s_frame_rate_ip.u4_src_frame_rate = mFramerate >> 16;
-    s_frame_rate_ip.u4_tgt_frame_rate = mFramerate >> 16;
-
-    s_frame_rate_ip.u4_timestamp_high = -1;
-    s_frame_rate_ip.u4_timestamp_low = -1;
-
-    s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
-    s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set frame rate = 0x%x\n",
-                s_frame_rate_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setIpeParams() {
-    ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
-    ive_ctl_set_ipe_params_op_t s_ipe_params_op;
-    IV_STATUS_T status;
-
-    s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
-
-    s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
-    s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
-    s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
-
-    s_ipe_params_ip.u4_timestamp_high = -1;
-    s_ipe_params_ip.u4_timestamp_low = -1;
-
-    s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
-    s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set ipe params = 0x%x\n",
-                s_ipe_params_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setBitRate() {
-    ive_ctl_set_bitrate_ip_t s_bitrate_ip;
-    ive_ctl_set_bitrate_op_t s_bitrate_op;
-    IV_STATUS_T status;
-
-    s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
-
-    s_bitrate_ip.u4_target_bitrate = mBitrate;
-
-    s_bitrate_ip.u4_timestamp_high = -1;
-    s_bitrate_ip.u4_timestamp_low = -1;
-
-    s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
-    s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
-    ive_ctl_set_frame_type_ip_t s_frame_type_ip;
-    ive_ctl_set_frame_type_op_t s_frame_type_op;
-    IV_STATUS_T status;
-    s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
-
-    s_frame_type_ip.e_frame_type = e_frame_type;
-
-    s_frame_type_ip.u4_timestamp_high = -1;
-    s_frame_type_ip.u4_timestamp_low = -1;
-
-    s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
-    s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set frame type = 0x%x\n",
-                s_frame_type_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setQp() {
-    ive_ctl_set_qp_ip_t s_qp_ip;
-    ive_ctl_set_qp_op_t s_qp_op;
-    IV_STATUS_T status;
-
-    s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
-
-    s_qp_ip.u4_i_qp = DEFAULT_I_QP;
-    s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
-
-    s_qp_ip.u4_p_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
-
-    s_qp_ip.u4_b_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
-
-    s_qp_ip.u4_timestamp_high = -1;
-    s_qp_ip.u4_timestamp_low = -1;
-
-    s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
-    s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
-    IV_STATUS_T status;
-    ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
-    ive_ctl_set_enc_mode_op_t s_enc_mode_op;
-
-    s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
-
-    s_enc_mode_ip.e_enc_mode = e_enc_mode;
-
-    s_enc_mode_ip.u4_timestamp_high = -1;
-    s_enc_mode_ip.u4_timestamp_low = -1;
-
-    s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
-    s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set in header encode mode = 0x%x\n",
-                s_enc_mode_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setVbvParams() {
-    ive_ctl_set_vbv_params_ip_t s_vbv_ip;
-    ive_ctl_set_vbv_params_op_t s_vbv_op;
-    IV_STATUS_T status;
-
-    s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
-
-    s_vbv_ip.u4_vbv_buf_size = 0;
-    s_vbv_ip.u4_vbv_buffer_delay = 1000;
-
-    s_vbv_ip.u4_timestamp_high = -1;
-    s_vbv_ip.u4_timestamp_low = -1;
-
-    s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
-    s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set VBC params = 0x%x\n", s_vbv_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setAirParams() {
-    ive_ctl_set_air_params_ip_t s_air_ip;
-    ive_ctl_set_air_params_op_t s_air_op;
-    IV_STATUS_T status;
-
-    s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
-
-    s_air_ip.e_air_mode = mAIRMode;
-    s_air_ip.u4_air_refresh_period = mAIRRefreshPeriod;
-
-    s_air_ip.u4_timestamp_high = -1;
-    s_air_ip.u4_timestamp_low = -1;
-
-    s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
-    s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setMeParams() {
-    IV_STATUS_T status;
-    ive_ctl_set_me_params_ip_t s_me_params_ip;
-    ive_ctl_set_me_params_op_t s_me_params_op;
-
-    s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
-
-    s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
-    s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
-
-    s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
-    s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
-    s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
-    s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
-    s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
-
-    s_me_params_ip.u4_timestamp_high = -1;
-    s_me_params_ip.u4_timestamp_low = -1;
-
-    s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
-    s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setGopParams() {
-    IV_STATUS_T status;
-    ive_ctl_set_gop_params_ip_t s_gop_params_ip;
-    ive_ctl_set_gop_params_op_t s_gop_params_op;
-
-    s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
-
-    s_gop_params_ip.u4_i_frm_interval = mIInterval;
-    s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
-
-    s_gop_params_ip.u4_timestamp_high = -1;
-    s_gop_params_ip.u4_timestamp_low = -1;
-
-    s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
-    s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set ME params = 0x%x\n",
-                s_gop_params_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setProfileParams() {
-    IV_STATUS_T status;
-    ive_ctl_set_profile_params_ip_t s_profile_params_ip;
-    ive_ctl_set_profile_params_op_t s_profile_params_op;
-
-    s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
-
-    s_profile_params_ip.e_profile = DEFAULT_EPROFILE;
-    s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;
-    s_profile_params_ip.u4_timestamp_high = -1;
-    s_profile_params_ip.u4_timestamp_low = -1;
-
-    s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
-    s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to set profile params = 0x%x\n",
-                s_profile_params_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setDeblockParams() {
-    IV_STATUS_T status;
-    ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
-    ive_ctl_set_deblock_params_op_t s_deblock_params_op;
-
-    s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
-
-    s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
-
-    s_deblock_params_ip.u4_timestamp_high = -1;
-    s_deblock_params_ip.u4_timestamp_low = -1;
-
-    s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
-    s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
-
-    status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to enable/disable deblock params = 0x%x\n",
-                s_deblock_params_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-    return OMX_ErrorNone;
-}
-
-void SoftAVC::logVersion() {
-    ive_ctl_getversioninfo_ip_t s_ctl_ip;
-    ive_ctl_getversioninfo_op_t s_ctl_op;
-    UWORD8 au1_buf[512];
-    IV_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
-    s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
-    s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
-    s_ctl_ip.pu1_version = au1_buf;
-    s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
-
-    status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
-    } else {
-        ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
-    }
-    return;
-}
-
-OMX_ERRORTYPE SoftAVC::initEncoder() {
-    IV_STATUS_T status;
-    WORD32 level;
-    uint32_t displaySizeY;
-
-    CHECK(!mStarted);
-
-    OMX_ERRORTYPE errType = OMX_ErrorNone;
-
-    displaySizeY = mWidth * mHeight;
-    if (displaySizeY > (1920 * 1088)) {
-        level = 50;
-    } else if (displaySizeY > (1280 * 720)) {
-        level = 40;
-    } else if (displaySizeY > (720 * 576)) {
-        level = 31;
-    } else if (displaySizeY > (624 * 320)) {
-        level = 30;
-    } else if (displaySizeY > (352 * 288)) {
-        level = 21;
-    } else if (displaySizeY > (176 * 144)) {
-        level = 20;
-    } else {
-        level = 10;
-    }
-    mAVCEncLevel = MAX(level, mAVCEncLevel);
-
-    mStride = mWidth;
-
-    if (mInputDataIsMeta) {
-        for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) {
-            if (mConversionBuffers[i] != NULL) {
-                free(mConversionBuffers[i]);
-                mConversionBuffers[i] = 0;
-            }
-
-            if (((uint64_t)mStride * mHeight) > ((uint64_t)INT32_MAX / 3)) {
-                ALOGE("Buffer size is too big.");
-                return OMX_ErrorUndefined;
-            }
-            mConversionBuffers[i] = (uint8_t *)malloc(mStride * mHeight * 3 / 2);
-
-            if (mConversionBuffers[i] == NULL) {
-                ALOGE("Allocating conversion buffer failed.");
-                return OMX_ErrorUndefined;
-            }
-
-            mConversionBuffersFree[i] = 1;
-        }
-    }
-
-    switch (mColorFormat) {
-        case OMX_COLOR_FormatYUV420SemiPlanar:
-            mIvVideoColorFormat = IV_YUV_420SP_UV;
-            ALOGV("colorFormat YUV_420SP");
-            break;
-        default:
-        case OMX_COLOR_FormatYUV420Planar:
-            mIvVideoColorFormat = IV_YUV_420P;
-            ALOGV("colorFormat YUV_420P");
-            break;
-    }
-
-    ALOGD("Params width %d height %d level %d colorFormat %d", mWidth,
-            mHeight, mAVCEncLevel, mIvVideoColorFormat);
-
-    /* Getting Number of MemRecords */
-    {
-        iv_num_mem_rec_ip_t s_num_mem_rec_ip;
-        iv_num_mem_rec_op_t s_num_mem_rec_op;
-
-        s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
-        s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
-
-        s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
-
-        status = ive_api_function(0, &s_num_mem_rec_ip, &s_num_mem_rec_op);
-
-        if (status != IV_SUCCESS) {
-            ALOGE("Get number of memory records failed = 0x%x\n",
-                    s_num_mem_rec_op.u4_error_code);
-            return OMX_ErrorUndefined;
-        }
-
-        mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
-    }
-
-    /* Allocate array to hold memory records */
-    if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
-        ALOGE("requested memory size is too big.");
-        return OMX_ErrorUndefined;
-    }
-    mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
-    if (NULL == mMemRecords) {
-        ALOGE("Unable to allocate memory for hold memory records: Size %zu",
-                mNumMemRecords * sizeof(iv_mem_rec_t));
-        mSignalledError = true;
-        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-        return OMX_ErrorUndefined;
-    }
-
-    {
-        iv_mem_rec_t *ps_mem_rec;
-        ps_mem_rec = mMemRecords;
-        for (size_t i = 0; i < mNumMemRecords; i++) {
-            ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
-            ps_mem_rec->pv_base = NULL;
-            ps_mem_rec->u4_mem_size = 0;
-            ps_mem_rec->u4_mem_alignment = 0;
-            ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
-
-            ps_mem_rec++;
-        }
-    }
-
-    /* Getting MemRecords Attributes */
-    {
-        iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
-        iv_fill_mem_rec_op_t s_fill_mem_rec_op;
-
-        s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
-        s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
-
-        s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
-        s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
-        s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
-        s_fill_mem_rec_ip.u4_max_wd = mWidth;
-        s_fill_mem_rec_ip.u4_max_ht = mHeight;
-        s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
-        s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
-        s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
-
-        status = ive_api_function(0, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
-
-        if (status != IV_SUCCESS) {
-            ALOGE("Fill memory records failed = 0x%x\n",
-                    s_fill_mem_rec_op.u4_error_code);
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-            return OMX_ErrorUndefined;
-        }
-    }
-
-    /* Allocating Memory for Mem Records */
-    {
-        WORD32 total_size;
-        iv_mem_rec_t *ps_mem_rec;
-        total_size = 0;
-        ps_mem_rec = mMemRecords;
-
-        for (size_t i = 0; i < mNumMemRecords; i++) {
-            ps_mem_rec->pv_base = ive_aligned_malloc(
-                    ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
-            if (ps_mem_rec->pv_base == NULL) {
-                ALOGE("Allocation failure for mem record id %zu size %u\n", i,
-                        ps_mem_rec->u4_mem_size);
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                return OMX_ErrorUndefined;
-
-            }
-            total_size += ps_mem_rec->u4_mem_size;
-
-            ps_mem_rec++;
-        }
-    }
-
-    /* Codec Instance Creation */
-    {
-        ive_init_ip_t s_init_ip;
-        ive_init_op_t s_init_op;
-
-        mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
-        mCodecCtx->u4_size = sizeof(iv_obj_t);
-        mCodecCtx->pv_fxns = (void *)ive_api_function;
-
-        s_init_ip.u4_size = sizeof(ive_init_ip_t);
-        s_init_op.u4_size = sizeof(ive_init_op_t);
-
-        s_init_ip.e_cmd = IV_CMD_INIT;
-        s_init_ip.u4_num_mem_rec = mNumMemRecords;
-        s_init_ip.ps_mem_rec = mMemRecords;
-        s_init_ip.u4_max_wd = mWidth;
-        s_init_ip.u4_max_ht = mHeight;
-        s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_init_ip.u4_max_level = mAVCEncLevel;
-        s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
-
-        if (mReconEnable || mPSNREnable) {
-            s_init_ip.u4_enable_recon = 1;
-        } else {
-            s_init_ip.u4_enable_recon = 0;
-        }
-        s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
-        s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
-        s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
-        s_init_ip.u4_num_bframes = mBframes;
-        s_init_ip.e_content_type = IV_PROGRESSIVE;
-        s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
-        s_init_ip.e_slice_mode = mSliceMode;
-        s_init_ip.u4_slice_param = mSliceParam;
-        s_init_ip.e_arch = mArch;
-        s_init_ip.e_soc = DEFAULT_SOC;
-
-        status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
-
-        if (status != IV_SUCCESS) {
-            ALOGE("Init memory records failed = 0x%x\n",
-                    s_init_op.u4_error_code);
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */);
-            return OMX_ErrorUndefined;
-        }
-    }
-
-    /* Get Codec Version */
-    logVersion();
-
-    /* set processor details */
-    setNumCores();
-
-    /* Video control Set Frame dimensions */
-    setDimensions();
-
-    /* Video control Set Frame rates */
-    setFrameRate();
-
-    /* Video control Set IPE Params */
-    setIpeParams();
-
-    /* Video control Set Bitrate */
-    setBitRate();
-
-    /* Video control Set QP */
-    setQp();
-
-    /* Video control Set AIR params */
-    setAirParams();
-
-    /* Video control Set VBV params */
-    setVbvParams();
-
-    /* Video control Set Motion estimation params */
-    setMeParams();
-
-    /* Video control Set GOP params */
-    setGopParams();
-
-    /* Video control Set Deblock params */
-    setDeblockParams();
-
-    /* Video control Set Profile params */
-    setProfileParams();
-
-    /* Video control Set in Encode header mode */
-    setEncMode(IVE_ENC_MODE_HEADER);
-
-    ALOGV("init_codec successfull");
-
-    mSpsPpsHeaderReceived = false;
-    mStarted = true;
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::releaseEncoder() {
-    IV_STATUS_T status = IV_SUCCESS;
-    iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
-    iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
-    iv_mem_rec_t *ps_mem_rec;
-
-    if (!mStarted) {
-        return OMX_ErrorNone;
-    }
-
-    s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
-    s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
-    s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
-    s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
-
-    status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Unable to retrieve memory records = 0x%x\n",
-                s_retrieve_mem_op.u4_error_code);
-        return OMX_ErrorUndefined;
-    }
-
-    /* Free memory records */
-    ps_mem_rec = mMemRecords;
-    for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
-        ive_aligned_free(ps_mem_rec->pv_base);
-        ps_mem_rec++;
-    }
-
-    free(mMemRecords);
-
-    for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) {
-        if (mConversionBuffers[i]) {
-            free(mConversionBuffers[i]);
-            mConversionBuffers[i] = NULL;
-        }
-    }
-
-    // clear other pointers into the space being free()d
-    mCodecCtx = NULL;
-
-    mStarted = false;
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamVideoBitrate:
-        {
-            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
-                (OMX_VIDEO_PARAM_BITRATETYPE *)params;
-
-            if (!isValidOMXParam(bitRate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (bitRate->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            bitRate->eControlRate = OMX_Video_ControlRateVariable;
-            bitRate->nTargetBitrate = mBitrate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoAvc:
-        {
-            OMX_VIDEO_PARAM_AVCTYPE *avcParams = (OMX_VIDEO_PARAM_AVCTYPE *)params;
-
-            if (!isValidOMXParam(avcParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (avcParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            OMX_VIDEO_AVCLEVELTYPE omxLevel = OMX_VIDEO_AVCLevel41;
-            if (OMX_ErrorNone
-                    != ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
-                return OMX_ErrorUndefined;
-            }
-
-            // TODO: maintain profile
-            avcParams->eProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedBaseline;
-            avcParams->eLevel = omxLevel;
-            avcParams->nRefFrames = 1;
-            avcParams->bUseHadamard = OMX_TRUE;
-            avcParams->nAllowedPictureTypes = (OMX_VIDEO_PictureTypeI
-                    | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB);
-            avcParams->nRefIdx10ActiveMinus1 = 0;
-            avcParams->nRefIdx11ActiveMinus1 = 0;
-            avcParams->bWeightedPPrediction = OMX_FALSE;
-            avcParams->bconstIpred = OMX_FALSE;
-            avcParams->bDirect8x8Inference = OMX_FALSE;
-            avcParams->bDirectSpatialTemporal = OMX_FALSE;
-            avcParams->nCabacInitIdc = 0;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
-    int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoBitrate:
-        {
-            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
-                (OMX_VIDEO_PARAM_BITRATETYPE *)params;
-
-            if (!isValidOMXParam(bitRate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            return internalSetBitrateParams(bitRate);
-        }
-
-        case OMX_IndexParamVideoAvc:
-        {
-            OMX_VIDEO_PARAM_AVCTYPE *avcType = (OMX_VIDEO_PARAM_AVCTYPE *)params;
-
-            if (!isValidOMXParam(avcType)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (avcType->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mEntropyMode = 0;
-
-            if (OMX_TRUE == avcType->bEntropyCodingCABAC)
-                mEntropyMode = 1;
-
-            if ((avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) &&
-                    avcType->nPFrames) {
-                mBframes = avcType->nBFrames;
-            }
-
-            mIInterval = (avcType->nPFrames + 1) * (avcType->nBFrames + 1);
-            mConstrainedIntraFlag = avcType->bconstIpred;
-
-            if (OMX_VIDEO_AVCLoopFilterDisable == avcType->eLoopFilterMode)
-                mDisableDeblkLevel = 4;
-
-            if (avcType->nRefFrames != 1
-                    || avcType->bUseHadamard != OMX_TRUE
-                    || avcType->nRefIdx10ActiveMinus1 != 0
-                    || avcType->nRefIdx11ActiveMinus1 != 0
-                    || avcType->bWeightedPPrediction != OMX_FALSE
-                    || avcType->bDirect8x8Inference != OMX_FALSE
-                    || avcType->bDirectSpatialTemporal != OMX_FALSE
-                    || avcType->nCabacInitIdc != 0) {
-                // OMX does not allow a way to signal what values are wrong, so it's
-                // best for components to just do best effort in supporting these values
-                ALOGV("ignoring unsupported settings");
-            }
-
-            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftAVC::getConfig(
-        OMX_INDEXTYPE index, OMX_PTR _params) {
-    switch ((int)index) {
-        case OMX_IndexConfigAndroidIntraRefresh:
-        {
-            OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *intraRefreshParams =
-                (OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *)_params;
-
-            if (!isValidOMXParam(intraRefreshParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (intraRefreshParams->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorUndefined;
-            }
-
-            intraRefreshParams->nRefreshPeriod =
-                    (mAIRMode == IVE_AIR_MODE_NONE) ? 0 : mAIRRefreshPeriod;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SoftVideoEncoderOMXComponent::getConfig(index, _params);
-    }
-}
-
-OMX_ERRORTYPE SoftAVC::internalSetConfig(
-        OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig) {
-    switch ((int)index) {
-        case OMX_IndexConfigVideoIntraVOPRefresh:
-        {
-            OMX_CONFIG_INTRAREFRESHVOPTYPE *params =
-                (OMX_CONFIG_INTRAREFRESHVOPTYPE *)_params;
-
-            if (!isValidOMXParam(params)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (params->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            if (params->IntraRefreshVOP) {
-                mUpdateFlag |= kRequestKeyFrame;
-            }
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexConfigVideoBitrate:
-        {
-            OMX_VIDEO_CONFIG_BITRATETYPE *params =
-                (OMX_VIDEO_CONFIG_BITRATETYPE *)_params;
-
-            if (!isValidOMXParam(params)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (params->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            if (mBitrate != params->nEncodeBitrate) {
-                mBitrate = params->nEncodeBitrate;
-                mUpdateFlag |= kUpdateBitrate;
-            }
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexConfigAndroidIntraRefresh:
-        {
-            const OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *intraRefreshParams =
-                (const OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *)_params;
-
-            if (!isValidOMXParam(intraRefreshParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (intraRefreshParams->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (intraRefreshParams->nRefreshPeriod == 0) {
-                mAIRMode = IVE_AIR_MODE_NONE;
-                mAIRRefreshPeriod = 0;
-            } else if (intraRefreshParams->nRefreshPeriod > 0) {
-                mAIRMode = IVE_AIR_MODE_CYCLIC;
-                mAIRRefreshPeriod = intraRefreshParams->nRefreshPeriod;
-            }
-            mUpdateFlag |= kUpdateAIRMode;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetConfig(index, _params, frameConfig);
-    }
-}
-
-OMX_ERRORTYPE SoftAVC::internalSetBitrateParams(
-        const OMX_VIDEO_PARAM_BITRATETYPE *bitrate) {
-    if (bitrate->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    mBitrate = bitrate->nTargetBitrate;
-    mUpdateFlag |= kUpdateBitrate;
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftAVC::setEncodeArgs(
-        ive_video_encode_ip_t *ps_encode_ip,
-        ive_video_encode_op_t *ps_encode_op,
-        OMX_BUFFERHEADERTYPE *inputBufferHeader,
-        OMX_BUFFERHEADERTYPE *outputBufferHeader) {
-    iv_raw_buf_t *ps_inp_raw_buf;
-    const uint8_t *source;
-    UWORD8 *pu1_buf;
-
-    ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
-    ps_encode_ip->s_out_buf.pv_buf = outputBufferHeader->pBuffer;
-    ps_encode_ip->s_out_buf.u4_bytes = 0;
-    ps_encode_ip->s_out_buf.u4_bufsize = outputBufferHeader->nAllocLen;
-    ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
-    ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
-
-    ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
-    ps_encode_ip->pv_bufs = NULL;
-    ps_encode_ip->pv_mb_info = NULL;
-    ps_encode_ip->pv_pic_info = NULL;
-    ps_encode_ip->u4_mb_info_type = 0;
-    ps_encode_ip->u4_pic_info_type = 0;
-    ps_encode_op->s_out_buf.pv_buf = NULL;
-
-    /* Initialize color formats */
-    ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
-    source = NULL;
-    if ((inputBufferHeader != NULL) && inputBufferHeader->nFilledLen) {
-        OMX_ERRORTYPE error = validateInputBuffer(inputBufferHeader);
-        if (error != OMX_ErrorNone) {
-            ALOGE("b/69065651");
-            android_errorWriteLog(0x534e4554, "69065651");
-            return error;
-        }
-        source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
-
-        if (mInputDataIsMeta) {
-            uint8_t *conversionBuffer = NULL;
-            for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) {
-                if (mConversionBuffersFree[i]) {
-                    mConversionBuffersFree[i] = 0;
-                    conversionBuffer = mConversionBuffers[i];
-                    break;
-                }
-            }
-
-            if (NULL == conversionBuffer) {
-                ALOGE("No free buffers to hold conversion data");
-                return OMX_ErrorUndefined;
-            }
-
-            source = extractGraphicBuffer(
-                    conversionBuffer, (mWidth * mHeight * 3 / 2), source,
-                    inputBufferHeader->nFilledLen, mWidth, mHeight);
-
-            if (source == NULL) {
-                ALOGE("Error in extractGraphicBuffer");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                return OMX_ErrorUndefined;
-            }
-        }
-        ps_encode_ip->u4_is_last = 0;
-        ps_encode_ip->u4_timestamp_high = (inputBufferHeader->nTimeStamp) >> 32;
-        ps_encode_ip->u4_timestamp_low = (inputBufferHeader->nTimeStamp) & 0xFFFFFFFF;
-    }
-    else {
-        if (mSawInputEOS){
-            ps_encode_ip->u4_is_last = 1;
-        }
-        memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
-        ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
-        ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
-        return OMX_ErrorNone;
-    }
-
-    pu1_buf = (UWORD8 *)source;
-    switch (mIvVideoColorFormat) {
-        case IV_YUV_420P:
-        {
-            ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
-            pu1_buf += (mStride) * mHeight;
-            ps_inp_raw_buf->apv_bufs[1] = pu1_buf;
-            pu1_buf += (mStride / 2) * mHeight / 2;
-            ps_inp_raw_buf->apv_bufs[2] = pu1_buf;
-
-            ps_inp_raw_buf->au4_wd[0] = mWidth;
-            ps_inp_raw_buf->au4_wd[1] = mWidth / 2;
-            ps_inp_raw_buf->au4_wd[2] = mWidth / 2;
-
-            ps_inp_raw_buf->au4_ht[0] = mHeight;
-            ps_inp_raw_buf->au4_ht[1] = mHeight / 2;
-            ps_inp_raw_buf->au4_ht[2] = mHeight / 2;
-
-            ps_inp_raw_buf->au4_strd[0] = mStride;
-            ps_inp_raw_buf->au4_strd[1] = (mStride / 2);
-            ps_inp_raw_buf->au4_strd[2] = (mStride / 2);
-            break;
-        }
-
-        case IV_YUV_422ILE:
-        {
-            ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
-            ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
-            ps_inp_raw_buf->au4_ht[0] = mHeight;
-            ps_inp_raw_buf->au4_strd[0] = mStride * 2;
-            break;
-        }
-
-        case IV_YUV_420SP_UV:
-        case IV_YUV_420SP_VU:
-        default:
-        {
-            ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
-            pu1_buf += (mStride) * mHeight;
-            ps_inp_raw_buf->apv_bufs[1] = pu1_buf;
-
-            ps_inp_raw_buf->au4_wd[0] = mWidth;
-            ps_inp_raw_buf->au4_wd[1] = mWidth;
-
-            ps_inp_raw_buf->au4_ht[0] = mHeight;
-            ps_inp_raw_buf->au4_ht[1] = mHeight / 2;
-
-            ps_inp_raw_buf->au4_strd[0] = mStride;
-            ps_inp_raw_buf->au4_strd[1] = mStride;
-            break;
-        }
-    }
-    return OMX_ErrorNone;
-}
-
-void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
-    IV_STATUS_T status;
-    WORD32 timeDelay, timeTaken;
-
-    UNUSED(portIndex);
-
-    // Initialize encoder if not already initialized
-    if (mCodecCtx == NULL) {
-        if (OMX_ErrorNone != initEncoder()) {
-            ALOGE("Failed to initialize encoder");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */);
-            return;
-        }
-    }
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!mSawOutputEOS && !outQueue.empty()) {
-
-        OMX_ERRORTYPE error;
-        ive_video_encode_ip_t s_encode_ip;
-        ive_video_encode_op_t s_encode_op;
-        BufferInfo *outputBufferInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader;
-
-        BufferInfo *inputBufferInfo;
-        OMX_BUFFERHEADERTYPE *inputBufferHeader;
-
-        if (mSawInputEOS) {
-            inputBufferHeader = NULL;
-            inputBufferInfo = NULL;
-        } else if (!inQueue.empty()) {
-            inputBufferInfo = *inQueue.begin();
-            inputBufferHeader = inputBufferInfo->mHeader;
-        } else {
-            return;
-        }
-
-        outputBufferHeader->nTimeStamp = 0;
-        outputBufferHeader->nFlags = 0;
-        outputBufferHeader->nOffset = 0;
-        outputBufferHeader->nFilledLen = 0;
-        outputBufferHeader->nOffset = 0;
-
-        if (inputBufferHeader != NULL) {
-            outputBufferHeader->nFlags = inputBufferHeader->nFlags;
-        }
-
-        uint8_t *outPtr = (uint8_t *)outputBufferHeader->pBuffer;
-
-        if (!mSpsPpsHeaderReceived) {
-            error = setEncodeArgs(&s_encode_ip, &s_encode_op, NULL, outputBufferHeader);
-            if (error != OMX_ErrorNone) {
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                return;
-            }
-            status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
-
-            if (IV_SUCCESS != status) {
-                ALOGE("Encode Frame failed = 0x%x\n",
-                        s_encode_op.u4_error_code);
-            } else {
-                ALOGV("Bytes Generated in header %d\n",
-                        s_encode_op.s_out_buf.u4_bytes);
-            }
-
-            mSpsPpsHeaderReceived = true;
-
-            outputBufferHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
-            outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes;
-            if (inputBufferHeader != NULL) {
-                outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp;
-            }
-
-            outQueue.erase(outQueue.begin());
-            outputBufferInfo->mOwnedByUs = false;
-
-            DUMP_TO_FILE(
-                    mOutFile, outputBufferHeader->pBuffer,
-                    outputBufferHeader->nFilledLen);
-            notifyFillBufferDone(outputBufferHeader);
-
-            setEncMode(IVE_ENC_MODE_PICTURE);
-            return;
-        }
-
-        if (mUpdateFlag) {
-            if (mUpdateFlag & kUpdateBitrate) {
-                setBitRate();
-            }
-            if (mUpdateFlag & kRequestKeyFrame) {
-                setFrameType(IV_IDR_FRAME);
-            }
-            if (mUpdateFlag & kUpdateAIRMode) {
-                setAirParams();
-                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
-                        OMX_IndexConfigAndroidIntraRefresh, NULL);
-            }
-            mUpdateFlag = 0;
-        }
-
-        if ((inputBufferHeader != NULL)
-                && (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
-            mSawInputEOS = true;
-        }
-
-        /* In normal mode, store inputBufferInfo and this will be returned
-           when encoder consumes this input */
-        if (!mInputDataIsMeta && (inputBufferInfo != NULL)) {
-            for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
-                if (NULL == mInputBufferInfo[i]) {
-                    mInputBufferInfo[i] = inputBufferInfo;
-                    break;
-                }
-            }
-        }
-        error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, inputBufferHeader, outputBufferHeader);
-
-        if (error != OMX_ErrorNone) {
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-            return;
-        }
-
-        DUMP_TO_FILE(
-                mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
-                (mHeight * mStride * 3 / 2));
-
-        GETTIME(&mTimeStart, NULL);
-        /* Compute time elapsed between end of previous decode()
-         * to start of current decode() */
-        TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
-
-        if (IV_SUCCESS != status) {
-            ALOGE("Encode Frame failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-            return;
-        }
-
-        GETTIME(&mTimeEnd, NULL);
-        /* Compute time taken for decode() */
-        TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
-
-        ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-                s_encode_op.s_out_buf.u4_bytes);
-
-        /* In encoder frees up an input buffer, mark it as free */
-        if (s_encode_op.s_inp_buf.apv_bufs[0] != NULL) {
-            if (mInputDataIsMeta) {
-                for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) {
-                    if (mConversionBuffers[i] == s_encode_op.s_inp_buf.apv_bufs[0]) {
-                        mConversionBuffersFree[i] = 1;
-                        break;
-                    }
-                }
-            } else {
-                /* In normal mode, call EBD on inBuffeHeader that is freed by the codec */
-                for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
-                    uint8_t *buf = NULL;
-                    OMX_BUFFERHEADERTYPE *bufHdr = NULL;
-                    if (mInputBufferInfo[i] != NULL) {
-                        bufHdr = mInputBufferInfo[i]->mHeader;
-                        buf = bufHdr->pBuffer + bufHdr->nOffset;
-                    }
-                    if (s_encode_op.s_inp_buf.apv_bufs[0] == buf) {
-                        mInputBufferInfo[i]->mOwnedByUs = false;
-                        notifyEmptyBufferDone(bufHdr);
-                        mInputBufferInfo[i] = NULL;
-                        break;
-                    }
-                }
-            }
-        }
-
-        outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes;
-
-        if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
-            outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
-        }
-
-        if (inputBufferHeader != NULL) {
-            inQueue.erase(inQueue.begin());
-
-            /* If in meta data, call EBD on input */
-            /* In case of normal mode, EBD will be done once encoder
-            releases the input buffer */
-            if (mInputDataIsMeta) {
-                inputBufferInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inputBufferHeader);
-            }
-        }
-
-        if (s_encode_op.u4_is_last) {
-            outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-            mSawOutputEOS = true;
-        } else {
-            outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
-        }
-
-        if (outputBufferHeader->nFilledLen || s_encode_op.u4_is_last) {
-            outputBufferHeader->nTimeStamp = s_encode_op.u4_timestamp_high;
-            outputBufferHeader->nTimeStamp <<= 32;
-            outputBufferHeader->nTimeStamp |= s_encode_op.u4_timestamp_low;
-            outputBufferInfo->mOwnedByUs = false;
-            outQueue.erase(outQueue.begin());
-            DUMP_TO_FILE(mOutFile, outputBufferHeader->pBuffer,
-                    outputBufferHeader->nFilledLen);
-            notifyFillBufferDone(outputBufferHeader);
-        }
-
-        if (s_encode_op.u4_is_last == 1) {
-            return;
-        }
-    }
-    return;
-}
-
-void SoftAVC::onReset() {
-    SoftVideoEncoderOMXComponent::onReset();
-
-    if (releaseEncoder() != OMX_ErrorNone) {
-        ALOGW("releaseEncoder failed");
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftAVC(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
deleted file mode 100644
index 6d2e084..0000000
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
- * Copyright 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SOFT_AVC_ENC_H__
-#define __SOFT_AVC_ENC_H__
-
-
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Vector.h>
-
-#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
-
-namespace android {
-
-#define MAX_INPUT_BUFFER_HEADERS 4
-#define MAX_CONVERSION_BUFFERS   4
-#define CODEC_MAX_CORES          4
-#define LEN_STATUS_BUFFER        (10  * 1024)
-#define MAX_VBV_BUFF_SIZE        (120 * 16384)
-#define MAX_NUM_IO_BUFS           3
-
-#define DEFAULT_MAX_REF_FRM         2
-#define DEFAULT_MAX_REORDER_FRM     0
-#define DEFAULT_QP_MIN              10
-#define DEFAULT_QP_MAX              40
-#define DEFAULT_MAX_BITRATE         240000000
-#define DEFAULT_MAX_SRCH_RANGE_X    256
-#define DEFAULT_MAX_SRCH_RANGE_Y    256
-#define DEFAULT_MAX_FRAMERATE       120000
-#define DEFAULT_NUM_CORES           1
-#define DEFAULT_NUM_CORES_PRE_ENC   0
-#define DEFAULT_FPS                 30
-#define DEFAULT_ENC_SPEED           IVE_NORMAL
-
-#define DEFAULT_MEM_REC_CNT         0
-#define DEFAULT_RECON_ENABLE        0
-#define DEFAULT_CHKSUM_ENABLE       0
-#define DEFAULT_START_FRM           0
-#define DEFAULT_NUM_FRMS            0xFFFFFFFF
-#define DEFAULT_INP_COLOR_FORMAT       IV_YUV_420SP_VU
-#define DEFAULT_RECON_COLOR_FORMAT     IV_YUV_420P
-#define DEFAULT_LOOPBACK            0
-#define DEFAULT_SRC_FRAME_RATE      30
-#define DEFAULT_TGT_FRAME_RATE      30
-#define DEFAULT_MAX_WD              1920
-#define DEFAULT_MAX_HT              1920
-#define DEFAULT_MAX_LEVEL           41
-#define DEFAULT_STRIDE              0
-#define DEFAULT_WD                  1280
-#define DEFAULT_HT                  720
-#define DEFAULT_PSNR_ENABLE         0
-#define DEFAULT_ME_SPEED            100
-#define DEFAULT_ENABLE_FAST_SAD     0
-#define DEFAULT_ENABLE_ALT_REF      0
-#define DEFAULT_RC_MODE             IVE_RC_STORAGE
-#define DEFAULT_BITRATE             6000000
-#define DEFAULT_I_QP                22
-#define DEFAULT_I_QP_MAX            DEFAULT_QP_MAX
-#define DEFAULT_I_QP_MIN            DEFAULT_QP_MIN
-#define DEFAULT_P_QP                28
-#define DEFAULT_P_QP_MAX            DEFAULT_QP_MAX
-#define DEFAULT_P_QP_MIN            DEFAULT_QP_MIN
-#define DEFAULT_B_QP                22
-#define DEFAULT_B_QP_MAX            DEFAULT_QP_MAX
-#define DEFAULT_B_QP_MIN            DEFAULT_QP_MIN
-#define DEFAULT_AIR                 IVE_AIR_MODE_NONE
-#define DEFAULT_AIR_REFRESH_PERIOD  30
-#define DEFAULT_SRCH_RNG_X          64
-#define DEFAULT_SRCH_RNG_Y          48
-#define DEFAULT_I_INTERVAL          30
-#define DEFAULT_IDR_INTERVAL        1000
-#define DEFAULT_B_FRAMES            0
-#define DEFAULT_DISABLE_DEBLK_LEVEL 0
-#define DEFAULT_HPEL                1
-#define DEFAULT_QPEL                1
-#define DEFAULT_I4                  1
-#define DEFAULT_EPROFILE            IV_PROFILE_BASE
-#define DEFAULT_ENTROPY_MODE        0
-#define DEFAULT_SLICE_MODE          IVE_SLICE_MODE_NONE
-#define DEFAULT_SLICE_PARAM         256
-#define DEFAULT_ARCH                ARCH_ARM_A9Q
-#define DEFAULT_SOC                 SOC_GENERIC
-#define DEFAULT_INTRA4x4            0
-#define STRLENGTH                   500
-#define DEFAULT_CONSTRAINED_INTRA   0
-
-#define MIN(a, b) ((a) < (b))? (a) : (b)
-#define MAX(a, b) ((a) > (b))? (a) : (b)
-#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
-#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
-#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
-
-/** Used to remove warnings about unused parameters */
-#define UNUSED(x) ((void)(x))
-
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b);
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
-#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
-#define ive_aligned_free(buf) free(buf)
-
-struct SoftAVC : public SoftVideoEncoderOMXComponent {
-    SoftAVC(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    // Override SimpleSoftOMXComponent methods
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-protected:
-    virtual ~SoftAVC();
-
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers = 2,
-    };
-
-    enum {
-        kUpdateBitrate            = 1 << 0,
-        kRequestKeyFrame          = 1 << 1,
-        kUpdateAIRMode            = 1 << 2,
-    };
-
-    // OMX input buffer's timestamp and flags
-    typedef struct {
-        int64_t mTimeUs;
-        int32_t mFlags;
-    } InputBufferInfo;
-
-    int32_t  mStride;
-
-    struct timeval mTimeStart;   // Time at the start of decode()
-    struct timeval mTimeEnd;     // Time at the end of decode()
-
-    int mUpdateFlag;
-
-#ifdef FILE_DUMP_ENABLE
-    char mInFile[200];
-    char mOutFile[200];
-#endif /* FILE_DUMP_ENABLE */
-
-    IV_COLOR_FORMAT_T mIvVideoColorFormat;
-
-    IV_PROFILE_T mAVCEncProfile;
-    WORD32   mAVCEncLevel;
-    bool     mStarted;
-    bool     mSpsPpsHeaderReceived;
-
-    bool     mSawInputEOS;
-    bool     mSawOutputEOS;
-    bool     mSignalledError;
-    bool     mIntra4x4;
-    bool     mEnableFastSad;
-    bool     mEnableAltRef;
-    bool     mReconEnable;
-    bool     mPSNREnable;
-    bool     mEntropyMode;
-    bool     mConstrainedIntraFlag;
-    IVE_SPEED_CONFIG     mEncSpeed;
-
-    uint8_t *mConversionBuffers[MAX_CONVERSION_BUFFERS];
-    bool     mConversionBuffersFree[MAX_CONVERSION_BUFFERS];
-    BufferInfo *mInputBufferInfo[MAX_INPUT_BUFFER_HEADERS];
-    iv_obj_t *mCodecCtx;         // Codec context
-    iv_mem_rec_t *mMemRecords;   // Memory records requested by the codec
-    size_t mNumMemRecords;       // Number of memory records requested by codec
-    size_t mNumCores;            // Number of cores used by the codec
-
-    UWORD32 mHeaderGenerated;
-    UWORD32 mBframes;
-    IV_ARCH_T mArch;
-    IVE_SLICE_MODE_T mSliceMode;
-    UWORD32 mSliceParam;
-    bool mHalfPelEnable;
-    UWORD32 mIInterval;
-    UWORD32 mIDRInterval;
-    UWORD32 mDisableDeblkLevel;
-    IVE_AIR_MODE_T mAIRMode;
-    UWORD32 mAIRRefreshPeriod;
-
-    void initEncParams();
-    OMX_ERRORTYPE initEncoder();
-    OMX_ERRORTYPE releaseEncoder();
-
-    // Verifies the component role tried to be set to this OMX component is
-    // strictly video_encoder.avc
-    OMX_ERRORTYPE internalSetRoleParams(
-        const OMX_PARAM_COMPONENTROLETYPE *role);
-
-    // Updates bitrate to reflect port settings.
-    OMX_ERRORTYPE internalSetBitrateParams(
-        const OMX_VIDEO_PARAM_BITRATETYPE *bitrate);
-
-    OMX_ERRORTYPE internalSetConfig(
-        OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig);
-
-    OMX_ERRORTYPE getConfig(
-        OMX_INDEXTYPE index, const OMX_PTR _params);
-
-    // Handles port definition changes.
-    OMX_ERRORTYPE internalSetPortParams(
-        const OMX_PARAM_PORTDEFINITIONTYPE *port);
-
-    OMX_ERRORTYPE internalSetFormatParams(
-        const OMX_VIDEO_PARAM_PORTFORMATTYPE *format);
-
-    OMX_ERRORTYPE setFrameType(IV_PICTURE_CODING_TYPE_T  e_frame_type);
-    OMX_ERRORTYPE setQp();
-    OMX_ERRORTYPE setEncMode(IVE_ENC_MODE_T e_enc_mode);
-    OMX_ERRORTYPE setDimensions();
-    OMX_ERRORTYPE setNumCores();
-    OMX_ERRORTYPE setFrameRate();
-    OMX_ERRORTYPE setIpeParams();
-    OMX_ERRORTYPE setBitRate();
-    OMX_ERRORTYPE setAirParams();
-    OMX_ERRORTYPE setMeParams();
-    OMX_ERRORTYPE setGopParams();
-    OMX_ERRORTYPE setProfileParams();
-    OMX_ERRORTYPE setDeblockParams();
-    OMX_ERRORTYPE setVbvParams();
-    void logVersion();
-    OMX_ERRORTYPE setEncodeArgs(
-        ive_video_encode_ip_t *ps_encode_ip,
-        ive_video_encode_op_t *ps_encode_op,
-        OMX_BUFFERHEADERTYPE *inputBufferHeader,
-        OMX_BUFFERHEADERTYPE *outputBufferHeader);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
-};
-
-#ifdef FILE_DUMP_ENABLE
-
-#define INPUT_DUMP_PATH     "/sdcard/media/avce_input"
-#define INPUT_DUMP_EXT      "yuv"
-#define OUTPUT_DUMP_PATH    "/sdcard/media/avce_output"
-#define OUTPUT_DUMP_EXT     "h264"
-
-#define GENERATE_FILE_NAMES() {                         \
-    GETTIME(&mTimeStart, NULL);                         \
-    strcpy(mInFile, "");                                \
-    sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
-            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
-            INPUT_DUMP_EXT);                            \
-    strcpy(mOutFile, "");                               \
-    sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
-            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
-            OUTPUT_DUMP_EXT);                           \
-}
-
-#define CREATE_DUMP_FILE(m_filename) {                  \
-    FILE *fp = fopen(m_filename, "wb");                 \
-    if (fp != NULL) {                                   \
-        ALOGD("Opened file %s", m_filename);            \
-        fclose(fp);                                     \
-    } else {                                            \
-        ALOGD("Could not open file %s", m_filename);    \
-    }                                                   \
-}
-#define DUMP_TO_FILE(m_filename, m_buf, m_size)         \
-{                                                       \
-    FILE *fp = fopen(m_filename, "ab");                 \
-    if (fp != NULL && m_buf != NULL) {                  \
-        int i;                                          \
-        i = fwrite(m_buf, 1, m_size, fp);               \
-        ALOGD("fwrite ret %d to write %d", i, m_size);  \
-        if (i != (int)m_size) {                         \
-            ALOGD("Error in fwrite, returned %d", i);   \
-            perror("Error in write to file");           \
-        }                                               \
-        fclose(fp);                                     \
-    } else {                                            \
-        ALOGD("Could not write to file %s", m_filename);\
-        if (fp != NULL)                                 \
-            fclose(fp);                                 \
-    }                                                   \
-}
-#else /* FILE_DUMP_ENABLE */
-#define INPUT_DUMP_PATH
-#define INPUT_DUMP_EXT
-#define OUTPUT_DUMP_PATH
-#define OUTPUT_DUMP_EXT
-#define GENERATE_FILE_NAMES()
-#define CREATE_DUMP_FILE(m_filename)
-#define DUMP_TO_FILE(m_filename, m_buf, m_size)
-#endif /* FILE_DUMP_ENABLE */
-
-}  // namespace android
-
-#endif  // __SOFT_AVC_ENC_H__
diff --git a/media/libstagefright/codecs/avcenc/exports.lds b/media/libstagefright/codecs/avcenc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/avcenc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/flac/dec/Android.bp b/media/libstagefright/codecs/flac/dec/Android.bp
deleted file mode 100644
index d06e0b7..0000000
--- a/media/libstagefright/codecs/flac/dec/Android.bp
+++ /dev/null
@@ -1,41 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_flac_dec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_flac_dec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_flacdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: [
-        "SoftFlacDecoder.cpp",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    shared_libs: [
-        "libstagefright_flacdec",
-    ],
-}
diff --git a/media/libstagefright/codecs/flac/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/flac/dec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/flac/dec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/flac/dec/NOTICE b/media/libstagefright/codecs/flac/dec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/flac/dec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
deleted file mode 100644
index d6448d3..0000000
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftFlacDecoder"
-#include <utils/Log.h>
-
-#include "SoftFlacDecoder.h"
-#include <OMX_AudioExt.h>
-#include <OMX_IndexExt.h>
-
-#include <cutils/properties.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaErrors.h>
-#include <utils/misc.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftFlacDecoder::SoftFlacDecoder(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mFLACDecoder(NULL),
-      mInputBufferCount(0),
-      mHasStreamInfo(false),
-      mSignalledError(false),
-      mSawInputEOS(false),
-      mFinishedDecoder(false),
-      mOutputPortSettingsChange(NONE) {
-    ALOGV("ctor:");
-    memset(&mStreamInfo, 0, sizeof(mStreamInfo));
-    initPorts();
-    initDecoder();
-}
-
-SoftFlacDecoder::~SoftFlacDecoder() {
-    ALOGV("dtor:");
-    delete mFLACDecoder;
-}
-
-void SoftFlacDecoder::initPorts() {
-    ALOGV("initPorts:");
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumInputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 32768;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/flac");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingFLAC;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumOutputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kNumSamplesPerFrame * FLACDecoder::kMaxChannels * sizeof(float);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = sizeof(float);
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-void SoftFlacDecoder::initDecoder() {
-    ALOGV("initDecoder:");
-    mFLACDecoder = FLACDecoder::Create();
-    if (mFLACDecoder == NULL) {
-        ALOGE("initDecoder: failed to create FLACDecoder");
-        mSignalledError = true;
-    }
-}
-
-OMX_ERRORTYPE SoftFlacDecoder::initCheck() const {
-    if (mSignalledError) {
-        if (mFLACDecoder == NULL) {
-            ALOGE("initCheck: failed due to NULL encoder");
-            return OMX_ErrorDynamicResourcesUnavailable;
-        }
-        return OMX_ErrorUndefined;
-    }
-
-    return SimpleSoftOMXComponent::initCheck();
-}
-
-OMX_ERRORTYPE SoftFlacDecoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    ALOGV("internalGetParameter: index(%x)", index);
-    switch ((OMX_U32)index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingFLAC : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-        case OMX_IndexParamAudioFlac:
-        {
-            OMX_AUDIO_PARAM_FLACTYPE *flacParams =
-                (OMX_AUDIO_PARAM_FLACTYPE *)params;
-
-            if (!isValidOMXParam(flacParams)) {
-                ALOGE("internalGetParameter(OMX_IndexParamAudioFlac): invalid omx params");
-                return OMX_ErrorBadParameter;
-            }
-
-            if (flacParams->nPortIndex != 0) {
-                ALOGE("internalGetParameter(OMX_IndexParamAudioFlac): bad port index");
-                return OMX_ErrorBadPortIndex;
-            }
-
-            flacParams->nCompressionLevel = 0;
-
-            if (isConfigured()) {
-                flacParams->nChannels = mStreamInfo.channels;
-                flacParams->nSampleRate = mStreamInfo.sample_rate;
-            } else {
-                flacParams->nChannels = 2;
-                flacParams->nSampleRate = 44100;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                ALOGE("internalGetParameter(OMX_IndexParamAudioPcm): invalid omx params");
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                ALOGE("internalGetParameter(OMX_IndexParamAudioPcm): bad port index");
-                return OMX_ErrorBadPortIndex;
-            }
-
-            pcmParams->eNumData = mNumericalData;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = mBitsPerSample;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-            pcmParams->eChannelMapping[2] = OMX_AUDIO_ChannelCF;
-            pcmParams->eChannelMapping[3] = OMX_AUDIO_ChannelLFE;
-            pcmParams->eChannelMapping[4] = OMX_AUDIO_ChannelLS;
-            pcmParams->eChannelMapping[5] = OMX_AUDIO_ChannelRS;
-
-            if (isConfigured()) {
-                pcmParams->nChannels = mStreamInfo.channels;
-                pcmParams->nSamplingRate = mStreamInfo.sample_rate;
-            } else {
-                pcmParams->nChannels = 2;
-                pcmParams->nSamplingRate = 44100;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftFlacDecoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    ALOGV("internalSetParameter: index(%x)", (int)index);
-    switch ((int)index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.flac",
-                        OMX_MAX_STRINGNAME_SIZE - 1) != 0) {
-                return OMX_ErrorInvalidComponentName;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingFLAC)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            if (pcmParams->eNumData == OMX_NumericalDataFloat && pcmParams->nBitPerSample == 32) {
-                mNumericalData = OMX_NumericalDataFloat;
-                mBitsPerSample = 32;
-            } else if (pcmParams->eNumData == OMX_NumericalDataSigned
-                     && pcmParams->nBitPerSample == 16) {
-                mNumericalData = OMX_NumericalDataSigned;
-                mBitsPerSample = 16;
-            } else {
-                ALOGE("Invalid eNumData %d, nBitsPerSample %d",
-                        pcmParams->eNumData, pcmParams->nBitPerSample);
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftFlacDecoder::isConfigured() const {
-    return mHasStreamInfo;
-}
-
-void SoftFlacDecoder::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    const bool outputFloat = mNumericalData == OMX_NumericalDataFloat;
-
-    ALOGV("onQueueFilled %d/%d:", inQueue.empty(), outQueue.empty());
-    while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty() && !mFinishedDecoder) {
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-        void *outBuffer = reinterpret_cast<void *>(outHeader->pBuffer + outHeader->nOffset);
-        size_t outBufferSize = outHeader->nAllocLen - outHeader->nOffset;
-        int64_t timeStamp = 0;
-
-        if (!inQueue.empty()) {
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-            uint8_t* inBuffer = inHeader->pBuffer + inHeader->nOffset;
-            uint32_t inBufferLength = inHeader->nFilledLen;
-            ALOGV("input: %u bytes", inBufferLength);
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                ALOGV("saw EOS");
-                mSawInputEOS = true;
-                if (mInputBufferCount == 0 && inHeader->nFilledLen == 0) {
-                    // first buffer was empty and EOS: signal EOS on output and return
-                    ALOGV("empty first EOS");
-                    outHeader->nFilledLen = 0;
-                    outHeader->nTimeStamp = inHeader->nTimeStamp;
-                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    notifyFillBufferDone(outHeader);
-                    mFinishedDecoder = true;
-                    inInfo->mOwnedByUs = false;
-                    inQueue.erase(inQueue.begin());
-                    notifyEmptyBufferDone(inHeader);
-                    return;
-                }
-            }
-
-            if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
-                ALOGE("onQueueFilled: first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
-                inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
-            }
-            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
-                ALOGV("received config buffer of size %u", inBufferLength);
-                status_t decoderErr = mFLACDecoder->parseMetadata(inBuffer, inBufferLength);
-                mInputBufferCount++;
-
-                if (decoderErr != OK && decoderErr != WOULD_BLOCK) {
-                    ALOGE("onQueueFilled: FLACDecoder parseMetaData returns error %d", decoderErr);
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
-                    return;
-                }
-
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                notifyEmptyBufferDone(inHeader);
-
-                if (decoderErr == WOULD_BLOCK) {
-                    continue;
-                }
-                mStreamInfo = mFLACDecoder->getStreamInfo();
-                mHasStreamInfo = true;
-
-                // Only send out port settings changed event if both sample rate
-                // and numChannels are valid.
-                if (mStreamInfo.sample_rate && mStreamInfo.channels) {
-                    ALOGD("onQueueFilled: initially configuring decoder: %d Hz, %d channels",
-                        mStreamInfo.sample_rate, mStreamInfo.channels);
-
-                    notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                    mOutputPortSettingsChange = AWAITING_DISABLED;
-                }
-                return;
-            }
-
-            status_t decoderErr = mFLACDecoder->decodeOneFrame(
-                    inBuffer, inBufferLength, outBuffer, &outBufferSize, outputFloat);
-            if (decoderErr != OK) {
-                ALOGE("onQueueFilled: FLACDecoder decodeOneFrame returns error %d", decoderErr);
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
-                return;
-            }
-
-            mInputBufferCount++;
-            timeStamp = inHeader->nTimeStamp;
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            notifyEmptyBufferDone(inHeader);
-
-            if (outBufferSize == 0) {
-                ALOGV("no output, trying again");
-                continue;
-            }
-        } else if (mSawInputEOS) {
-            status_t decoderErr = mFLACDecoder->decodeOneFrame(
-                    nullptr /* inBuffer */, 0 /* inBufferLen */,
-                    outBuffer, &outBufferSize, outputFloat);
-            mFinishedDecoder = true;
-            if (decoderErr != OK) {
-                ALOGE("onQueueFilled: FLACDecoder finish returns error %d", decoderErr);
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
-                return;
-            }
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        } else {
-            // no more input buffers at this time, loop and see if there is more output
-            continue;
-        }
-
-        outHeader->nFilledLen = outBufferSize;
-        outHeader->nTimeStamp = timeStamp;
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        notifyFillBufferDone(outHeader);
-    }
-}
-
-void SoftFlacDecoder::onPortFlushCompleted(OMX_U32 portIndex) {
-    ALOGV("onPortFlushCompleted: portIndex(%u)", portIndex);
-    if (portIndex == 0) {
-        drainDecoder();
-    }
-}
-
-void SoftFlacDecoder::drainDecoder() {
-    mFLACDecoder->flush();
-    mSawInputEOS = false;
-    mFinishedDecoder = false;
-}
-
-void SoftFlacDecoder::onReset() {
-    ALOGV("onReset");
-    drainDecoder();
-
-    memset(&mStreamInfo, 0, sizeof(mStreamInfo));
-    mHasStreamInfo = false;
-    mInputBufferCount = 0;
-    mSignalledError = false;
-    mOutputPortSettingsChange = NONE;
-}
-
-void SoftFlacDecoder::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    ALOGV("onPortEnableCompleted: portIndex(%u), enabled(%d)", portIndex, enabled);
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            PortInfo *info = editPortInfo(1 /* portIndex */);
-            if (!info->mDef.bEnabled) {
-                info->mDef.nBufferSize =
-                        mStreamInfo.max_blocksize * mStreamInfo.channels * sizeof(float);
-            }
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    ALOGV("createSoftOMXComponent: flac decoder");
-    return new android::SoftFlacDecoder(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
deleted file mode 100644
index ba02074..0000000
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_FLAC_DECODER_H
-#define SOFT_FLAC_DECODER_H
-
-#include "FLACDecoder.h"
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-namespace android {
-
-struct SoftFlacDecoder : public SimpleSoftOMXComponent {
-    SoftFlacDecoder(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    virtual OMX_ERRORTYPE initCheck() const override;
-
-protected:
-    virtual ~SoftFlacDecoder();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params) override;
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params) override;
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex) override;
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled) override;
-    virtual void onReset() override;
-
-private:
-    static constexpr unsigned int kNumSamplesPerFrame = 2048; // adjusted based on stream.
-
-    enum {
-        kNumInputBuffers   = 4,
-        kNumOutputBuffers  = 4,
-    };
-    OMX_NUMERICALDATATYPE mNumericalData = OMX_NumericalDataSigned;
-    OMX_U32 mBitsPerSample = 16;
-
-    FLACDecoder *mFLACDecoder;
-    FLAC__StreamMetadata_StreamInfo mStreamInfo;
-    size_t mInputBufferCount;
-    bool mHasStreamInfo;
-    bool mSignalledError;
-    bool mSawInputEOS;
-    bool mFinishedDecoder;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    void initPorts();
-    void initDecoder();
-    bool isConfigured() const;
-    void drainDecoder();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftFlacDecoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_FLAC_DECODER_H
diff --git a/media/libstagefright/codecs/flac/dec/exports.lds b/media/libstagefright/codecs/flac/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/flac/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
deleted file mode 100644
index 59a4675..0000000
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ /dev/null
@@ -1,43 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_flac_enc_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_flac_enc_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_flacenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftFlacEncoder.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    header_libs: ["libbase_headers"],
-    shared_libs: [
-        "libaudioutils",
-    ],
-    static_libs: [
-        "libFLAC",
-    ],
-}
diff --git a/media/libstagefright/codecs/flac/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/flac/enc/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/flac/enc/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/flac/enc/NOTICE b/media/libstagefright/codecs/flac/enc/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/flac/enc/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
deleted file mode 100644
index 24216a2..0000000
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ /dev/null
@@ -1,601 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftFlacEncoder"
-#include <android-base/macros.h>
-#include <utils/Log.h>
-
-#include "SoftFlacEncoder.h"
-#include <audio_utils/primitives.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-#define FLAC_COMPRESSION_LEVEL_MIN     0
-#define FLAC_COMPRESSION_LEVEL_DEFAULT 5
-#define FLAC_COMPRESSION_LEVEL_MAX     8
-
-#if LOG_NDEBUG
-#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
-#else
-#define UNUSED_UNLESS_VERBOSE(x)
-#endif
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftFlacEncoder::SoftFlacEncoder(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mSignalledError(false),
-      mNumChannels(1),
-      mSampleRate(44100),
-      mCompressionLevel(FLAC_COMPRESSION_LEVEL_DEFAULT),
-      mEncoderWriteData(false),
-      mEncoderReturnedEncodedData(false),
-      mSawInputEOS(false),
-      mSentOutputEOS(false),
-      mEncoderReturnedNbBytes(0),
-      mInputBufferPcm32(NULL),
-      mHeaderOffset(0),
-      mHeaderComplete(false),
-      mWroteHeader(false)
-{
-    ALOGV("SoftFlacEncoder::SoftFlacEncoder(name=%s)", name);
-    initPorts();
-
-    mFlacStreamEncoder = FLAC__stream_encoder_new();
-    if (mFlacStreamEncoder == NULL) {
-        ALOGE("SoftFlacEncoder::SoftFlacEncoder(name=%s) error instantiating FLAC encoder", name);
-        mSignalledError = true;
-    }
-
-    if (!mSignalledError) { // no use allocating input buffer if we had an error above
-        // 2x the pcm16 samples can exist with the same size as pcmFloat samples.
-        mInputBufferPcm32 = (FLAC__int32*) malloc(
-                sizeof(FLAC__int32) * kNumSamplesPerFrame * kMaxChannels * 2);
-        if (mInputBufferPcm32 == NULL) {
-            ALOGE("SoftFlacEncoder::SoftFlacEncoder(name=%s) error allocating internal input buffer", name);
-            mSignalledError = true;
-        }
-    }
-}
-
-SoftFlacEncoder::~SoftFlacEncoder() {
-    ALOGV("SoftFlacEncoder::~SoftFlacEncoder()");
-    if (mFlacStreamEncoder != NULL) {
-        FLAC__stream_encoder_delete(mFlacStreamEncoder);
-        mFlacStreamEncoder = NULL;
-    }
-    free(mInputBufferPcm32);
-    mInputBufferPcm32 = NULL;
-}
-
-OMX_ERRORTYPE SoftFlacEncoder::initCheck() const {
-    if (mSignalledError) {
-        if (mFlacStreamEncoder == NULL) {
-            ALOGE("initCheck() failed due to NULL encoder");
-        } else if (mInputBufferPcm32 == NULL) {
-            ALOGE("initCheck() failed due to error allocating internal input buffer");
-        }
-        return OMX_ErrorUndefined;
-    } else {
-        return SimpleSoftOMXComponent::initCheck();
-    }
-}
-
-void SoftFlacEncoder::initPorts() {
-    ALOGV("SoftFlacEncoder::initPorts()");
-
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    // configure input port of the encoder
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxInputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = sizeof(float);
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-
-    // configure output port of the encoder
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxOutputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_AUDIO_FLAC);
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingFLAC;
-
-    addPort(def);
-}
-
-OMX_ERRORTYPE SoftFlacEncoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    ALOGV("SoftFlacEncoder::internalGetParameter(index=0x%x)", index);
-
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingPCM : OMX_AUDIO_CodingFLAC;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = mNumericalData;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = mBitsPerSample;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = mNumChannels;
-            pcmParams->nSamplingRate = mSampleRate;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioFlac:
-        {
-            OMX_AUDIO_PARAM_FLACTYPE *flacParams = (OMX_AUDIO_PARAM_FLACTYPE *)params;
-
-            if (!isValidOMXParam(flacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (flacParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            flacParams->nCompressionLevel = mCompressionLevel;
-            flacParams->nChannels = mNumChannels;
-            flacParams->nSampleRate = mSampleRate;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingFLAC)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            ALOGV("SoftFlacEncoder::internalSetParameter(OMX_IndexParamAudioPcm)");
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                ALOGE("SoftFlacEncoder::internalSetParameter() Error #1");
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nChannels < 1 || pcmParams->nChannels > kMaxChannels) {
-                return OMX_ErrorUndefined;
-            }
-
-            mNumChannels = pcmParams->nChannels;
-            mSampleRate = pcmParams->nSamplingRate;
-
-            if (pcmParams->eNumData == OMX_NumericalDataFloat && pcmParams->nBitPerSample == 32) {
-                mNumericalData = OMX_NumericalDataFloat;
-                mBitsPerSample = 32;
-            } else if (pcmParams->eNumData == OMX_NumericalDataSigned
-                     && pcmParams->nBitPerSample == 16) {
-                mNumericalData = OMX_NumericalDataSigned;
-                mBitsPerSample = 16;
-            } else {
-                ALOGE("%s: invalid eNumData %d, nBitsPerSample %d",
-                        __func__, pcmParams->eNumData, pcmParams->nBitPerSample);
-                return OMX_ErrorUndefined;
-            }
-
-            ALOGV("will encode %d channels at %dHz", mNumChannels, mSampleRate);
-
-            return configureEncoder();
-        }
-
-        case OMX_IndexParamStandardComponentRole:
-        {
-            ALOGV("SoftFlacEncoder::internalSetParameter(OMX_IndexParamStandardComponentRole)");
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                    "audio_encoder.flac",
-                    OMX_MAX_STRINGNAME_SIZE - 1)) {
-                ALOGE("SoftFlacEncoder::internalSetParameter(OMX_IndexParamStandardComponentRole)"
-                        "error");
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioFlac:
-        {
-            // used only for setting the compression level
-            OMX_AUDIO_PARAM_FLACTYPE *flacParams = (OMX_AUDIO_PARAM_FLACTYPE *)params;
-
-            if (!isValidOMXParam(flacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (flacParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mCompressionLevel = flacParams->nCompressionLevel; // range clamping done inside encoder
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamPortDefinition:
-        {
-            OMX_PARAM_PORTDEFINITIONTYPE *defParams =
-                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
-
-            if (!isValidOMXParam(defParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (defParams->nPortIndex == 0) {
-                if (defParams->nBufferSize > kMaxInputBufferSize) {
-                    ALOGE("Input buffer size must be at most %d bytes",
-                        kMaxInputBufferSize);
-                    return OMX_ErrorUnsupportedSetting;
-                }
-            }
-
-            FALLTHROUGH_INTENDED;
-        }
-
-        default:
-            ALOGV("SoftFlacEncoder::internalSetParameter(default)");
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
-    UNUSED_UNLESS_VERBOSE(portIndex);
-    ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%d)", portIndex);
-
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    const bool inputFloat = mNumericalData == OMX_NumericalDataFloat;
-    const size_t sampleSize = inputFloat ? sizeof(float) : sizeof(int16_t);
-    const size_t frameSize = sampleSize * mNumChannels;
-
-    FLAC__bool ok = true;
-
-    while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty() && !mSentOutputEOS) {
-        if (!inQueue.empty()) {
-            BufferInfo *inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                ALOGV("saw EOS on buffer of size %u", inHeader->nFilledLen);
-                mSawInputEOS = true;
-            }
-
-            if (inHeader->nFilledLen > kMaxInputBufferSize) {
-                ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-
-            assert(mNumChannels != 0);
-            mEncoderWriteData = true;
-            mEncoderReturnedEncodedData = false;
-            mEncoderReturnedNbBytes = 0;
-            if (inHeader->nFilledLen) {
-                mCurrentInputTimeStamp = inHeader->nTimeStamp;
-
-                const unsigned nbInputFrames = inHeader->nFilledLen / frameSize;
-                const unsigned nbInputSamples = inHeader->nFilledLen / sampleSize;
-
-                if (inputFloat) {
-                    CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels);
-                    const float * const pcmFloat = reinterpret_cast<float *>(inHeader->pBuffer);
-                     memcpy_to_q8_23_from_float_with_clamp(
-                             mInputBufferPcm32, pcmFloat, nbInputSamples);
-                } else {
-                    // note nbInputSamples may be 2x as large for pcm16 data.
-                    CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels * 2);
-                    const int16_t * const pcm16 = reinterpret_cast<int16_t *>(inHeader->pBuffer);
-                    for (unsigned i = 0; i < nbInputSamples; ++i) {
-                        mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
-                    }
-                }
-                ALOGV(" about to encode %u samples per channel", nbInputFrames);
-                ok = FLAC__stream_encoder_process_interleaved(
-                                mFlacStreamEncoder,
-                                mInputBufferPcm32,
-                                nbInputFrames /*samples per channel*/ );
-            }
-
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if (ok) {
-            ALOGV("encoded %d, bytes %lld, eos %d", mEncoderReturnedEncodedData,
-                  (long long )mEncoderReturnedNbBytes, mSawInputEOS);
-            if (mSawInputEOS && !mEncoderReturnedEncodedData) {
-                ALOGV("finishing encoder");
-                mSentOutputEOS = true;
-                FLAC__stream_encoder_finish(mFlacStreamEncoder);
-                outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-            }
-            if (mSawInputEOS || mEncoderReturnedEncodedData) {
-                ALOGV(" dequeueing buffer on output port after writing data");
-                outInfo->mOwnedByUs = false;
-                outQueue.erase(outQueue.begin());
-                outInfo = NULL;
-                notifyFillBufferDone(outHeader);
-                outHeader = NULL;
-                mEncoderReturnedEncodedData = false;
-            }
-        } else {
-            ALOGE(" error encountered during encoding");
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            return;
-        }
-
-    }
-}
-
-FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
-            const FLAC__byte buffer[],
-            size_t bytes, unsigned samples,
-            unsigned current_frame) {
-    UNUSED_UNLESS_VERBOSE(current_frame);
-    ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)",
-            bytes, samples, current_frame);
-
-    if (samples == 0) {
-        ALOGV("saving %zu bytes of header", bytes);
-        if (mHeaderOffset + bytes > sizeof(mHeader) || mHeaderComplete) {
-            ALOGW("header is too big, or header already received");
-            mSignalledError = true;
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-        } else {
-            memcpy(mHeader + mHeaderOffset, buffer, bytes);
-            mHeaderOffset += bytes;// will contain header size when finished receiving header
-            if (buffer[0] & 0x80) {
-                mHeaderComplete = true;
-            }
-        }
-        return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
-    }
-
-    if ((samples == 0) || !mEncoderWriteData) {
-        // called by the encoder because there's header data to save, but it's not the role
-        // of this component (unless WRITE_FLAC_HEADER_IN_FIRST_BUFFER is defined)
-        ALOGV("ignoring %zu bytes of header data (samples=%d)", bytes, samples);
-        return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
-    }
-
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    CHECK(!outQueue.empty());
-    BufferInfo *outInfo = *outQueue.begin();
-    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-    if (mHeaderComplete && !mWroteHeader) {
-        ALOGV(" writing %d bytes of header on output port", mHeaderOffset);
-        memcpy(outHeader->pBuffer + outHeader->nOffset + outHeader->nFilledLen,
-                mHeader, mHeaderOffset);
-        outHeader->nFilledLen += mHeaderOffset;
-        mWroteHeader = true;
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
-        notifyFillBufferDone(outHeader);
-        outInfo = NULL;
-        outHeader = NULL;
-        // get the next buffer for the rest of the data
-        CHECK(!outQueue.empty());
-        outInfo = *outQueue.begin();
-        outHeader = outInfo->mHeader;
-    }
-
-    // write encoded data
-    ALOGV(" writing %zu bytes of encoded data on output port", bytes);
-    if (bytes > outHeader->nAllocLen - outHeader->nOffset - outHeader->nFilledLen) {
-        ALOGE(" not enough space left to write encoded data, dropping %zu bytes", bytes);
-        // a fatal error would stop the encoding
-        return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
-    }
-    memcpy(outHeader->pBuffer + outHeader->nOffset, buffer, bytes);
-
-    outHeader->nTimeStamp = mCurrentInputTimeStamp;
-    outHeader->nOffset = 0;
-    outHeader->nFilledLen += bytes;
-    outHeader->nFlags = 0;
-
-    mEncoderReturnedEncodedData = true;
-    mEncoderReturnedNbBytes += bytes;
-
-    return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
-}
-
-
-OMX_ERRORTYPE SoftFlacEncoder::configureEncoder() {
-    ALOGV("SoftFlacEncoder::configureEncoder() numChannel=%d, sampleRate=%d",
-            mNumChannels, mSampleRate);
-
-    if (mSignalledError || (mFlacStreamEncoder == NULL)) {
-        ALOGE("can't configure encoder: no encoder or invalid state");
-        return OMX_ErrorInvalidState;
-    }
-
-    const bool inputFloat = mNumericalData == OMX_NumericalDataFloat;
-    const int codecBitsPerSample = inputFloat ? 24 : 16;
-    FLAC__bool ok = true;
-    ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mNumChannels);
-    ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mSampleRate);
-    ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, codecBitsPerSample);
-    ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder,
-            (unsigned)mCompressionLevel);
-    ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false);
-    if (!ok) { goto return_result; }
-
-    ok &= FLAC__STREAM_ENCODER_INIT_STATUS_OK ==
-            FLAC__stream_encoder_init_stream(mFlacStreamEncoder,
-                    flacEncoderWriteCallback    /*write_callback*/,
-                    NULL /*seek_callback*/,
-                    NULL /*tell_callback*/,
-                    NULL /*metadata_callback*/,
-                    (void *) this /*client_data*/);
-
-return_result:
-    if (ok) {
-        ALOGV("encoder successfully configured");
-        return OMX_ErrorNone;
-    } else {
-        ALOGE("unknown error when configuring encoder");
-        return OMX_ErrorUndefined;
-    }
-}
-
-
-// static
-FLAC__StreamEncoderWriteStatus SoftFlacEncoder::flacEncoderWriteCallback(
-            const FLAC__StreamEncoder * /* encoder */,
-            const FLAC__byte buffer[],
-            size_t bytes,
-            unsigned samples,
-            unsigned current_frame,
-            void *client_data) {
-    return ((SoftFlacEncoder*) client_data)->onEncodedFlacAvailable(
-            buffer, bytes, samples, current_frame);
-}
-
-}  // namespace android
-
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftFlacEncoder(name, callbacks, appData, component);
-}
-
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
deleted file mode 100644
index 722fc13..0000000
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_FLAC_ENC_H_
-
-#define SOFT_FLAC_ENC_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include "FLAC/stream_encoder.h"
-
-namespace android {
-
-struct SoftFlacEncoder : public SimpleSoftOMXComponent {
-    SoftFlacEncoder(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    virtual OMX_ERRORTYPE initCheck() const;
-
-protected:
-    virtual ~SoftFlacEncoder();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-private:
-    const unsigned int kNumBuffers = 2;
-    static constexpr unsigned int kMaxChannels = 2;
-    static constexpr unsigned int kNumSamplesPerFrame = 1152;
-    static constexpr unsigned int kMaxInputBufferSize =
-            kNumSamplesPerFrame * kMaxChannels * sizeof(float);
-    const unsigned int kMaxOutputBufferSize = 65536;    //TODO check if this can be reduced
-
-    bool mSignalledError;
-
-    OMX_U32 mNumChannels;
-    OMX_U32 mSampleRate;
-    OMX_U32 mCompressionLevel;
-    OMX_NUMERICALDATATYPE mNumericalData = OMX_NumericalDataSigned;
-    OMX_U32 mBitsPerSample = 16;
-
-    // should the data received by the callback be written to the output port
-    bool        mEncoderWriteData;
-    bool        mEncoderReturnedEncodedData;
-    bool        mSawInputEOS;
-    bool        mSentOutputEOS;
-    size_t      mEncoderReturnedNbBytes;
-    OMX_TICKS  mCurrentInputTimeStamp;
-
-    FLAC__StreamEncoder* mFlacStreamEncoder;
-
-    void initPorts();
-
-    OMX_ERRORTYPE configureEncoder();
-
-    // FLAC encoder callbacks
-    // maps to encoderEncodeFlac()
-    static FLAC__StreamEncoderWriteStatus flacEncoderWriteCallback(
-            const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[],
-            size_t bytes, unsigned samples, unsigned current_frame, void *client_data);
-
-    FLAC__StreamEncoderWriteStatus onEncodedFlacAvailable(
-                const FLAC__byte buffer[],
-                size_t bytes, unsigned samples, unsigned current_frame);
-
-    // FLAC takes samples aligned on 32bit boundaries, use this buffer for the conversion
-    // before passing the input data to the encoder
-    FLAC__int32* mInputBufferPcm32;
-
-    unsigned mHeaderOffset;
-    bool mHeaderComplete;
-    bool mWroteHeader;
-    char mHeader[128];
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftFlacEncoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_FLAC_ENC_H_
-
diff --git a/media/libstagefright/codecs/flac/enc/exports.lds b/media/libstagefright/codecs/flac/enc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/flac/enc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/g711/dec/Android.bp b/media/libstagefright/codecs/g711/dec/Android.bp
deleted file mode 100644
index 1dc34c3..0000000
--- a/media/libstagefright/codecs/g711/dec/Android.bp
+++ /dev/null
@@ -1,35 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_g711_dec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_g711_dec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_g711dec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftG711.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/g711/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/g711/dec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/g711/dec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/g711/dec/NOTICE b/media/libstagefright/codecs/g711/dec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/g711/dec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
deleted file mode 100644
index fe91510..0000000
--- a/media/libstagefright/codecs/g711/dec/SoftG711.cpp
+++ /dev/null
@@ -1,391 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftG711"
-#include <utils/Log.h>
-
-#include "SoftG711.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-#define MAX_CHANNEL_COUNT            6  /* maximum number of audio channels that can be decoded */
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftG711::SoftG711(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mIsMLaw(true),
-      mSignalledError(false),
-      mNumChannels(1),
-      mSamplingRate(8000) {
-    if (!strcmp(name, "OMX.google.g711.alaw.decoder")) {
-        mIsMLaw = false;
-    } else {
-        CHECK(!strcmp(name, "OMX.google.g711.mlaw.decoder"));
-    }
-
-    initPorts();
-}
-
-SoftG711::~SoftG711() {
-}
-
-void SoftG711::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        const_cast<char *>(
-                mIsMLaw
-                    ? MEDIA_MIMETYPE_AUDIO_G711_MLAW
-                    : MEDIA_MIMETYPE_AUDIO_G711_ALAW);
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingG711;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-OMX_ERRORTYPE SoftG711::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingG711 : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            if (pcmParams->nPortIndex == 0) {
-                // input port
-                pcmParams->ePCMMode = mIsMLaw ? OMX_AUDIO_PCMModeMULaw
-                                              : OMX_AUDIO_PCMModeALaw;
-            } else {
-                // output port
-                pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            }
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = mNumChannels;
-            pcmParams->nSamplingRate = mSamplingRate;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftG711::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nChannels < 1 || pcmParams->nChannels > MAX_CHANNEL_COUNT) {
-                return OMX_ErrorUndefined;
-            }
-
-            if(pcmParams->nPortIndex == 0) {
-                mNumChannels = pcmParams->nChannels;
-            }
-
-            mSamplingRate = pcmParams->nSamplingRate;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingG711)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (mIsMLaw) {
-                if (strncmp((const char *)roleParams->cRole,
-                            "audio_decoder.g711mlaw",
-                            OMX_MAX_STRINGNAME_SIZE - 1)) {
-                    return OMX_ErrorUndefined;
-                }
-            } else {
-                if (strncmp((const char *)roleParams->cRole,
-                            "audio_decoder.g711alaw",
-                            OMX_MAX_STRINGNAME_SIZE - 1)) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftG711::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            outHeader->nFilledLen = 0;
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
-        }
-
-        if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
-            ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
-
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-        }
-
-        if (inHeader->nFilledLen * sizeof(int16_t) > outHeader->nAllocLen) {
-            ALOGE("output buffer too small (%d).", outHeader->nAllocLen);
-            android_errorWriteLog(0x534e4554, "27793163");
-
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        const uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
-
-        if (mIsMLaw) {
-            DecodeMLaw(
-                    reinterpret_cast<int16_t *>(outHeader->pBuffer),
-                    inputptr, inHeader->nFilledLen);
-        } else {
-            DecodeALaw(
-                    reinterpret_cast<int16_t *>(outHeader->pBuffer),
-                    inputptr, inHeader->nFilledLen);
-        }
-
-        outHeader->nTimeStamp = inHeader->nTimeStamp;
-        outHeader->nOffset = 0;
-        outHeader->nFilledLen = inHeader->nFilledLen * sizeof(int16_t);
-        outHeader->nFlags = 0;
-
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inHeader->nFilledLen = 0;
-        } else {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outInfo = NULL;
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-    }
-}
-
-// static
-void SoftG711::DecodeALaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t ix = x ^ 0x55;
-        ix &= 0x7f;
-
-        int32_t iexp = ix >> 4;
-        int32_t mant = ix & 0x0f;
-
-        if (iexp > 0) {
-            mant += 16;
-        }
-
-        mant = (mant << 4) + 8;
-
-        if (iexp > 1) {
-            mant = mant << (iexp - 1);
-        }
-
-        *out++ = (x > 127) ? mant : -mant;
-    }
-}
-
-// static
-void SoftG711::DecodeMLaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t mantissa = ~x;
-        int32_t exponent = (mantissa >> 4) & 7;
-        int32_t segment = exponent + 1;
-        mantissa &= 0x0f;
-
-        int32_t step = 4 << segment;
-
-        int32_t abs = (0x80L << exponent) + step * mantissa + step / 2 - 4 * 33;
-
-        *out++ = (x < 0x80) ? -abs : abs;
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftG711(name, callbacks, appData, component);
-}
-
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
deleted file mode 100644
index 3ece246..0000000
--- a/media/libstagefright/codecs/g711/dec/SoftG711.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_G711_H_
-
-#define SOFT_G711_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-namespace android {
-
-struct SoftG711 : public SimpleSoftOMXComponent {
-    SoftG711(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftG711();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-private:
-    enum {
-        kNumBuffers = 4,
-        kMaxNumSamplesPerFrame = 16384,
-    };
-
-    bool mIsMLaw;
-    bool mSignalledError;
-    OMX_U32 mNumChannels;
-    int32_t mSamplingRate;
-
-    void initPorts();
-
-    static void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
-    static void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftG711);
-};
-
-}  // namespace android
-
-#endif  // SOFT_G711_H_
-
diff --git a/media/libstagefright/codecs/g711/dec/exports.lds b/media/libstagefright/codecs/g711/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/g711/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/gsm/dec/Android.bp b/media/libstagefright/codecs/gsm/dec/Android.bp
deleted file mode 100644
index efa2f83..0000000
--- a/media/libstagefright/codecs/gsm/dec/Android.bp
+++ /dev/null
@@ -1,37 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_gsm_dec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_gsm_dec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_gsmdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftGSM.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libgsm"],
-}
diff --git a/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/gsm/dec/NOTICE b/media/libstagefright/codecs/gsm/dec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/gsm/dec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
deleted file mode 100644
index 330cb8a..0000000
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
+++ /dev/null
@@ -1,363 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftGSM"
-#include <utils/Log.h>
-
-#include "SoftGSM.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-// Microsoft WAV GSM encoding packs two GSM frames into 65 bytes.
-static const int kMSGSMFrameSize = 65;
-
-SoftGSM::SoftGSM(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mSignalledError(false) {
-
-    CHECK(!strcmp(name, "OMX.google.gsm.decoder"));
-
-    mGsm = gsm_create();
-    CHECK(mGsm);
-    int msopt = 1;
-    gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
-
-    initPorts();
-}
-
-SoftGSM::~SoftGSM() {
-    gsm_destroy(mGsm);
-}
-
-void SoftGSM::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 1024 / kMSGSMFrameSize * kMSGSMFrameSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_AUDIO_MSGSM);
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingGSMFR;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-OMX_ERRORTYPE SoftGSM::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingGSMFR : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = 1;
-            pcmParams->nSamplingRate = 8000;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftGSM::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nChannels != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (pcmParams->nSamplingRate != 8000) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingGSMFR)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.gsm",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftGSM::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            outHeader->nFilledLen = 0;
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
-        }
-
-        if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
-            ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-        }
-
-        if(((inHeader->nFilledLen / kMSGSMFrameSize) * kMSGSMFrameSize) != inHeader->nFilledLen) {
-            ALOGE("input buffer not multiple of %d (%d).", kMSGSMFrameSize, inHeader->nFilledLen);
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-        }
-
-        if (outHeader->nAllocLen < (inHeader->nFilledLen / kMSGSMFrameSize) * 320) {
-            ALOGE("output buffer is not large enough (%d).", outHeader->nAllocLen);
-            android_errorWriteLog(0x534e4554, "27793367");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
-
-        int n = mSignalledError ? 0 : DecodeGSM(mGsm,
-                  reinterpret_cast<int16_t *>(outHeader->pBuffer), inputptr, inHeader->nFilledLen);
-
-        outHeader->nTimeStamp = inHeader->nTimeStamp;
-        outHeader->nOffset = 0;
-        outHeader->nFilledLen = n * sizeof(int16_t);
-        outHeader->nFlags = 0;
-
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inHeader->nFilledLen = 0;
-        } else {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outInfo = NULL;
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-    }
-}
-
-
-// static
-int SoftGSM::DecodeGSM(gsm handle,
-        int16_t *out, uint8_t *in, size_t inSize) {
-
-    int ret = 0;
-    while (inSize > 0) {
-        gsm_decode(handle, in, out);
-        in += 33;
-        inSize -= 33;
-        out += 160;
-        ret += 160;
-        gsm_decode(handle, in, out);
-        in += 32;
-        inSize -= 32;
-        out += 160;
-        ret += 160;
-    }
-    return ret;
-}
-
-void SoftGSM::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0) {
-        gsm_destroy(mGsm);
-        mGsm = gsm_create();
-        int msopt = 1;
-        gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
-    }
-}
-
-void SoftGSM::onReset() {
-    gsm_destroy(mGsm);
-    mGsm = gsm_create();
-    int msopt = 1;
-    gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
-    mSignalledError = false;
-}
-
-
-
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftGSM(name, callbacks, appData, component);
-}
-
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
deleted file mode 100644
index d5885a6..0000000
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_GSM_H_
-
-#define SOFT_GSM_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include "gsm.h"
-
-namespace android {
-
-struct SoftGSM : public SimpleSoftOMXComponent {
-    SoftGSM(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftGSM();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers = 4,
-        kMaxNumSamplesPerFrame = 16384,
-    };
-
-    bool mSignalledError;
-    gsm mGsm;
-
-    void initPorts();
-
-    static int DecodeGSM(gsm handle, int16_t *out, uint8_t *in, size_t inSize);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftGSM);
-};
-
-}  // namespace android
-
-#endif  // SOFT_GSM_H_
-
diff --git a/media/libstagefright/codecs/gsm/dec/exports.lds b/media/libstagefright/codecs/gsm/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/gsm/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/hevcdec/Android.bp b/media/libstagefright/codecs/hevcdec/Android.bp
deleted file mode 100644
index 2c4091b..0000000
--- a/media/libstagefright/codecs/hevcdec/Android.bp
+++ /dev/null
@@ -1,38 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_hevcdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    static_libs: ["libhevcdec"],
-    srcs: ["SoftHEVC.cpp"],
-
-    cflags: [
-        "-Wall",
-        "-Wno-unused-variable",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-        config: {
-            cfi_assembly_support: true,
-        },
-    },
-
-    // We need this because the current asm generates the following link error:
-    // requires unsupported dynamic reloc R_ARM_REL32; recompile with -fPIC
-    // Bug: 16853291
-    ldflags: ["-Wl,-Bsymbolic"],
-}
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
deleted file mode 100644
index 176da47..0000000
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ /dev/null
@@ -1,726 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftHEVC"
-#include <utils/Log.h>
-
-#include "ihevc_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
-#include "ihevcd_cxa.h"
-#include "SoftHEVC.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
-#include <OMX_VideoExt.h>
-
-namespace android {
-
-#define componentName                   "video_decoder.hevc"
-#define codingType                      OMX_VIDEO_CodingHEVC
-#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_HEVC
-
-/** Function and structure definitions to keep code similar for each codec */
-#define ivdec_api_function              ihevcd_cxa_api_function
-#define ivdext_create_ip_t              ihevcd_cxa_create_ip_t
-#define ivdext_create_op_t              ihevcd_cxa_create_op_t
-#define ivdext_delete_ip_t              ihevcd_cxa_delete_ip_t
-#define ivdext_delete_op_t              ihevcd_cxa_delete_op_t
-#define ivdext_ctl_set_num_cores_ip_t   ihevcd_cxa_ctl_set_num_cores_ip_t
-#define ivdext_ctl_set_num_cores_op_t   ihevcd_cxa_ctl_set_num_cores_op_t
-
-#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
-
-static const CodecProfileLevel kProfileLevels[] = {
-    { OMX_VIDEO_HEVCProfileMain,      OMX_VIDEO_HEVCMainTierLevel51 },
-    { OMX_VIDEO_HEVCProfileMainStill, OMX_VIDEO_HEVCMainTierLevel51 },
-};
-
-SoftHEVC::SoftHEVC(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoDecoderOMXComponent(name, componentName, codingType,
-            kProfileLevels, ARRAY_SIZE(kProfileLevels),
-            320 /* width */, 240 /* height */, callbacks,
-            appData, component),
-      mCodecCtx(NULL),
-      mFlushOutBuffer(NULL),
-      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
-      mIvColorFormat(IV_YUV_420P),
-      mChangingResolution(false),
-      mSignalledError(false),
-      mStride(mWidth) {
-    const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
-    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
-    // INPUT_BUF_SIZE is given by HEVC codec as minimum input size
-    initPorts(
-            kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
-            kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
-}
-
-status_t SoftHEVC::init() {
-    return initDecoder();
-}
-
-SoftHEVC::~SoftHEVC() {
-    ALOGV("In SoftHEVC::~SoftHEVC");
-    CHECK_EQ(deInitDecoder(), (status_t)OK);
-}
-
-static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
-    UNUSED(ctxt);
-    return memalign(alignment, size);
-}
-
-static void ivd_aligned_free(void *ctxt, void *buf) {
-    UNUSED(ctxt);
-    free(buf);
-    return;
-}
-
-static size_t GetCPUCoreCount() {
-    long cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK(cpuCoreCount >= 1);
-    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
-    return (size_t)cpuCoreCount;
-}
-
-void SoftHEVC::logVersion() {
-    ivd_ctl_getversioninfo_ip_t s_ctl_ip;
-    ivd_ctl_getversioninfo_op_t s_ctl_op;
-    UWORD8 au1_buf[512];
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
-    s_ctl_ip.pv_version_buffer = au1_buf;
-    s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
-            (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in getting version number: 0x%x",
-                s_ctl_op.u4_error_code);
-    } else {
-        ALOGV("Ittiam decoder version number: %s",
-                (char *)s_ctl_ip.pv_version_buffer);
-    }
-    return;
-}
-
-status_t SoftHEVC::setParams(size_t stride) {
-    ivd_ctl_set_config_ip_t s_ctl_ip;
-    ivd_ctl_set_config_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-    s_ctl_ip.u4_disp_wd = (UWORD32)stride;
-    s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-
-    s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
-
-    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
-            (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the run-time parameters: 0x%x",
-                s_ctl_op.u4_error_code);
-
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftHEVC::resetPlugin() {
-    mIsInFlush = false;
-    mReceivedEOS = false;
-    memset(mTimeStamps, 0, sizeof(mTimeStamps));
-    memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
-
-    /* Initialize both start and end times */
-    gettimeofday(&mTimeStart, NULL);
-    gettimeofday(&mTimeEnd, NULL);
-
-    return OK;
-}
-
-bool SoftHEVC::getVUIParams() {
-    IV_API_CALL_STATUS_T status;
-    ihevcd_cxa_ctl_get_vui_params_ip_t s_ctl_get_vui_params_ip;
-    ihevcd_cxa_ctl_get_vui_params_op_t s_ctl_get_vui_params_op;
-
-    s_ctl_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_get_vui_params_ip.e_sub_cmd =
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_GET_VUI_PARAMS;
-
-    s_ctl_get_vui_params_ip.u4_size =
-        sizeof(ihevcd_cxa_ctl_get_vui_params_ip_t);
-
-    s_ctl_get_vui_params_op.u4_size = sizeof(ihevcd_cxa_ctl_get_vui_params_op_t);
-
-    status = ivdec_api_function(
-            (iv_obj_t *)mCodecCtx, (void *)&s_ctl_get_vui_params_ip,
-            (void *)&s_ctl_get_vui_params_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGW("Error in getting VUI params: 0x%x",
-                s_ctl_get_vui_params_op.u4_error_code);
-        return false;
-    }
-
-    int32_t primaries = s_ctl_get_vui_params_op.u1_colour_primaries;
-    int32_t transfer = s_ctl_get_vui_params_op.u1_transfer_characteristics;
-    int32_t coeffs = s_ctl_get_vui_params_op.u1_matrix_coefficients;
-    bool fullRange = s_ctl_get_vui_params_op.u1_video_full_range_flag;
-
-    ColorAspects colorAspects;
-    ColorUtils::convertIsoColorAspectsToCodecAspects(
-            primaries, transfer, coeffs, fullRange, colorAspects);
-
-    // Update color aspects if necessary.
-    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
-        mBitstreamColorAspects = colorAspects;
-        status_t err = handleColorAspectsChange();
-        CHECK(err == OK);
-    }
-    return true;
-}
-
-status_t SoftHEVC::resetDecoder() {
-    ivd_ctl_reset_ip_t s_ctl_ip;
-    ivd_ctl_reset_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
-            (void *)&s_ctl_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-    mSignalledError = false;
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    mStride = 0;
-    return OK;
-}
-
-status_t SoftHEVC::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_cores_op;
-    IV_API_CALL_STATUS_T status;
-    s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
-    s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
-    s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
-    s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
-    ALOGV("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
-    status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip,
-            (void *)&s_set_cores_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in setting number of cores: 0x%x",
-                s_set_cores_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftHEVC::setFlushMode() {
-    IV_API_CALL_STATUS_T status;
-    ivd_ctl_flush_ip_t s_video_flush_ip;
-    ivd_ctl_flush_op_t s_video_flush_op;
-
-    s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
-    s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
-    s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
-    ALOGV("Set the decoder in flush mode ");
-
-    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
-    status = ivdec_api_function(mCodecCtx, (void *)&s_video_flush_ip,
-            (void *)&s_video_flush_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
-                s_video_flush_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-
-    mIsInFlush = true;
-    return OK;
-}
-
-status_t SoftHEVC::initDecoder() {
-    IV_API_CALL_STATUS_T status;
-
-    mNumCores = GetCPUCoreCount();
-    mCodecCtx = NULL;
-
-    mStride = outputBufferWidth();
-
-    /* Initialize the decoder */
-    {
-        ivdext_create_ip_t s_create_ip;
-        ivdext_create_op_t s_create_op;
-
-        void *dec_fxns = (void *)ivdec_api_function;
-
-        s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
-        s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
-        s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
-        s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
-        s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
-        s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
-        s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
-        s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = NULL;
-
-        status = ivdec_api_function(mCodecCtx, (void *)&s_create_ip, (void *)&s_create_op);
-
-        if (status != IV_SUCCESS) {
-            ALOGE("Error in create: 0x%x",
-                    s_create_op.s_ivd_create_op_t.u4_error_code);
-            deInitDecoder();
-            mCodecCtx = NULL;
-            return UNKNOWN_ERROR;
-        }
-
-        mCodecCtx = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
-        mCodecCtx->pv_fxns = dec_fxns;
-        mCodecCtx->u4_size = sizeof(iv_obj_t);
-    }
-
-    /* Reset the plugin state */
-    resetPlugin();
-
-    /* Set the run time (dynamic) parameters */
-    setParams(mStride);
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    /* Get codec version */
-    logVersion();
-
-    mFlushNeeded = false;
-    return OK;
-}
-
-status_t SoftHEVC::deInitDecoder() {
-    size_t i;
-    IV_API_CALL_STATUS_T status;
-
-    if (mCodecCtx) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
-
-        s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
-        s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
-
-        s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
-
-        status = ivdec_api_function(mCodecCtx, (void *)&s_delete_ip, (void *)&s_delete_op);
-        if (status != IV_SUCCESS) {
-            ALOGE("Error in delete: 0x%x",
-                    s_delete_op.s_ivd_delete_op_t.u4_error_code);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-
-    mChangingResolution = false;
-
-    return OK;
-}
-
-void SoftHEVC::onReset() {
-    ALOGV("onReset called");
-    SoftVideoDecoderOMXComponent::onReset();
-
-    mSignalledError = false;
-    resetDecoder();
-    resetPlugin();
-}
-
-bool SoftHEVC::setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
-        ivd_video_decode_op_t *ps_dec_op,
-        OMX_BUFFERHEADERTYPE *inHeader,
-        OMX_BUFFERHEADERTYPE *outHeader,
-        size_t timeStampIx) {
-    size_t sizeY = outputBufferWidth() * outputBufferHeight();
-    size_t sizeUV;
-
-    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
-    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
-
-    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
-
-    /* When in flush and after EOS with zero byte input,
-     * inHeader is set to zero. Hence check for non-null */
-    if (inHeader) {
-        ps_dec_ip->u4_ts = timeStampIx;
-        ps_dec_ip->pv_stream_buffer = inHeader->pBuffer
-                + inHeader->nOffset;
-        ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
-    } else {
-        ps_dec_ip->u4_ts = 0;
-        ps_dec_ip->pv_stream_buffer = NULL;
-        ps_dec_ip->u4_num_Bytes = 0;
-    }
-
-    sizeUV = sizeY / 4;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
-
-    uint8_t *pBuf;
-    if (outHeader) {
-        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
-            android_errorWriteLog(0x534e4554, "27833616");
-            return false;
-        }
-        pBuf = outHeader->pBuffer;
-    } else {
-        // mFlushOutBuffer always has the right size.
-        pBuf = mFlushOutBuffer;
-    }
-
-    ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
-    ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
-    ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
-    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
-    return true;
-}
-void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
-    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
-    if (kOutputPortIndex == portIndex) {
-        setFlushMode();
-
-        /* Allocate a picture buffer to flushed data */
-        uint32_t displayStride = outputBufferWidth();
-        uint32_t displayHeight = outputBufferHeight();
-
-        uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
-        mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
-        if (NULL == mFlushOutBuffer) {
-            ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
-            return;
-        }
-
-        while (true) {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            IV_API_CALL_STATUS_T status;
-            size_t sizeY, sizeUV;
-
-            setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
-
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
-                    (void *)&s_dec_op);
-            if (0 == s_dec_op.u4_output_present) {
-                resetPlugin();
-                break;
-            }
-        }
-
-        if (mFlushOutBuffer) {
-            free(mFlushOutBuffer);
-            mFlushOutBuffer = NULL;
-        }
-
-    }
-}
-
-void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
-    UNUSED(portIndex);
-
-    if (mSignalledError) {
-        return;
-    }
-    if (mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    if (NULL == mCodecCtx) {
-        if (OK != initDecoder()) {
-            ALOGE("Failed to initialize decoder");
-            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-    }
-    if (outputBufferWidth() != mStride) {
-        /* Set the run-time (dynamic) parameters */
-        mStride = outputBufferWidth();
-        setParams(mStride);
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
-    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
-
-    while (!outQueue.empty()) {
-        BufferInfo *inInfo;
-        OMX_BUFFERHEADERTYPE *inHeader;
-
-        BufferInfo *outInfo;
-        OMX_BUFFERHEADERTYPE *outHeader;
-        size_t timeStampIx;
-
-        inInfo = NULL;
-        inHeader = NULL;
-
-        if (!mIsInFlush) {
-            if (!inQueue.empty()) {
-                inInfo = *inQueue.begin();
-                inHeader = inInfo->mHeader;
-            } else {
-                break;
-            }
-        }
-
-        outInfo = *outQueue.begin();
-        outHeader = outInfo->mHeader;
-        outHeader->nFlags = 0;
-        outHeader->nTimeStamp = 0;
-        outHeader->nOffset = 0;
-
-        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
-            mReceivedEOS = true;
-            if (inHeader->nFilledLen == 0) {
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-                setFlushMode();
-            }
-        }
-
-        /* Get a free slot in timestamp array to hold input timestamp */
-        {
-            size_t i;
-            timeStampIx = 0;
-            for (i = 0; i < MAX_TIME_STAMPS; i++) {
-                if (!mTimeStampsValid[i]) {
-                    timeStampIx = i;
-                    break;
-                }
-            }
-            if (inHeader != NULL) {
-                mTimeStampsValid[timeStampIx] = true;
-                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
-            }
-        }
-
-        {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            WORD32 timeDelay, timeTaken;
-            size_t sizeY, sizeUV;
-
-            if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
-                ALOGE("Decoder arg setup failed");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            GETTIME(&mTimeStart, NULL);
-            /* Compute time elapsed between end of previous decode()
-             * to start of current decode() */
-            TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-
-            IV_API_CALL_STATUS_T status;
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-
-            bool unsupportedResolution =
-                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-
-            /* Check for unsupported dimensions */
-            if (unsupportedResolution) {
-                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            bool allocationFailed = 
-                (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-            if (allocationFailed) {
-                ALOGE("Allocation failure in decoder");
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            if (IS_IVD_FATAL_ERROR(s_dec_op.u4_error_code)) {
-                ALOGE("Fatal Error : 0x%x", s_dec_op.u4_error_code);
-                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
-
-            getVUIParams();
-
-            GETTIME(&mTimeEnd, NULL);
-            /* Compute time taken for decode() */
-            TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
-
-            ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-                   s_dec_op.u4_num_bytes_consumed);
-            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
-                mFlushNeeded = true;
-            }
-
-            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
-                /* If the input did not contain picture data, then ignore
-                 * the associated timestamp */
-                mTimeStampsValid[timeStampIx] = false;
-            }
-
-            // If the decoder is in the changing resolution mode and there is no output present,
-            // that means the switching is done and it's ready to reset the decoder and the plugin.
-            if (mChangingResolution && !s_dec_op.u4_output_present) {
-                mChangingResolution = false;
-                resetDecoder();
-                resetPlugin();
-                mStride = outputBufferWidth();
-                setParams(mStride);
-                continue;
-            }
-
-            if (resChanged) {
-                mChangingResolution = true;
-                if (mFlushNeeded) {
-                    setFlushMode();
-                }
-                continue;
-            }
-
-            // Combine the resolution change and coloraspects change in one PortSettingChange event
-            // if necessary.
-            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
-                uint32_t width = s_dec_op.u4_pic_wd;
-                uint32_t height = s_dec_op.u4_pic_ht;
-                bool portWillReset = false;
-                handlePortSettingsChange(&portWillReset, width, height);
-
-                if (portWillReset) {
-                    resetDecoder();
-                    resetPlugin();
-                    return;
-                }
-            } else if (mUpdateColorAspects) {
-                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
-                    kDescribeColorAspectsIndex, NULL);
-                mUpdateColorAspects = false;
-                return;
-            }
-
-            if (s_dec_op.u4_output_present) {
-                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
-
-                outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
-                mTimeStampsValid[s_dec_op.u4_ts] = false;
-
-                outInfo->mOwnedByUs = false;
-                outQueue.erase(outQueue.begin());
-                outInfo = NULL;
-                notifyFillBufferDone(outHeader);
-                outHeader = NULL;
-            } else if (mIsInFlush) {
-                /* If in flush mode and no output is returned by the codec,
-                 * then come out of flush mode */
-                mIsInFlush = false;
-
-                /* If EOS was recieved on input port and there is no output
-                 * from the codec, then signal EOS on output port */
-                if (mReceivedEOS) {
-                    outHeader->nFilledLen = 0;
-                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                    resetPlugin();
-                }
-            }
-        }
-
-        /* If input EOS is seen and decoder is not in flush mode,
-         * set the decoder in flush mode.
-         * There can be a case where EOS is sent along with last picture data
-         * In that case, only after decoding that input data, decoder has to be
-         * put in flush. This case is handled here  */
-
-        if (mReceivedEOS && !mIsInFlush) {
-            setFlushMode();
-        }
-
-        // TODO: Handle more than one picture data
-        if (inHeader != NULL) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-    }
-}
-
-int SoftHEVC::getColorAspectPreference() {
-    return kPreferBitstream;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(const char *name,
-        const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
-        OMX_COMPONENTTYPE **component) {
-    android::SoftHEVC *codec = new android::SoftHEVC(name, callbacks, appData, component);
-    if (codec->init() != android::OK) {
-        android::sp<android::SoftOMXComponent> release = codec;
-        return NULL;
-    }
-    return codec;
-}
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
deleted file mode 100644
index 5800490..0000000
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_HEVC_H_
-
-#define SOFT_HEVC_H_
-
-#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-#include <sys/time.h>
-
-namespace android {
-
-/** Number of entries in the time-stamp array */
-#define MAX_TIME_STAMPS 64
-
-/** Maximum number of cores supported by the codec */
-#define CODEC_MAX_NUM_CORES 4
-
-#define CODEC_MAX_WIDTH     1920
-
-#define CODEC_MAX_HEIGHT    1088
-
-/** Input buffer size */
-#define INPUT_BUF_SIZE (1024 * 1024)
-
-#define MIN(a, b) ((a) < (b)) ? (a) : (b)
-
-/** Used to remove warnings about unused parameters */
-#define UNUSED(x) ((void)(x))
-
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b);
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
-struct SoftHEVC: public SoftVideoDecoderOMXComponent {
-    SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData, OMX_COMPONENTTYPE **component);
-
-    status_t init();
-
-protected:
-    virtual ~SoftHEVC();
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-    virtual int getColorAspectPreference();
-private:
-    // Number of input and output buffers
-    enum {
-        kNumBuffers = 8
-    };
-
-    iv_obj_t *mCodecCtx;         // Codec context
-
-    size_t mNumCores;            // Number of cores to be uesd by the codec
-
-    struct timeval mTimeStart;   // Time at the start of decode()
-    struct timeval mTimeEnd;     // Time at the end of decode()
-
-    // Internal buffer to be used to flush out the buffers from decoder
-    uint8_t *mFlushOutBuffer;
-
-    // Status of entries in the timestamp array
-    bool mTimeStampsValid[MAX_TIME_STAMPS];
-
-    // Timestamp array - Since codec does not take 64 bit timestamps,
-    // they are maintained in the plugin
-    OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
-
-    OMX_COLOR_FORMATTYPE mOmxColorFormat;    // OMX Color format
-    IV_COLOR_FORMAT_T mIvColorFormat;        // Ittiam Color format
-
-    bool mIsInFlush;        // codec is flush mode
-    bool mReceivedEOS;      // EOS is receieved on input port
-
-    // The input stream has changed to a different resolution, which is still supported by the
-    // codec. So the codec is switching to decode the new resolution.
-    bool mChangingResolution;
-    bool mFlushNeeded;
-    bool mSignalledError;
-    size_t mStride;
-
-    status_t initDecoder();
-    status_t deInitDecoder();
-    status_t setFlushMode();
-    status_t setParams(size_t stride);
-    void logVersion();
-    status_t setNumCores();
-    status_t resetDecoder();
-    status_t resetPlugin();
-
-    bool setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
-        ivd_video_decode_op_t *ps_dec_op,
-        OMX_BUFFERHEADERTYPE *inHeader,
-        OMX_BUFFERHEADERTYPE *outHeader,
-        size_t timeStampIx);
-
-    bool getVUIParams();
-
-    DISALLOW_EVIL_CONSTRUCTORS (SoftHEVC);
-};
-
-} // namespace android
-
-#endif  // SOFT_HEVC_H_
diff --git a/media/libstagefright/codecs/hevcdec/exports.lds b/media/libstagefright/codecs/hevcdec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/hevcdec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
deleted file mode 100644
index 725c79c..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ /dev/null
@@ -1,28 +0,0 @@
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_mpeg4dec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftMPEG4.cpp"],
-
-    cflags: [
-    ],
-
-    static_libs: ["libstagefright_m4vh263dec"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
deleted file mode 100644
index 800e2e1..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ /dev/null
@@ -1,440 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftMPEG4"
-#include <utils/Log.h>
-
-#include "SoftMPEG4.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include "mp4dec_api.h"
-
-namespace android {
-
-static const CodecProfileLevel kM4VProfileLevels[] = {
-    { OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level3 },
-};
-
-static const CodecProfileLevel kH263ProfileLevels[] = {
-    { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level30 },
-    { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level45 },
-    { OMX_VIDEO_H263ProfileISWV2,    OMX_VIDEO_H263Level30 },
-    { OMX_VIDEO_H263ProfileISWV2,    OMX_VIDEO_H263Level45 },
-};
-
-SoftMPEG4::SoftMPEG4(
-        const char *name,
-        const char *componentRole,
-        OMX_VIDEO_CODINGTYPE codingType,
-        const CodecProfileLevel *profileLevels,
-        size_t numProfileLevels,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoDecoderOMXComponent(
-            name, componentRole, codingType, profileLevels, numProfileLevels,
-            352 /* width */, 288 /* height */, callbacks, appData, component),
-      mMode(codingType == OMX_VIDEO_CodingH263 ? MODE_H263 : MODE_MPEG4),
-      mHandle(new tagvideoDecControls),
-      mInputBufferCount(0),
-      mSignalledError(false),
-      mInitialized(false),
-      mFramesConfigured(false),
-      mNumSamplesOutput(0),
-      mPvTime(0) {
-    initPorts(
-            kNumInputBuffers,
-            352 * 288 * 3 / 2 /* minInputBufferSize */,
-            kNumOutputBuffers,
-            (mMode == MODE_MPEG4)
-            ? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263);
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftMPEG4::~SoftMPEG4() {
-    if (mInitialized) {
-        PVCleanUpVideoDecoder(mHandle);
-    }
-
-    delete mHandle;
-    mHandle = NULL;
-}
-
-status_t SoftMPEG4::initDecoder() {
-    memset(mHandle, 0, sizeof(tagvideoDecControls));
-    return OK;
-}
-
-void SoftMPEG4::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!inQueue.empty() && outQueue.size() == kNumOutputBuffers) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-        if (inHeader == NULL) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            continue;
-        }
-
-        PortInfo *port = editPortInfo(1);
-
-        OMX_BUFFERHEADERTYPE *outHeader =
-            port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader;
-
-        if (inHeader->nFilledLen == 0) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            ++mInputBufferCount;
-
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                outHeader->nFilledLen = 0;
-                outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-                List<BufferInfo *>::iterator it = outQueue.begin();
-                while (it != outQueue.end() && (*it)->mHeader != outHeader) {
-                    ++it;
-                }
-                if (it == outQueue.end()) {
-                    ALOGE("couldn't find port buffer %d in outQueue: b/109891727", mNumSamplesOutput & 1);
-                    android_errorWriteLog(0x534e4554, "109891727");
-                    return;
-                }
-
-                BufferInfo *outInfo = *it;
-                outInfo->mOwnedByUs = false;
-                outQueue.erase(it);
-                outInfo = NULL;
-
-                notifyFillBufferDone(outHeader);
-                outHeader = NULL;
-            }
-            return;
-        }
-
-        uint8_t *bitstream = inHeader->pBuffer + inHeader->nOffset;
-        uint32_t *start_code = (uint32_t *)bitstream;
-        bool volHeader = *start_code == 0xB0010000;
-        if (volHeader) {
-            PVCleanUpVideoDecoder(mHandle);
-            mInitialized = false;
-        }
-
-        if (!mInitialized) {
-            uint8_t *vol_data[1];
-            int32_t vol_size = 0;
-
-            vol_data[0] = NULL;
-
-            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) || volHeader) {
-                vol_data[0] = bitstream;
-                vol_size = inHeader->nFilledLen;
-            }
-
-            MP4DecodingMode mode =
-                (mMode == MODE_MPEG4) ? MPEG4_MODE : H263_MODE;
-
-            Bool success = PVInitVideoDecoder(
-                    mHandle, vol_data, &vol_size, 1,
-                    outputBufferWidth(), outputBufferHeight(), mode);
-
-            if (!success) {
-                ALOGW("PVInitVideoDecoder failed. Unsupported content?");
-
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle);
-            if (mode != actualMode) {
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            PVSetPostProcType((VideoDecControls *) mHandle, 0);
-
-            bool hasFrameData = false;
-            if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-            } else if (volHeader) {
-                hasFrameData = true;
-            }
-
-            mInitialized = true;
-
-            if (mode == MPEG4_MODE && handlePortSettingsChange()) {
-                return;
-            }
-
-            if (!hasFrameData) {
-                continue;
-            }
-        }
-
-        if (!mFramesConfigured) {
-            PortInfo *port = editPortInfo(1);
-            OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(1).mHeader;
-
-            OMX_U32 yFrameSize = sizeof(uint8) * mHandle->size;
-            if ((outHeader->nAllocLen < yFrameSize) ||
-                    (outHeader->nAllocLen - yFrameSize < yFrameSize / 2)) {
-                ALOGE("Too small output buffer for reference frame: %lu bytes",
-                        (unsigned long)outHeader->nAllocLen);
-                android_errorWriteLog(0x534e4554, "30033990");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return;
-            }
-            PVSetReferenceYUV(mHandle, outHeader->pBuffer);
-            mFramesConfigured = true;
-        }
-
-        uint32_t useExtTimestamp = (inHeader->nOffset == 0);
-
-        // decoder deals in ms (int32_t), OMX in us (int64_t)
-        // so use fake timestamp instead
-        uint32_t timestamp = 0xFFFFFFFF;
-        if (useExtTimestamp) {
-            mPvToOmxTimeMap.add(mPvTime, inHeader->nTimeStamp);
-            timestamp = mPvTime;
-            mPvTime++;
-        }
-
-        int32_t bufferSize = inHeader->nFilledLen;
-        int32_t tmp = bufferSize;
-
-        OMX_U32 frameSize;
-        OMX_U64 yFrameSize = (OMX_U64)mWidth * (OMX_U64)mHeight;
-        if (yFrameSize > ((OMX_U64)UINT32_MAX / 3) * 2) {
-            ALOGE("Frame size too large");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-        frameSize = (OMX_U32)(yFrameSize + (yFrameSize / 2));
-
-        if (outHeader->nAllocLen < frameSize) {
-            android_errorWriteLog(0x534e4554, "27833616");
-            ALOGE("Insufficient output buffer size");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        // Need to check if header contains new info, e.g., width/height, etc.
-        VopHeaderInfo header_info;
-        uint8_t *bitstreamTmp = bitstream;
-        if (PVDecodeVopHeader(
-                    mHandle, &bitstreamTmp, &timestamp, &tmp,
-                    &header_info, &useExtTimestamp,
-                    outHeader->pBuffer) != PV_TRUE) {
-            ALOGE("failed to decode vop header.");
-
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-        if (handlePortSettingsChange()) {
-            return;
-        }
-
-        // The PV decoder is lying to us, sometimes it'll claim to only have
-        // consumed a subset of the buffer when it clearly consumed all of it.
-        // ignore whatever it says...
-        if (PVDecodeVopBody(mHandle, &tmp) != PV_TRUE) {
-            ALOGE("failed to decode video frame.");
-
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        // H263 doesn't have VOL header, the frame size information is in short header, i.e. the
-        // decoder may detect size change after PVDecodeVideoFrame.
-        if (handlePortSettingsChange()) {
-            return;
-        }
-
-        if (mPvToOmxTimeMap.indexOfKey(timestamp) >= 0) {
-            // decoder deals in ms, OMX in us.
-            outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp);
-            mPvToOmxTimeMap.removeItem(timestamp);
-        }
-
-        inHeader->nOffset += bufferSize;
-        inHeader->nFilledLen = 0;
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        } else {
-            outHeader->nFlags = 0;
-        }
-
-        if (inHeader->nFilledLen == 0) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
-        ++mInputBufferCount;
-
-        outHeader->nOffset = 0;
-        outHeader->nFilledLen = frameSize;
-
-        List<BufferInfo *>::iterator it = outQueue.begin();
-        while (it != outQueue.end() && (*it)->mHeader != outHeader) {
-             ++it;
-        }
-        if (it == outQueue.end()) {
-            return;
-        }
-
-        BufferInfo *outInfo = *it;
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(it);
-        outInfo = NULL;
-
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-
-        ++mNumSamplesOutput;
-    }
-}
-
-bool SoftMPEG4::handlePortSettingsChange() {
-    uint32_t disp_width, disp_height;
-    PVGetVideoDimensions(mHandle, (int32 *)&disp_width, (int32 *)&disp_height);
-
-    uint32_t buf_width, buf_height;
-    PVGetBufferDimensions(mHandle, (int32 *)&buf_width, (int32 *)&buf_height);
-
-    CHECK_LE(disp_width, buf_width);
-    CHECK_LE(disp_height, buf_height);
-
-    ALOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d",
-            disp_width, disp_height, buf_width, buf_height);
-
-    CropSettingsMode cropSettingsMode = kCropUnSet;
-    if (disp_width != buf_width || disp_height != buf_height) {
-        cropSettingsMode = kCropSet;
-
-        if (mCropWidth != disp_width || mCropHeight != disp_height) {
-            mCropLeft = 0;
-            mCropTop = 0;
-            mCropWidth = disp_width;
-            mCropHeight = disp_height;
-            cropSettingsMode = kCropChanged;
-        }
-    }
-
-    bool portWillReset = false;
-    const bool fakeStride = true;
-    SoftVideoDecoderOMXComponent::handlePortSettingsChange(
-            &portWillReset, buf_width, buf_height,
-            OMX_COLOR_FormatYUV420Planar, cropSettingsMode, fakeStride);
-    if (portWillReset) {
-        if (mMode == MODE_H263) {
-            PVCleanUpVideoDecoder(mHandle);
-
-            uint8_t *vol_data[1];
-            int32_t vol_size = 0;
-
-            vol_data[0] = NULL;
-            if (!PVInitVideoDecoder(
-                    mHandle, vol_data, &vol_size, 1, outputBufferWidth(), outputBufferHeight(),
-                    H263_MODE)) {
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                mSignalledError = true;
-                return true;
-            }
-        }
-
-        mFramesConfigured = false;
-    }
-
-    return portWillReset;
-}
-
-void SoftMPEG4::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0 && mInitialized) {
-        CHECK_EQ((int)PVResetVideoDecoder(mHandle), (int)PV_TRUE);
-    }
-    mFramesConfigured = false;
-}
-
-void SoftMPEG4::onReset() {
-    SoftVideoDecoderOMXComponent::onReset();
-    mPvToOmxTimeMap.clear();
-    mSignalledError = false;
-    mFramesConfigured = false;
-    if (mInitialized) {
-        PVCleanUpVideoDecoder(mHandle);
-        mInitialized = false;
-    }
-}
-
-void SoftMPEG4::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
-    SoftVideoDecoderOMXComponent::updatePortDefinitions(updateCrop, updateInputSize);
-
-    /* We have to align our width and height - this should affect stride! */
-    OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
-    def->format.video.nStride = align(def->format.video.nStride, 16);
-    def->format.video.nSliceHeight = align(def->format.video.nSliceHeight, 16);
-    def->nBufferSize = (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    using namespace android;
-    if (!strcmp(name, "OMX.google.h263.decoder")) {
-        return new android::SoftMPEG4(
-                name, "video_decoder.h263", OMX_VIDEO_CodingH263,
-                kH263ProfileLevels, ARRAY_SIZE(kH263ProfileLevels),
-                callbacks, appData, component);
-    } else if (!strcmp(name, "OMX.google.mpeg4.decoder")) {
-        return new android::SoftMPEG4(
-                name, "video_decoder.mpeg4", OMX_VIDEO_CodingMPEG4,
-                kM4VProfileLevels, ARRAY_SIZE(kM4VProfileLevels),
-                callbacks, appData, component);
-    } else {
-        CHECK(!"Unknown component");
-    }
-    return NULL;
-}
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
deleted file mode 100644
index e399ac9..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_MPEG4_H_
-
-#define SOFT_MPEG4_H_
-
-#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-
-struct tagvideoDecControls;
-
-namespace android {
-
-struct SoftMPEG4 : public SoftVideoDecoderOMXComponent {
-    SoftMPEG4(const char *name,
-            const char *componentRole,
-            OMX_VIDEO_CODINGTYPE codingType,
-            const CodecProfileLevel *profileLevels,
-            size_t numProfileLevels,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftMPEG4();
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumInputBuffers  = 4,
-        kNumOutputBuffers = 2,
-    };
-
-    enum {
-        MODE_MPEG4,
-        MODE_H263,
-    } mMode;
-
-    tagvideoDecControls *mHandle;
-
-    size_t mInputBufferCount;
-
-    bool mSignalledError;
-    bool mInitialized;
-    bool mFramesConfigured;
-
-    int32_t mNumSamplesOutput;
-    int32_t mPvTime;
-    KeyedVector<int32_t, OMX_TICKS> mPvToOmxTimeMap;
-
-    status_t initDecoder();
-
-    virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
-    bool handlePortSettingsChange();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4);
-};
-
-}  // namespace android
-
-#endif  // SOFT_MPEG4_H_
-
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/exports.lds b/media/libstagefright/codecs/m4v_h263/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
deleted file mode 100644
index d10e40d..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ /dev/null
@@ -1,30 +0,0 @@
-//###############################################################################
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_mpeg4enc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftMPEG4Encoder.cpp"],
-
-    cflags: [
-        "-DBX_RC",
-    ],
-
-    static_libs: ["libstagefright_m4vh263enc"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
deleted file mode 100644
index bb1cb0b..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ /dev/null
@@ -1,550 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftMPEG4Encoder"
-#include <utils/Log.h>
-#include <utils/misc.h>
-
-#include "mp4enc_api.h"
-#include "OMX_Video.h"
-
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MetaData.h>
-
-#include "SoftMPEG4Encoder.h"
-
-#include <inttypes.h>
-
-#ifndef INT32_MAX
-#define INT32_MAX   2147483647
-#endif
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-static const CodecProfileLevel kMPEG4ProfileLevels[] = {
-    { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level2 },
-};
-
-static const CodecProfileLevel kH263ProfileLevels[] = {
-    { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level45 },
-};
-
-SoftMPEG4Encoder::SoftMPEG4Encoder(
-            const char *name,
-            const char *componentRole,
-            OMX_VIDEO_CODINGTYPE codingType,
-            const char *mime,
-            const CodecProfileLevel *profileLevels,
-            size_t numProfileLevels,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component)
-    : SoftVideoEncoderOMXComponent(
-            name, componentRole, codingType,
-            profileLevels, numProfileLevels,
-            176 /* width */, 144 /* height */,
-            callbacks, appData, component),
-      mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
-      mKeyFrameInterval(30),
-      mNumInputFrames(-1),
-      mStarted(false),
-      mSawInputEOS(false),
-      mSignalledError(false),
-      mHandle(new tagvideoEncControls),
-      mEncParams(new tagvideoEncOptions),
-      mInputFrameData(NULL) {
-
-    if (codingType == OMX_VIDEO_CodingH263) {
-        mEncodeMode = H263_MODE;
-    }
-
-    // 256 * 1024 is a magic number for PV's encoder, not sure why
-    const size_t kOutputBufferSize = 256 * 1024;
-
-    initPorts(kNumBuffers, kNumBuffers, kOutputBufferSize, mime);
-
-    ALOGI("Construct SoftMPEG4Encoder");
-}
-
-SoftMPEG4Encoder::~SoftMPEG4Encoder() {
-    ALOGV("Destruct SoftMPEG4Encoder");
-    onReset();
-    releaseEncoder();
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    CHECK(outQueue.empty());
-    CHECK(inQueue.empty());
-}
-
-OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
-    CHECK(mHandle != NULL);
-    memset(mHandle, 0, sizeof(tagvideoEncControls));
-
-    CHECK(mEncParams != NULL);
-    memset(mEncParams, 0, sizeof(tagvideoEncOptions));
-    if (!PVGetDefaultEncOption(mEncParams, 0)) {
-        ALOGE("Failed to get default encoding parameters");
-        return OMX_ErrorUndefined;
-    }
-    if (mFramerate == 0) {
-        ALOGE("Framerate should not be 0");
-        return OMX_ErrorUndefined;
-    }
-    mEncParams->encMode = mEncodeMode;
-    mEncParams->encWidth[0] = mWidth;
-    mEncParams->encHeight[0] = mHeight;
-    mEncParams->encFrameRate[0] = mFramerate >> 16; // mFramerate is in Q16 format
-    mEncParams->rcType = VBR_1;
-    mEncParams->vbvDelay = 5.0f;
-
-    // FIXME:
-    // Add more profile and level support for MPEG4 encoder
-    mEncParams->profile_level = CORE_PROFILE_LEVEL2;
-    mEncParams->packetSize = 32;
-    mEncParams->rvlcEnable = PV_OFF;
-    mEncParams->numLayers = 1;
-    mEncParams->timeIncRes = 1000;
-    mEncParams->tickPerSrc = ((int64_t)mEncParams->timeIncRes << 16) / mFramerate;
-
-    mEncParams->bitRate[0] = mBitrate;
-    mEncParams->iQuant[0] = 15;
-    mEncParams->pQuant[0] = 12;
-    mEncParams->quantType[0] = 0;
-    mEncParams->noFrameSkipped = PV_OFF;
-
-    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
-        // Color conversion is needed.
-        free(mInputFrameData);
-        mInputFrameData = NULL;
-        if (((uint64_t)mWidth * mHeight) > ((uint64_t)INT32_MAX / 3)) {
-            ALOGE("b/25812794, Buffer size is too big.");
-            return OMX_ErrorBadParameter;
-        }
-        mInputFrameData =
-            (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
-        CHECK(mInputFrameData != NULL);
-    }
-
-    // PV's MPEG4 encoder requires the video dimension of multiple
-    if (mWidth % 16 != 0 || mHeight % 16 != 0) {
-        ALOGE("Video frame size %dx%d must be a multiple of 16",
-            mWidth, mHeight);
-        return OMX_ErrorBadParameter;
-    }
-
-    // Set IDR frame refresh interval
-    mEncParams->intraPeriod = mKeyFrameInterval;
-
-    mEncParams->numIntraMB = 0;
-    mEncParams->sceneDetect = PV_ON;
-    mEncParams->searchRange = 16;
-    mEncParams->mv8x8Enable = PV_OFF;
-    mEncParams->gobHeaderInterval = 0;
-    mEncParams->useACPred = PV_ON;
-    mEncParams->intraDCVlcTh = 0;
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftMPEG4Encoder::initEncoder() {
-    CHECK(!mStarted);
-
-    OMX_ERRORTYPE errType = OMX_ErrorNone;
-    if (OMX_ErrorNone != (errType = initEncParams())) {
-        ALOGE("Failed to initialized encoder params");
-        mSignalledError = true;
-        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-        return errType;
-    }
-
-    if (!PVInitVideoEncoder(mHandle, mEncParams)) {
-        ALOGE("Failed to initialize the encoder");
-        mSignalledError = true;
-        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-        return OMX_ErrorUndefined;
-    }
-
-    mNumInputFrames = -1;  // 1st buffer for codec specific data
-    mStarted = true;
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {
-    if (mEncParams) {
-        delete mEncParams;
-        mEncParams = NULL;
-    }
-
-    if (mHandle) {
-        delete mHandle;
-        mHandle = NULL;
-    }
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamVideoBitrate:
-        {
-            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
-                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
-
-            if (!isValidOMXParam(bitRate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (bitRate->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            bitRate->eControlRate = OMX_Video_ControlRateVariable;
-            bitRate->nTargetBitrate = mBitrate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoH263:
-        {
-            OMX_VIDEO_PARAM_H263TYPE *h263type =
-                (OMX_VIDEO_PARAM_H263TYPE *)params;
-
-            if (!isValidOMXParam(h263type)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (h263type->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            h263type->nAllowedPictureTypes =
-                (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
-            h263type->eProfile = OMX_VIDEO_H263ProfileBaseline;
-            h263type->eLevel = OMX_VIDEO_H263Level45;
-            h263type->bPLUSPTYPEAllowed = OMX_FALSE;
-            h263type->bForceRoundingTypeToZero = OMX_FALSE;
-            h263type->nPictureHeaderRepetition = 0;
-            h263type->nGOBHeaderInterval = 0;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoMpeg4:
-        {
-            OMX_VIDEO_PARAM_MPEG4TYPE *mpeg4type =
-                (OMX_VIDEO_PARAM_MPEG4TYPE *)params;
-
-            if (!isValidOMXParam(mpeg4type)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (mpeg4type->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mpeg4type->eProfile = OMX_VIDEO_MPEG4ProfileCore;
-            mpeg4type->eLevel = OMX_VIDEO_MPEG4Level2;
-            mpeg4type->nAllowedPictureTypes =
-                (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
-            mpeg4type->nBFrames = 0;
-            mpeg4type->nIDCVLCThreshold = 0;
-            mpeg4type->bACPred = OMX_TRUE;
-            mpeg4type->nMaxPacketSize = 256;
-            mpeg4type->nTimeIncRes = 1000;
-            mpeg4type->nHeaderExtension = 0;
-            mpeg4type->bReversibleVLC = OMX_FALSE;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoBitrate:
-        {
-            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
-                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
-
-            if (!isValidOMXParam(bitRate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (bitRate->nPortIndex != 1 ||
-                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
-                return OMX_ErrorUndefined;
-            }
-
-            mBitrate = bitRate->nTargetBitrate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoH263:
-        {
-            OMX_VIDEO_PARAM_H263TYPE *h263type =
-                (OMX_VIDEO_PARAM_H263TYPE *)params;
-
-            if (!isValidOMXParam(h263type)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (h263type->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (h263type->eProfile != OMX_VIDEO_H263ProfileBaseline ||
-                h263type->eLevel != OMX_VIDEO_H263Level45 ||
-                (h263type->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) ||
-                h263type->bPLUSPTYPEAllowed != OMX_FALSE ||
-                h263type->bForceRoundingTypeToZero != OMX_FALSE ||
-                h263type->nPictureHeaderRepetition != 0 ||
-                h263type->nGOBHeaderInterval != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoMpeg4:
-        {
-            OMX_VIDEO_PARAM_MPEG4TYPE *mpeg4type =
-                (OMX_VIDEO_PARAM_MPEG4TYPE *)params;
-
-            if (!isValidOMXParam(mpeg4type)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (mpeg4type->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (mpeg4type->eProfile != OMX_VIDEO_MPEG4ProfileCore ||
-                mpeg4type->eLevel > OMX_VIDEO_MPEG4Level2 ||
-                (mpeg4type->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) ||
-                mpeg4type->nBFrames != 0 ||
-                mpeg4type->nIDCVLCThreshold != 0 ||
-                mpeg4type->bACPred != OMX_TRUE ||
-                mpeg4type->nMaxPacketSize != 256 ||
-                mpeg4type->nTimeIncRes != 1000 ||
-                mpeg4type->nHeaderExtension != 0 ||
-                mpeg4type->bReversibleVLC != OMX_FALSE) {
-                return OMX_ErrorUndefined;
-            }
-
-            mKeyFrameInterval = int32_t(mpeg4type->nPFrames + 1);
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mSawInputEOS) {
-        return;
-    }
-
-    if (!mStarted) {
-        if (OMX_ErrorNone != initEncoder()) {
-            return;
-        }
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        outHeader->nTimeStamp = 0;
-        outHeader->nFlags = 0;
-        outHeader->nOffset = 0;
-        outHeader->nFilledLen = 0;
-        outHeader->nOffset = 0;
-
-        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
-        int32_t dataLength = outHeader->nAllocLen;
-
-        if (mNumInputFrames < 0) {
-            if (!PVGetVolHeader(mHandle, outPtr, &dataLength, 0)) {
-                ALOGE("Failed to get VOL header");
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                return;
-            }
-            ALOGV("Output VOL header: %d bytes", dataLength);
-            ++mNumInputFrames;
-            outHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
-            outHeader->nFilledLen = dataLength;
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
-        }
-
-        // Save the input buffer info so that it can be
-        // passed to an output buffer
-        InputBufferInfo info;
-        info.mTimeUs = inHeader->nTimeStamp;
-        info.mFlags = inHeader->nFlags;
-        mInputBufferInfoVec.push(info);
-
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            mSawInputEOS = true;
-        }
-
-        if (inHeader->nFilledLen > 0) {
-            OMX_ERRORTYPE error = validateInputBuffer(inHeader);
-            if (error != OMX_ErrorNone) {
-                ALOGE("b/69065651");
-                android_errorWriteLog(0x534e4554, "69065651");
-                mSignalledError = true;
-                notify(OMX_EventError, error, 0, 0);
-                return;
-            }
-            const uint8_t *inputData = NULL;
-            if (mInputDataIsMeta) {
-                inputData =
-                    extractGraphicBuffer(
-                            mInputFrameData, (mWidth * mHeight * 3) >> 1,
-                            inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
-                            mWidth, mHeight);
-                if (inputData == NULL) {
-                    ALOGE("Unable to extract gralloc buffer in metadata mode");
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                        return;
-                }
-            } else {
-                inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
-                if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
-                    ConvertYUV420SemiPlanarToYUV420Planar(
-                        inputData, mInputFrameData, mWidth, mHeight);
-                    inputData = mInputFrameData;
-                }
-            }
-
-            CHECK(inputData != NULL);
-
-            VideoEncFrameIO vin, vout;
-            memset(&vin, 0, sizeof(vin));
-            memset(&vout, 0, sizeof(vout));
-            vin.height = align(mHeight, 16);
-            vin.pitch = align(mWidth, 16);
-            vin.timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
-            vin.yChan = (uint8_t *)inputData;
-            vin.uChan = vin.yChan + vin.height * vin.pitch;
-            vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
-
-            ULong modTimeMs = 0;
-            int32_t nLayer = 0;
-            MP4HintTrack hintTrack;
-            if (!PVEncodeVideoFrame(mHandle, &vin, &vout,
-                    &modTimeMs, outPtr, &dataLength, &nLayer) ||
-                !PVGetHintTrack(mHandle, &hintTrack)) {
-                ALOGE("Failed to encode frame or get hink track at frame %" PRId64,
-                    mNumInputFrames);
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-            }
-            CHECK(NULL == PVGetOverrunBuffer(mHandle));
-            if (hintTrack.CodeType == 0) {  // I-frame serves as sync frame
-                outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
-            }
-
-            ++mNumInputFrames;
-        } else {
-            dataLength = 0;
-        }
-
-        inQueue.erase(inQueue.begin());
-        inInfo->mOwnedByUs = false;
-        notifyEmptyBufferDone(inHeader);
-
-        outQueue.erase(outQueue.begin());
-        CHECK(!mInputBufferInfoVec.empty());
-        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
-        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
-        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
-        outHeader->nFilledLen = dataLength;
-        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-    }
-}
-
-void SoftMPEG4Encoder::onReset() {
-    if (!mStarted) {
-        return;
-    }
-
-    PVCleanUpVideoEncoder(mHandle);
-
-    free(mInputFrameData);
-    mInputFrameData = NULL;
-
-    mStarted = false;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    using namespace android;
-    if (!strcmp(name, "OMX.google.h263.encoder")) {
-        return new android::SoftMPEG4Encoder(
-                name, "video_encoder.h263", OMX_VIDEO_CodingH263, MEDIA_MIMETYPE_VIDEO_H263,
-                kH263ProfileLevels, NELEM(kH263ProfileLevels),
-                callbacks, appData, component);
-    } else if (!strcmp(name, "OMX.google.mpeg4.encoder")) {
-        return new android::SoftMPEG4Encoder(
-                name, "video_encoder.mpeg4", OMX_VIDEO_CodingMPEG4, MEDIA_MIMETYPE_VIDEO_MPEG4,
-                kMPEG4ProfileLevels, NELEM(kMPEG4ProfileLevels),
-                callbacks, appData, component);
-    } else {
-        CHECK(!"Unknown component");
-    }
-    return NULL;
-}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
deleted file mode 100644
index 71e1170..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_MPEG4_ENCODER_H_
-#define SOFT_MPEG4_ENCODER_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
-#include "mp4enc_api.h"
-
-
-namespace android {
-
-struct CodecProfileLevel;
-
-struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
-    SoftMPEG4Encoder(
-            const char *name,
-            const char *componentRole,
-            OMX_VIDEO_CODINGTYPE codingType,
-            const char *mime,
-            const CodecProfileLevel *profileLevels,
-            size_t numProfileLevels,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    // Override SimpleSoftOMXComponent methods
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-    virtual void onReset();
-
-protected:
-    virtual ~SoftMPEG4Encoder();
-
-private:
-    enum {
-        kNumBuffers = 2,
-    };
-
-    // OMX input buffer's timestamp and flags
-    typedef struct {
-        int64_t mTimeUs;
-        int32_t mFlags;
-    } InputBufferInfo;
-
-    MP4EncodingMode mEncodeMode;
-    int32_t  mKeyFrameInterval; // 1: all I-frames, <0: infinite
-
-    int64_t  mNumInputFrames;
-    bool     mStarted;
-    bool     mSawInputEOS;
-    bool     mSignalledError;
-
-    tagvideoEncControls   *mHandle;
-    tagvideoEncOptions    *mEncParams;
-    uint8_t               *mInputFrameData;
-    Vector<InputBufferInfo> mInputBufferInfoVec;
-
-    OMX_ERRORTYPE initEncParams();
-    OMX_ERRORTYPE initEncoder();
-    OMX_ERRORTYPE releaseEncoder();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4Encoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_MPEG4_ENCODER_H_
diff --git a/media/libstagefright/codecs/m4v_h263/enc/exports.lds b/media/libstagefright/codecs/m4v_h263/enc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
deleted file mode 100644
index b669c84..0000000
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ /dev/null
@@ -1,27 +0,0 @@
-
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_mp3dec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftMP3.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libstagefright_mp3dec"],
-}
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
deleted file mode 100644
index 07bb45a..0000000
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ /dev/null
@@ -1,506 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftMP3"
-#include <utils/Log.h>
-
-#include "SoftMP3.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-#include <pvmp3decoder_api.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftMP3::SoftMP3(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mConfig(new tPVMP3DecoderExternal),
-      mDecoderBuf(NULL),
-      mAnchorTimeUs(0),
-      mNumFramesOutput(0),
-      mNumChannels(2),
-      mSamplingRate(44100),
-      mSignalledError(false),
-      mSawInputEos(false),
-      mSignalledOutputEos(false),
-      mOutputPortSettingsChange(NONE) {
-    initPorts();
-    initDecoder();
-}
-
-SoftMP3::~SoftMP3() {
-    if (mDecoderBuf != NULL) {
-        free(mDecoderBuf);
-        mDecoderBuf = NULL;
-    }
-
-    delete mConfig;
-    mConfig = NULL;
-}
-
-void SoftMP3::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_AUDIO_MPEG);
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingMP3;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kOutputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-void SoftMP3::initDecoder() {
-    mConfig->equalizerType = flat;
-    mConfig->crcEnabled = false;
-
-    uint32_t memRequirements = pvmp3_decoderMemRequirements();
-    mDecoderBuf = calloc(1, memRequirements);
-
-    pvmp3_InitDecoder(mConfig, mDecoderBuf);
-    mIsFirst = true;
-}
-
-void *SoftMP3::memsetSafe(OMX_BUFFERHEADERTYPE *outHeader, int c, size_t len) {
-    if (len > outHeader->nAllocLen) {
-        ALOGE("memset buffer too small: got %u, expected %zu", outHeader->nAllocLen, len);
-        android_errorWriteLog(0x534e4554, "29422022");
-        notify(OMX_EventError, OMX_ErrorUndefined, OUTPUT_BUFFER_TOO_SMALL, NULL);
-        mSignalledError = true;
-        return NULL;
-    }
-    return memset(outHeader->pBuffer, c, len);
-}
-
-OMX_ERRORTYPE SoftMP3::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingMP3 : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = mNumChannels;
-            pcmParams->nSamplingRate = mSamplingRate;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioMp3:
-        {
-            OMX_AUDIO_PARAM_MP3TYPE *mp3Params =
-                (OMX_AUDIO_PARAM_MP3TYPE *)params;
-
-            if (!isValidOMXParam(mp3Params)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (mp3Params->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mp3Params->nChannels = mNumChannels;
-            mp3Params->nBitRate = 0 /* unknown */;
-            mp3Params->nSampleRate = mSamplingRate;
-            // other fields are encoder-only
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftMP3::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.mp3",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingMP3)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (const OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            mNumChannels = pcmParams->nChannels;
-            mSamplingRate = pcmParams->nSamplingRate;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-void SoftMP3::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
-        BufferInfo *inInfo = NULL;
-        OMX_BUFFERHEADERTYPE *inHeader = NULL;
-        if (!inQueue.empty()) {
-            inInfo = *inQueue.begin();
-            inHeader = inInfo->mHeader;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-        outHeader->nFlags = 0;
-
-        if (inHeader) {
-            if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
-                mAnchorTimeUs = inHeader->nTimeStamp;
-                mNumFramesOutput = 0;
-            }
-
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                mSawInputEos = true;
-                if (mIsFirst && !inHeader->nFilledLen) {
-                     ALOGV("empty first EOS");
-                     outHeader->nFilledLen = 0;
-                     outHeader->nTimeStamp = inHeader->nTimeStamp;
-                     outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                     mSignalledOutputEos = true;
-                     outInfo->mOwnedByUs = false;
-                     outQueue.erase(outQueue.begin());
-                     notifyFillBufferDone(outHeader);
-                     inInfo->mOwnedByUs = false;
-                     inQueue.erase(inQueue.begin());
-                     notifyEmptyBufferDone(inHeader);
-                     return;
-                }
-            }
-
-            mConfig->pInputBuffer =
-                inHeader->pBuffer + inHeader->nOffset;
-
-            mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
-        } else {
-            mConfig->pInputBuffer = NULL;
-            mConfig->inputBufferCurrentLength = 0;
-        }
-        mConfig->inputBufferMaxLength = 0;
-        mConfig->inputBufferUsedLength = 0;
-
-        mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
-        if ((int32_t)outHeader->nAllocLen < mConfig->outputFrameSize) {
-            ALOGE("input buffer too small: got %u, expected %u",
-                outHeader->nAllocLen, mConfig->outputFrameSize);
-            android_errorWriteLog(0x534e4554, "27793371");
-            notify(OMX_EventError, OMX_ErrorUndefined, OUTPUT_BUFFER_TOO_SMALL, NULL);
-            mSignalledError = true;
-            return;
-        }
-
-        mConfig->pOutputBuffer =
-            reinterpret_cast<int16_t *>(outHeader->pBuffer);
-
-        ERROR_CODE decoderErr;
-        if ((decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf))
-                != NO_DECODING_ERROR) {
-            ALOGV("mp3 decoder returned error %d", decoderErr);
-
-            if (decoderErr != NO_ENOUGH_MAIN_DATA_ERROR
-                        && decoderErr != SIDE_INFO_ERROR) {
-                ALOGE("mp3 decoder returned error %d", decoderErr);
-
-                notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
-                mSignalledError = true;
-                return;
-            }
-
-            if (mConfig->outputFrameSize == 0) {
-                mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
-            }
-
-            if (decoderErr == NO_ENOUGH_MAIN_DATA_ERROR && mSawInputEos) {
-                if (!mIsFirst) {
-                    // pad the end of the stream with 529 samples, since that many samples
-                    // were trimmed off the beginning when decoding started
-                    outHeader->nOffset = 0;
-                    outHeader->nFilledLen = kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
-
-                    if (!memsetSafe(outHeader, 0, outHeader->nFilledLen)) {
-                        return;
-                    }
-
-                }
-                outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                mSignalledOutputEos = true;
-            } else {
-                // This is recoverable, just ignore the current frame and
-                // play silence instead.
-
-                // TODO: should we skip silence (and consume input data)
-                // if mIsFirst is true as we may not have a valid
-                // mConfig->samplingRate and mConfig->num_channels?
-                ALOGV_IF(mIsFirst, "insufficient data for first frame, sending silence");
-                if (!memsetSafe(outHeader, 0, mConfig->outputFrameSize * sizeof(int16_t))) {
-                    return;
-                }
-
-                if (inHeader) {
-                    mConfig->inputBufferUsedLength = inHeader->nFilledLen;
-                }
-            }
-        } else if (mConfig->samplingRate != mSamplingRate
-                || mConfig->num_channels != mNumChannels) {
-            mSamplingRate = mConfig->samplingRate;
-            mNumChannels = mConfig->num_channels;
-
-            notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-            mOutputPortSettingsChange = AWAITING_DISABLED;
-            return;
-        }
-
-        if (mIsFirst) {
-            mIsFirst = false;
-            // The decoder delay is 529 samples, so trim that many samples off
-            // the start of the first output buffer. This essentially makes this
-            // decoder have zero delay, which the rest of the pipeline assumes.
-            outHeader->nOffset =
-                kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
-
-            outHeader->nFilledLen =
-                mConfig->outputFrameSize * sizeof(int16_t) - outHeader->nOffset;
-        } else if (!mSignalledOutputEos) {
-            outHeader->nOffset = 0;
-            outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t);
-        }
-
-        outHeader->nTimeStamp =
-            mAnchorTimeUs + (mNumFramesOutput * 1000000LL) / mSamplingRate;
-
-        if (inHeader) {
-            CHECK_GE((int32_t)inHeader->nFilledLen, mConfig->inputBufferUsedLength);
-
-            inHeader->nOffset += mConfig->inputBufferUsedLength;
-            inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
-
-
-            if (inHeader->nFilledLen == 0) {
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-            }
-        }
-
-        mNumFramesOutput += mConfig->outputFrameSize / mNumChannels;
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outInfo = NULL;
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-    }
-}
-
-void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0) {
-        // Make sure that the next buffer output does not still
-        // depend on fragments from the last one decoded.
-        pvmp3_InitDecoder(mConfig, mDecoderBuf);
-        mIsFirst = true;
-        mSignalledError = false;
-        mSawInputEos = false;
-        mSignalledOutputEos = false;
-    }
-}
-
-void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-void SoftMP3::onReset() {
-    pvmp3_InitDecoder(mConfig, mDecoderBuf);
-    mIsFirst = true;
-    mSignalledError = false;
-    mSawInputEos = false;
-    mSignalledOutputEos = false;
-    mOutputPortSettingsChange = NONE;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftMP3(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
deleted file mode 100644
index 976fd00..0000000
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_MP3_H_
-
-#define SOFT_MP3_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-struct tPVMP3DecoderExternal;
-
-namespace android {
-
-struct SoftMP3 : public SimpleSoftOMXComponent {
-    SoftMP3(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftMP3();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers = 4,
-        kOutputBufferSize = 4608 * 2,
-        kPVMP3DecoderDelay = 529 // frames
-    };
-
-    tPVMP3DecoderExternal *mConfig;
-    void *mDecoderBuf;
-    int64_t mAnchorTimeUs;
-    int64_t mNumFramesOutput;
-
-    int32_t mNumChannels;
-    int32_t mSamplingRate;
-
-    bool mIsFirst;
-    bool mSignalledError;
-    bool mSawInputEos;
-    bool mSignalledOutputEos;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    void initPorts();
-    void initDecoder();
-    void *memsetSafe(OMX_BUFFERHEADERTYPE *outHeader, int c, size_t len);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftMP3);
-};
-
-}  // namespace android
-
-#endif  // SOFT_MP3_H_
-
-
diff --git a/media/libstagefright/codecs/mp3dec/exports.lds b/media/libstagefright/codecs/mp3dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/mp3dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.bp b/media/libstagefright/codecs/mpeg2dec/Android.bp
deleted file mode 100644
index abd1379..0000000
--- a/media/libstagefright/codecs/mpeg2dec/Android.bp
+++ /dev/null
@@ -1,32 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_mpeg2dec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    static_libs: ["libmpeg2dec"],
-    srcs: ["SoftMPEG2.cpp"],
-
-    cflags: [
-        "-Wall",
-        "-Wno-unused-variable",
-    ],
-
-    version_script: "exports.lds",
-
-    ldflags: ["-Wl,-Bsymbolic"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
deleted file mode 100644
index 9f8001f..0000000
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ /dev/null
@@ -1,872 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftMPEG2"
-#include <utils/Log.h>
-
-#include "iv_datatypedef.h"
-#include "iv.h"
-#include "ivd.h"
-#include "impeg2d.h"
-#include "SoftMPEG2.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <OMX_VideoExt.h>
-
-namespace android {
-
-#define componentName                   "video_decoder.mpeg2"
-#define codingType                      OMX_VIDEO_CodingMPEG2
-#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_MPEG2
-
-/** Function and structure definitions to keep code similar for each codec */
-#define ivdec_api_function              impeg2d_api_function
-#define ivdext_init_ip_t                impeg2d_init_ip_t
-#define ivdext_init_op_t                impeg2d_init_op_t
-#define ivdext_fill_mem_rec_ip_t        impeg2d_fill_mem_rec_ip_t
-#define ivdext_fill_mem_rec_op_t        impeg2d_fill_mem_rec_op_t
-#define ivdext_ctl_set_num_cores_ip_t   impeg2d_ctl_set_num_cores_ip_t
-#define ivdext_ctl_set_num_cores_op_t   impeg2d_ctl_set_num_cores_op_t
-
-#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
-
-static const CodecProfileLevel kProfileLevels[] = {
-    { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelHL  },
-
-    { OMX_VIDEO_MPEG2ProfileMain  , OMX_VIDEO_MPEG2LevelHL  },
-};
-
-SoftMPEG2::SoftMPEG2(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoDecoderOMXComponent(
-            name, componentName, codingType,
-            kProfileLevels, ARRAY_SIZE(kProfileLevels),
-            320 /* width */, 240 /* height */, callbacks,
-            appData, component),
-      mCodecCtx(NULL),
-      mMemRecords(NULL),
-      mFlushOutBuffer(NULL),
-      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
-      mIvColorFormat(IV_YUV_420P),
-      mNewWidth(mWidth),
-      mNewHeight(mHeight),
-      mChangingResolution(false),
-      mSignalledError(false),
-      mStride(mWidth) {
-    initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);
-
-    // If input dump is enabled, then open create an empty file
-    GENERATE_FILE_NAMES();
-    CREATE_DUMP_FILE(mInFile);
-}
-
-SoftMPEG2::~SoftMPEG2() {
-    if (OK != deInitDecoder()) {
-        ALOGE("Failed to deinit decoder");
-        notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-        mSignalledError = true;
-        return;
-    }
-}
-
-
-static ssize_t getMinTimestampIdx(OMX_S64 *pNTimeStamp, bool *pIsTimeStampValid) {
-    OMX_S64 minTimeStamp = LLONG_MAX;
-    ssize_t idx = -1;
-    for (ssize_t i = 0; i < MAX_TIME_STAMPS; i++) {
-        if (pIsTimeStampValid[i]) {
-            if (pNTimeStamp[i] < minTimeStamp) {
-                minTimeStamp = pNTimeStamp[i];
-                idx = i;
-            }
-        }
-    }
-    return idx;
-}
-
-static size_t GetCPUCoreCount() {
-    long cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK(cpuCoreCount >= 1);
-    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
-    return (size_t)cpuCoreCount;
-}
-
-void SoftMPEG2::logVersion() {
-    ivd_ctl_getversioninfo_ip_t s_ctl_ip;
-    ivd_ctl_getversioninfo_op_t s_ctl_op;
-    UWORD8 au1_buf[512];
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
-    s_ctl_ip.pv_version_buffer = au1_buf;
-    s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in getting version number: 0x%x",
-                s_ctl_op.u4_error_code);
-    } else {
-        ALOGV("Ittiam decoder version number: %s",
-                (char *)s_ctl_ip.pv_version_buffer);
-    }
-    return;
-}
-
-status_t SoftMPEG2::setParams(size_t stride) {
-    ivd_ctl_set_config_ip_t s_ctl_ip;
-    ivd_ctl_set_config_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-    s_ctl_ip.u4_disp_wd = (UWORD32)stride;
-    s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-
-    s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
-
-    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the run-time parameters: 0x%x",
-                s_ctl_op.u4_error_code);
-
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftMPEG2::resetPlugin() {
-    mIsInFlush = false;
-    mReceivedEOS = false;
-    memset(mTimeStamps, 0, sizeof(mTimeStamps));
-    memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
-
-    /* Initialize both start and end times */
-    gettimeofday(&mTimeStart, NULL);
-    gettimeofday(&mTimeEnd, NULL);
-
-    return OK;
-}
-
-status_t SoftMPEG2::resetDecoder() {
-    ivd_ctl_reset_ip_t s_ctl_ip;
-    ivd_ctl_reset_op_t s_ctl_op;
-    IV_API_CALL_STATUS_T status;
-
-    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
-    s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
-    s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-
-    /* Set the run-time (dynamic) parameters */
-    setParams(outputBufferWidth());
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    mStride = 0;
-    mSignalledError = false;
-
-    return OK;
-}
-
-status_t SoftMPEG2::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_cores_op;
-    IV_API_CALL_STATUS_T status;
-    s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
-    s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
-    s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
-    s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
-
-    status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op);
-    if (IV_SUCCESS != status) {
-        ALOGE("Error in setting number of cores: 0x%x",
-                s_set_cores_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-status_t SoftMPEG2::setFlushMode() {
-    IV_API_CALL_STATUS_T status;
-    ivd_ctl_flush_ip_t s_video_flush_ip;
-    ivd_ctl_flush_op_t s_video_flush_op;
-
-    s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
-    s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
-    s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
-
-    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
-    status = ivdec_api_function(
-            mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
-                s_video_flush_op.u4_error_code);
-        return UNKNOWN_ERROR;
-    }
-
-    mWaitForI = true;
-    mIsInFlush = true;
-    return OK;
-}
-
-status_t SoftMPEG2::initDecoder() {
-    IV_API_CALL_STATUS_T status;
-
-    UWORD32 u4_num_reorder_frames;
-    UWORD32 u4_num_ref_frames;
-    UWORD32 u4_share_disp_buf;
-
-    mNumCores = GetCPUCoreCount();
-    mWaitForI = true;
-
-    /* Initialize number of ref and reorder modes (for MPEG2) */
-    u4_num_reorder_frames = 16;
-    u4_num_ref_frames = 16;
-    u4_share_disp_buf = 0;
-
-    uint32_t displayStride = outputBufferWidth();
-    uint32_t displayHeight = outputBufferHeight();
-    uint32_t displaySizeY = displayStride * displayHeight;
-
-    {
-        iv_num_mem_rec_ip_t s_num_mem_rec_ip;
-        iv_num_mem_rec_op_t s_num_mem_rec_op;
-
-        s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
-        s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
-        s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
-
-        status = ivdec_api_function(
-                mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op);
-        if (IV_SUCCESS != status) {
-            ALOGE("Error in getting mem records: 0x%x",
-                    s_num_mem_rec_op.u4_error_code);
-            return UNKNOWN_ERROR;
-        }
-
-        mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
-    }
-
-    mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc(
-            128, mNumMemRecords * sizeof(iv_mem_rec_t));
-    if (mMemRecords == NULL) {
-        ALOGE("Allocation failure");
-        return NO_MEMORY;
-    }
-
-    memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
-
-    {
-        size_t i;
-        ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
-        ivdext_fill_mem_rec_op_t s_fill_mem_op;
-        iv_mem_rec_t *ps_mem_rec;
-
-        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
-            sizeof(ivdext_fill_mem_rec_ip_t);
-
-        s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
-        s_fill_mem_ip.e_output_format = mIvColorFormat;
-        s_fill_mem_ip.u4_deinterlace = 1;
-        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
-        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
-        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
-        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
-        s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
-            sizeof(ivdext_fill_mem_rec_op_t);
-
-        ps_mem_rec = mMemRecords;
-        for (i = 0; i < mNumMemRecords; i++) {
-            ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
-        }
-
-        status = ivdec_api_function(
-                mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op);
-
-        if (IV_SUCCESS != status) {
-            ALOGE("Error in filling mem records: 0x%x",
-                    s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
-            return UNKNOWN_ERROR;
-        }
-        mNumMemRecords =
-            s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
-
-        ps_mem_rec = mMemRecords;
-
-        for (i = 0; i < mNumMemRecords; i++) {
-            ps_mem_rec->pv_base = ivd_aligned_malloc(
-                    ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
-            if (ps_mem_rec->pv_base == NULL) {
-                ALOGE("Allocation failure for memory record #%zu of size %u",
-                        i, ps_mem_rec->u4_mem_size);
-                status = IV_FAIL;
-                return NO_MEMORY;
-            }
-
-            ps_mem_rec++;
-        }
-    }
-
-    /* Initialize the decoder */
-    {
-        ivdext_init_ip_t s_init_ip;
-        ivdext_init_op_t s_init_op;
-
-        void *dec_fxns = (void *)ivdec_api_function;
-
-        s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
-        s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
-        s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
-        s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
-        s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
-
-        s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
-        s_init_ip.u4_deinterlace = 1;
-
-        s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
-
-        s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
-        s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
-
-        mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
-        mCodecCtx->pv_fxns = dec_fxns;
-        mCodecCtx->u4_size = sizeof(iv_obj_t);
-
-        status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);
-        if (status != IV_SUCCESS) {
-            ALOGE("Error in init: 0x%x",
-                    s_init_op.s_ivd_init_op_t.u4_error_code);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    /* Reset the plugin state */
-    resetPlugin();
-
-    /* Set the run time (dynamic) parameters */
-    mStride = outputBufferWidth();
-    setParams(mStride);
-
-    /* Set number of cores/threads to be used by the codec */
-    setNumCores();
-
-    /* Get codec version */
-    logVersion();
-
-    /* Allocate internal picture buffer */
-    uint32_t bufferSize = displaySizeY * 3 / 2;
-    mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
-    if (NULL == mFlushOutBuffer) {
-        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
-        return NO_MEMORY;
-    }
-
-    mInitNeeded = false;
-    mFlushNeeded = false;
-    return OK;
-}
-
-status_t SoftMPEG2::deInitDecoder() {
-    size_t i;
-
-    if (mMemRecords) {
-        iv_mem_rec_t *ps_mem_rec;
-
-        ps_mem_rec = mMemRecords;
-        for (i = 0; i < mNumMemRecords; i++) {
-            if (ps_mem_rec->pv_base) {
-                ivd_aligned_free(ps_mem_rec->pv_base);
-            }
-            ps_mem_rec++;
-        }
-        ivd_aligned_free(mMemRecords);
-        mMemRecords = NULL;
-    }
-
-    if (mFlushOutBuffer) {
-        ivd_aligned_free(mFlushOutBuffer);
-        mFlushOutBuffer = NULL;
-    }
-
-    mInitNeeded = true;
-    mChangingResolution = false;
-    mCodecCtx = NULL;
-
-    return OK;
-}
-
-status_t SoftMPEG2::reInitDecoder() {
-    status_t ret;
-
-    deInitDecoder();
-
-    ret = initDecoder();
-    if (OK != ret) {
-        ALOGE("Failed to initialize decoder");
-        deInitDecoder();
-        return ret;
-    }
-    mSignalledError = false;
-    return OK;
-}
-
-void SoftMPEG2::onReset() {
-    SoftVideoDecoderOMXComponent::onReset();
-
-    mWaitForI = true;
-
-    resetDecoder();
-    resetPlugin();
-}
-
-bool SoftMPEG2::getSeqInfo() {
-    IV_API_CALL_STATUS_T status;
-    impeg2d_ctl_get_seq_info_ip_t s_ctl_get_seq_info_ip;
-    impeg2d_ctl_get_seq_info_op_t s_ctl_get_seq_info_op;
-
-    s_ctl_get_seq_info_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_ctl_get_seq_info_ip.e_sub_cmd =
-        (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_GET_SEQ_INFO;
-
-    s_ctl_get_seq_info_ip.u4_size = sizeof(impeg2d_ctl_get_seq_info_ip_t);
-    s_ctl_get_seq_info_op.u4_size = sizeof(impeg2d_ctl_get_seq_info_op_t);
-
-    status = ivdec_api_function(
-            (iv_obj_t *)mCodecCtx, (void *)&s_ctl_get_seq_info_ip,
-            (void *)&s_ctl_get_seq_info_op);
-
-    if (status != IV_SUCCESS) {
-        ALOGW("Error in getting Sequence info: 0x%x",
-                s_ctl_get_seq_info_op.u4_error_code);
-        return false;
-    }
-
-
-    int32_t primaries = s_ctl_get_seq_info_op.u1_colour_primaries;
-    int32_t transfer = s_ctl_get_seq_info_op.u1_transfer_characteristics;
-    int32_t coeffs = s_ctl_get_seq_info_op.u1_matrix_coefficients;
-    bool fullRange = false;  // mpeg2 video has limited range.
-
-    ColorAspects colorAspects;
-    ColorUtils::convertIsoColorAspectsToCodecAspects(
-            primaries, transfer, coeffs, fullRange, colorAspects);
-
-    // Update color aspects if necessary.
-    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
-        mBitstreamColorAspects = colorAspects;
-        status_t err = handleColorAspectsChange();
-        CHECK(err == OK);
-    }
-    return true;
-}
-
-OMX_ERRORTYPE SoftMPEG2::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
-    const uint32_t oldWidth = mWidth;
-    const uint32_t oldHeight = mHeight;
-    OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
-    if (mWidth != oldWidth || mHeight != oldHeight) {
-        reInitDecoder();
-    }
-    return ret;
-}
-
-bool SoftMPEG2::setDecodeArgs(
-        ivd_video_decode_ip_t *ps_dec_ip,
-        ivd_video_decode_op_t *ps_dec_op,
-        OMX_BUFFERHEADERTYPE *inHeader,
-        OMX_BUFFERHEADERTYPE *outHeader,
-        size_t timeStampIx) {
-    size_t sizeY = outputBufferWidth() * outputBufferHeight();
-    size_t sizeUV;
-
-    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
-    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
-
-    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
-
-    /* When in flush and after EOS with zero byte input,
-     * inHeader is set to zero. Hence check for non-null */
-    if (inHeader) {
-        ps_dec_ip->u4_ts = timeStampIx;
-        ps_dec_ip->pv_stream_buffer = inHeader->pBuffer
-                + inHeader->nOffset;
-        ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
-    } else {
-        ps_dec_ip->u4_ts = 0;
-        ps_dec_ip->pv_stream_buffer = NULL;
-        ps_dec_ip->u4_num_Bytes = 0;
-    }
-
-    sizeUV = sizeY / 4;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
-    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
-
-    uint8_t *pBuf;
-    if (outHeader) {
-        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
-            android_errorWriteLog(0x534e4554, "27833616");
-            return false;
-        }
-        pBuf = outHeader->pBuffer;
-    } else {
-        // mFlushOutBuffer always has the right size.
-        pBuf = mFlushOutBuffer;
-    }
-
-    ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
-    ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
-    ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
-    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
-    return true;
-}
-void SoftMPEG2::onPortFlushCompleted(OMX_U32 portIndex) {
-    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
-    if (kOutputPortIndex == portIndex) {
-        setFlushMode();
-
-        while (true) {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            IV_API_CALL_STATUS_T status;
-            size_t sizeY, sizeUV;
-
-            setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
-
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-            if (0 == s_dec_op.u4_output_present) {
-                resetPlugin();
-                break;
-            }
-        }
-    }
-}
-
-void SoftMPEG2::onQueueFilled(OMX_U32 portIndex) {
-    UNUSED(portIndex);
-
-    if (mSignalledError) {
-        return;
-    }
-    if (mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    if (NULL == mCodecCtx) {
-        if (OK != initDecoder()) {
-            ALOGE("Failed to initialize decoder");
-            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
-            mSignalledError = true;
-            return;
-        }
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
-    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
-
-    if (outputBufferWidth() != mStride) {
-        /* Set the run-time (dynamic) parameters */
-        mStride = outputBufferWidth();
-        setParams(mStride);
-    }
-
-    while (!outQueue.empty()) {
-        BufferInfo *inInfo;
-        OMX_BUFFERHEADERTYPE *inHeader;
-
-        BufferInfo *outInfo;
-        OMX_BUFFERHEADERTYPE *outHeader;
-        size_t timeStampIx;
-
-        inInfo = NULL;
-        inHeader = NULL;
-
-        if (!mIsInFlush) {
-            if (!inQueue.empty()) {
-                inInfo = *inQueue.begin();
-                inHeader = inInfo->mHeader;
-            } else {
-                break;
-            }
-        }
-
-        outInfo = *outQueue.begin();
-        outHeader = outInfo->mHeader;
-        outHeader->nFlags = 0;
-        outHeader->nTimeStamp = 0;
-        outHeader->nOffset = 0;
-
-        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
-            mReceivedEOS = true;
-            if (inHeader->nFilledLen == 0) {
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-                setFlushMode();
-            }
-        }
-
-        // When there is an init required and the decoder is not in flush mode,
-        // update output port's definition and reinitialize decoder.
-        if (mInitNeeded && !mIsInFlush) {
-            bool portWillReset = false;
-            handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
-
-            if (OK != reInitDecoder()) {
-                ALOGE("Failed to reinitialize decoder");
-            }
-            return;
-        }
-
-        /* Get a free slot in timestamp array to hold input timestamp */
-        {
-            size_t i;
-            timeStampIx = 0;
-            for (i = 0; i < MAX_TIME_STAMPS; i++) {
-                if (!mTimeStampsValid[i]) {
-                    timeStampIx = i;
-                    break;
-                }
-            }
-            if (inHeader != NULL) {
-                mTimeStampsValid[timeStampIx] = true;
-                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
-            }
-        }
-
-        {
-            ivd_video_decode_ip_t s_dec_ip;
-            ivd_video_decode_op_t s_dec_op;
-            WORD32 timeDelay, timeTaken;
-            size_t sizeY, sizeUV;
-
-            if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
-                ALOGE("Decoder arg setup failed");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-            // If input dump is enabled, then write to file
-            DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes);
-
-            if (s_dec_ip.u4_num_Bytes > 0) {
-                char *ptr = (char *)s_dec_ip.pv_stream_buffer;
-            }
-
-            GETTIME(&mTimeStart, NULL);
-            /* Compute time elapsed between end of previous decode()
-             * to start of current decode() */
-            TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-
-            IV_API_CALL_STATUS_T status;
-            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-
-            bool unsupportedDimensions = (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code);
-            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
-
-            getSeqInfo();
-
-            GETTIME(&mTimeEnd, NULL);
-            /* Compute time taken for decode() */
-            TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
-
-            ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-                   s_dec_op.u4_num_bytes_consumed);
-            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
-                mFlushNeeded = true;
-            }
-
-            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
-                /* If the input did not contain picture data, then ignore
-                 * the associated timestamp */
-                mTimeStampsValid[timeStampIx] = false;
-            }
-
-            // This is needed to handle CTS DecoderTest testCodecResetsMPEG2WithoutSurface,
-            // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
-            if (unsupportedDimensions && !mFlushNeeded) {
-                bool portWillReset = false;
-                handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht);
-
-                if (OK != reInitDecoder()) {
-                    ALOGE("Failed to reinitialize decoder");
-                    return;
-                }
-
-                if (setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
-                    ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
-                }
-                return;
-            }
-
-            // If the decoder is in the changing resolution mode and there is no output present,
-            // that means the switching is done and it's ready to reset the decoder and the plugin.
-            if (mChangingResolution && !s_dec_op.u4_output_present) {
-                mChangingResolution = false;
-                resetDecoder();
-                resetPlugin();
-                mStride = outputBufferWidth();
-                setParams(mStride);
-                continue;
-            }
-
-            if (unsupportedDimensions || resChanged) {
-                mChangingResolution = true;
-                if (mFlushNeeded) {
-                    setFlushMode();
-                }
-
-                if (unsupportedDimensions) {
-                    mNewWidth = s_dec_op.u4_pic_wd;
-                    mNewHeight = s_dec_op.u4_pic_ht;
-                    mInitNeeded = true;
-                }
-                continue;
-            }
-
-            // Combine the resolution change and coloraspects change in one PortSettingChange event
-            // if necessary.
-            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
-                uint32_t width = s_dec_op.u4_pic_wd;
-                uint32_t height = s_dec_op.u4_pic_ht;
-                bool portWillReset = false;
-                handlePortSettingsChange(&portWillReset, width, height);
-
-                if (portWillReset) {
-                    resetDecoder();
-                    resetPlugin();
-                    return;
-                }
-            } else if (mUpdateColorAspects) {
-                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
-                    kDescribeColorAspectsIndex, NULL);
-                mUpdateColorAspects = false;
-                return;
-            }
-
-            if (s_dec_op.u4_output_present) {
-                ssize_t timeStampIdx;
-                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
-
-                timeStampIdx = getMinTimestampIdx(mTimeStamps, mTimeStampsValid);
-                if (timeStampIdx < 0) {
-                    ALOGE("b/62872863, Invalid timestamp index!");
-                    android_errorWriteLog(0x534e4554, "62872863");
-                    return;
-                }
-                outHeader->nTimeStamp = mTimeStamps[timeStampIdx];
-                mTimeStampsValid[timeStampIdx] = false;
-
-                /* mWaitForI waits for the first I picture. Once made FALSE, it
-                   has to remain false till explicitly set to TRUE. */
-                mWaitForI = mWaitForI && !(IV_I_FRAME == s_dec_op.e_pic_type);
-
-                if (mWaitForI) {
-                    s_dec_op.u4_output_present = false;
-                } else {
-                    ALOGV("Output timestamp: %lld, res: %ux%u",
-                            (long long)outHeader->nTimeStamp, mWidth, mHeight);
-                    DUMP_TO_FILE(mOutFile, outHeader->pBuffer, outHeader->nFilledLen);
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                }
-            } else if (mIsInFlush) {
-                /* If in flush mode and no output is returned by the codec,
-                 * then come out of flush mode */
-                mIsInFlush = false;
-
-                /* If EOS was recieved on input port and there is no output
-                 * from the codec, then signal EOS on output port */
-                if (mReceivedEOS) {
-                    outHeader->nFilledLen = 0;
-                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                    resetPlugin();
-                }
-            }
-        }
-
-        /* If input EOS is seen and decoder is not in flush mode,
-         * set the decoder in flush mode.
-         * There can be a case where EOS is sent along with last picture data
-         * In that case, only after decoding that input data, decoder has to be
-         * put in flush. This case is handled here  */
-
-        if (mReceivedEOS && !mIsInFlush) {
-            setFlushMode();
-        }
-
-        // TODO: Handle more than one picture data
-        if (inHeader != NULL) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-    }
-}
-
-int SoftMPEG2::getColorAspectPreference() {
-    return kPreferBitstream;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
-        OMX_COMPONENTTYPE **component) {
-    return new android::SoftMPEG2(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
deleted file mode 100644
index 338fc30..0000000
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_MPEG2_H_
-
-#define SOFT_MPEG2_H_
-
-#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-#include <sys/time.h>
-
-namespace android {
-
-#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
-#define ivd_aligned_free(buf) free(buf)
-
-/** Number of entries in the time-stamp array */
-#define MAX_TIME_STAMPS 64
-
-/** Maximum number of cores supported by the codec */
-#define CODEC_MAX_NUM_CORES 4
-
-#define CODEC_MAX_WIDTH     1920
-
-#define CODEC_MAX_HEIGHT    1088
-
-/** Input buffer size */
-#define INPUT_BUF_SIZE (1024 * 1024)
-
-#define MIN(a, b) ((a) < (b)) ? (a) : (b)
-
-/** Used to remove warnings about unused parameters */
-#define UNUSED(x) ((void)(x))
-
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b);
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
-struct SoftMPEG2 : public SoftVideoDecoderOMXComponent {
-    SoftMPEG2(
-            const char *name, const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData, OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftMPEG2();
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-    virtual int getColorAspectPreference();
-    virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
-private:
-    // Number of input and output buffers
-    enum {
-        kNumBuffers = 8
-    };
-
-    iv_obj_t *mCodecCtx;         // Codec context
-    iv_mem_rec_t *mMemRecords;   // Memory records requested by the codec
-    size_t mNumMemRecords;       // Number of memory records requested by the codec
-
-    size_t mNumCores;            // Number of cores to be uesd by the codec
-
-    struct timeval mTimeStart;   // Time at the start of decode()
-    struct timeval mTimeEnd;     // Time at the end of decode()
-
-    // Internal buffer to be used to flush out the buffers from decoder
-    uint8_t *mFlushOutBuffer;
-
-    // Status of entries in the timestamp array
-    bool mTimeStampsValid[MAX_TIME_STAMPS];
-
-    // Timestamp array - Since codec does not take 64 bit timestamps,
-    // they are maintained in the plugin
-    OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
-
-#ifdef FILE_DUMP_ENABLE
-    char mInFile[200];
-#endif /* FILE_DUMP_ENABLE */
-
-    OMX_COLOR_FORMATTYPE mOmxColorFormat;    // OMX Color format
-    IV_COLOR_FORMAT_T mIvColorFormat;        // Ittiam Color format
-
-    bool mIsInFlush;        // codec is flush mode
-    bool mReceivedEOS;      // EOS is receieved on input port
-    bool mInitNeeded;
-    uint32_t mNewWidth;
-    uint32_t mNewHeight;
-    // The input stream has changed to a different resolution, which is still supported by the
-    // codec. So the codec is switching to decode the new resolution.
-    bool mChangingResolution;
-    bool mFlushNeeded;
-    bool mSignalledError;
-    bool mWaitForI;
-    size_t mStride;
-
-    status_t initDecoder();
-    status_t deInitDecoder();
-    status_t setFlushMode();
-    status_t setParams(size_t stride);
-    void logVersion();
-    status_t setNumCores();
-    status_t resetDecoder();
-    status_t resetPlugin();
-    status_t reInitDecoder();
-
-    bool setDecodeArgs(
-            ivd_video_decode_ip_t *ps_dec_ip,
-            ivd_video_decode_op_t *ps_dec_op,
-            OMX_BUFFERHEADERTYPE *inHeader,
-            OMX_BUFFERHEADERTYPE *outHeader,
-            size_t timeStampIx);
-
-    bool getSeqInfo();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG2);
-};
-
-#ifdef FILE_DUMP_ENABLE
-
-#define INPUT_DUMP_PATH     "/sdcard/media/mpeg2d_input"
-#define INPUT_DUMP_EXT      "m2v"
-
-#define GENERATE_FILE_NAMES() {                         \
-    GETTIME(&mTimeStart, NULL);                         \
-    strcpy(mInFile, "");                                \
-    sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
-            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
-            INPUT_DUMP_EXT);                            \
-}
-
-#define CREATE_DUMP_FILE(m_filename) {                  \
-    FILE *fp = fopen(m_filename, "wb");                 \
-    if (fp != NULL) {                                   \
-        fclose(fp);                                     \
-    } else {                                            \
-        ALOGD("Could not open file %s", m_filename);    \
-    }                                                   \
-}
-#define DUMP_TO_FILE(m_filename, m_buf, m_size)         \
-{                                                       \
-    FILE *fp = fopen(m_filename, "ab");                 \
-    if (fp != NULL && m_buf != NULL) {                  \
-        int i;                                          \
-        i = fwrite(m_buf, 1, m_size, fp);               \
-        ALOGD("fwrite ret %d to write %d", i, m_size);  \
-        if (i != (int)m_size) {                         \
-            ALOGD("Error in fwrite, returned %d", i);   \
-            perror("Error in write to file");           \
-        }                                               \
-        fclose(fp);                                     \
-    } else {                                            \
-        ALOGD("Could not write to file %s", m_filename);\
-    }                                                   \
-}
-#else /* FILE_DUMP_ENABLE */
-#define INPUT_DUMP_PATH
-#define INPUT_DUMP_EXT
-#define OUTPUT_DUMP_PATH
-#define OUTPUT_DUMP_EXT
-#define GENERATE_FILE_NAMES()
-#define CREATE_DUMP_FILE(m_filename)
-#define DUMP_TO_FILE(m_filename, m_buf, m_size)
-#endif /* FILE_DUMP_ENABLE */
-
-} // namespace android
-
-#endif  // SOFT_MPEG2_H_
diff --git a/media/libstagefright/codecs/mpeg2dec/exports.lds b/media/libstagefright/codecs/mpeg2dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/mpeg2dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
deleted file mode 100644
index ba6dc2a..0000000
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ /dev/null
@@ -1,37 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_on2_dec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_on2_dec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_vpxdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftVPX.cpp"],
-
-    shared_libs: ["libvpx"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/on2/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/dec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/on2/dec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/on2/dec/NOTICE b/media/libstagefright/codecs/on2/dec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/on2/dec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
deleted file mode 100644
index bffc23a..0000000
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftVPX"
-#include <utils/Log.h>
-#include <utils/misc.h>
-#include "OMX_VideoExt.h"
-
-#include "SoftVPX.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-
-namespace android {
-
-// Only need to declare the highest supported profile and level here.
-static const CodecProfileLevel kVP9ProfileLevels[] = {
-    { OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level5 },
-    { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Level5 },
-    { OMX_VIDEO_VP9Profile2HDR, OMX_VIDEO_VP9Level5 },
-    { OMX_VIDEO_VP9Profile2HDR10Plus, OMX_VIDEO_VP9Level5 },
-};
-
-SoftVPX::SoftVPX(
-        const char *name,
-        const char *componentRole,
-        OMX_VIDEO_CODINGTYPE codingType,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVideoDecoderOMXComponent(
-            name, componentRole, codingType,
-            codingType == OMX_VIDEO_CodingVP8 ? NULL : kVP9ProfileLevels,
-            codingType == OMX_VIDEO_CodingVP8 ?  0 : NELEM(kVP9ProfileLevels),
-            320 /* width */, 240 /* height */, callbacks, appData, component),
-      mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
-      mEOSStatus(INPUT_DATA_AVAILABLE),
-      mCtx(NULL),
-      mFrameParallelMode(false),
-      mTimeStampIdx(0),
-      mImg(NULL) {
-    // arbitrary from avc/hevc as vpx does not specify a min compression ratio
-    const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
-    const char *mime = mMode == MODE_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9;
-    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
-    initPorts(
-            kNumBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* inputBufferSize */,
-            kNumBuffers, mime, kMinCompressionRatio);
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftVPX::~SoftVPX() {
-    destroyDecoder();
-}
-
-static int GetCPUCoreCount() {
-    int cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK(cpuCoreCount >= 1);
-    ALOGV("Number of CPU cores: %d", cpuCoreCount);
-    return cpuCoreCount;
-}
-
-bool SoftVPX::supportDescribeHdrStaticInfo() {
-    return true;
-}
-
-bool SoftVPX::supportDescribeHdr10PlusInfo() {
-    return true;
-}
-
-status_t SoftVPX::initDecoder() {
-    mCtx = new vpx_codec_ctx_t;
-    vpx_codec_err_t vpx_err;
-    vpx_codec_dec_cfg_t cfg;
-    vpx_codec_flags_t flags;
-    memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
-    memset(&flags, 0, sizeof(vpx_codec_flags_t));
-    cfg.threads = GetCPUCoreCount();
-
-    if (mFrameParallelMode) {
-        flags |= VPX_CODEC_USE_FRAME_THREADING;
-    }
-
-    if ((vpx_err = vpx_codec_dec_init(
-                (vpx_codec_ctx_t *)mCtx,
-                 mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo,
-                 &cfg, flags))) {
-        ALOGE("on2 decoder failed to initialize. (%d)", vpx_err);
-        return UNKNOWN_ERROR;
-    }
-
-    return OK;
-}
-
-status_t SoftVPX::destroyDecoder() {
-    vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
-    delete (vpx_codec_ctx_t *)mCtx;
-    mCtx = NULL;
-    return OK;
-}
-
-bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset) {
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    BufferInfo *outInfo = NULL;
-    OMX_BUFFERHEADERTYPE *outHeader = NULL;
-    vpx_codec_iter_t iter = NULL;
-
-    if (flushDecoder && mFrameParallelMode) {
-        // Flush decoder by passing NULL data ptr and 0 size.
-        // Ideally, this should never fail.
-        if (vpx_codec_decode((vpx_codec_ctx_t *)mCtx, NULL, 0, NULL, 0)) {
-            ALOGE("Failed to flush on2 decoder.");
-            return false;
-        }
-    }
-
-    if (!display) {
-        if (!flushDecoder) {
-            ALOGE("Invalid operation.");
-            return false;
-        }
-        // Drop all the decoded frames in decoder.
-        while ((mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter))) {
-        }
-        return true;
-    }
-
-    while (!outQueue.empty()) {
-        if (mImg == NULL) {
-            mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
-            if (mImg == NULL) {
-                break;
-            }
-        }
-        uint32_t width = mImg->d_w;
-        uint32_t height = mImg->d_h;
-        outInfo = *outQueue.begin();
-        outHeader = outInfo->mHeader;
-        CHECK(mImg->fmt == VPX_IMG_FMT_I420 || mImg->fmt == VPX_IMG_FMT_I42016);
-        OMX_COLOR_FORMATTYPE outputColorFormat = OMX_COLOR_FormatYUV420Planar;
-        int32_t bpp = 1;
-        if (mImg->fmt == VPX_IMG_FMT_I42016) {
-            outputColorFormat = OMX_COLOR_FormatYUV420Planar16;
-            bpp = 2;
-        }
-        handlePortSettingsChange(portWillReset, width, height, outputColorFormat);
-        if (*portWillReset) {
-            return true;
-        }
-
-        outHeader->nOffset = 0;
-        outHeader->nFlags = 0;
-        outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * bpp * 3) / 2;
-        PrivInfo *privInfo = (PrivInfo *)mImg->user_priv;
-        outHeader->nTimeStamp = privInfo->mTimeStamp;
-        if (privInfo->mHdr10PlusInfo != nullptr) {
-            queueOutputFrameConfig(privInfo->mHdr10PlusInfo);
-        }
-
-        if (outputBufferSafe(outHeader)) {
-            uint8_t *dst = outHeader->pBuffer;
-            const uint8_t *srcY = (const uint8_t *)mImg->planes[VPX_PLANE_Y];
-            const uint8_t *srcU = (const uint8_t *)mImg->planes[VPX_PLANE_U];
-            const uint8_t *srcV = (const uint8_t *)mImg->planes[VPX_PLANE_V];
-            size_t srcYStride = mImg->stride[VPX_PLANE_Y];
-            size_t srcUStride = mImg->stride[VPX_PLANE_U];
-            size_t srcVStride = mImg->stride[VPX_PLANE_V];
-            copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
-        } else {
-            outHeader->nFilledLen = 0;
-        }
-
-        mImg = NULL;
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        outInfo = NULL;
-        notifyFillBufferDone(outHeader);
-        outHeader = NULL;
-    }
-
-    if (!eos) {
-        return true;
-    }
-
-    if (!outQueue.empty()) {
-        outInfo = *outQueue.begin();
-        outQueue.erase(outQueue.begin());
-        outHeader = outInfo->mHeader;
-        outHeader->nTimeStamp = 0;
-        outHeader->nFilledLen = 0;
-        outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-        mEOSStatus = OUTPUT_FRAMES_FLUSHED;
-    }
-    return true;
-}
-
-bool SoftVPX::outputBufferSafe(OMX_BUFFERHEADERTYPE *outHeader) {
-    uint32_t width = outputBufferWidth();
-    uint32_t height = outputBufferHeight();
-    uint64_t nFilledLen = width;
-    nFilledLen *= height;
-    if (nFilledLen > UINT32_MAX / 3) {
-        ALOGE("b/29421675, nFilledLen overflow %llu w %u h %u",
-                (unsigned long long)nFilledLen, width, height);
-        android_errorWriteLog(0x534e4554, "29421675");
-        return false;
-    } else if (outHeader->nAllocLen < outHeader->nFilledLen) {
-        ALOGE("b/27597103, buffer too small");
-        android_errorWriteLog(0x534e4554, "27597103");
-        return false;
-    }
-
-    return true;
-}
-
-void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mOutputPortSettingsChange != NONE || mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    bool EOSseen = false;
-    bool portWillReset = false;
-
-    while ((mEOSStatus == INPUT_EOS_SEEN || !inQueue.empty())
-            && !outQueue.empty()) {
-        // Output the pending frames that left from last port reset or decoder flush.
-        if (mEOSStatus == INPUT_EOS_SEEN || mImg != NULL) {
-            if (!outputBuffers(
-                     mEOSStatus == INPUT_EOS_SEEN, true /* display */,
-                     mEOSStatus == INPUT_EOS_SEEN, &portWillReset)) {
-                ALOGE("on2 decoder failed to output frame.");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-            if (portWillReset || mEOSStatus == OUTPUT_FRAMES_FLUSHED ||
-                    mEOSStatus == INPUT_EOS_SEEN) {
-                return;
-            }
-            // Continue as outQueue may be empty now.
-            continue;
-        }
-
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        // Software VP9 Decoder does not need the Codec Specific Data (CSD)
-        // (specified in http://www.webmproject.org/vp9/profiles/). Ignore it if
-        // it was passed.
-        if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
-            // Only ignore CSD buffer for VP9.
-            if (mMode == MODE_VP9) {
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-                continue;
-            } else {
-                // Tolerate the CSD buffer for VP8. This is a workaround
-                // for b/28689536.
-                ALOGW("WARNING: Got CSD buffer for VP8.");
-            }
-        }
-
-        mPrivInfo[mTimeStampIdx].mTimeStamp = inHeader->nTimeStamp;
-
-        if (inInfo->mFrameConfig) {
-            mPrivInfo[mTimeStampIdx].mHdr10PlusInfo = dequeueInputFrameConfig();
-        } else {
-            mPrivInfo[mTimeStampIdx].mHdr10PlusInfo.clear();
-        }
-
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            mEOSStatus = INPUT_EOS_SEEN;
-            EOSseen = true;
-        }
-
-        if (inHeader->nFilledLen > 0) {
-            vpx_codec_err_t err = vpx_codec_decode(
-                    (vpx_codec_ctx_t *)mCtx, inHeader->pBuffer + inHeader->nOffset,
-                    inHeader->nFilledLen, &mPrivInfo[mTimeStampIdx], 0);
-            if (err == VPX_CODEC_OK) {
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-            } else {
-                ALOGE("on2 decoder failed to decode frame. err: %d", err);
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-        }
-
-        mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers;
-
-        if (!outputBuffers(
-                 EOSseen /* flushDecoder */, true /* display */, EOSseen, &portWillReset)) {
-            ALOGE("on2 decoder failed to output frame.");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            return;
-        }
-        if (portWillReset) {
-            return;
-        }
-    }
-}
-
-void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == kInputPortIndex) {
-        bool portWillReset = false;
-        if (!outputBuffers(
-                 true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
-            ALOGE("Failed to flush decoder.");
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            return;
-        }
-        mEOSStatus = INPUT_DATA_AVAILABLE;
-    }
-}
-
-void SoftVPX::onReset() {
-    bool portWillReset = false;
-    if (!outputBuffers(
-             true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
-        ALOGW("Failed to flush decoder. Try to hard reset decoder");
-        destroyDecoder();
-        initDecoder();
-    }
-    mEOSStatus = INPUT_DATA_AVAILABLE;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    if (!strcmp(name, "OMX.google.vp8.decoder")) {
-        return new android::SoftVPX(
-                name, "video_decoder.vp8", OMX_VIDEO_CodingVP8,
-                callbacks, appData, component);
-    } else if (!strcmp(name, "OMX.google.vp9.decoder")) {
-        return new android::SoftVPX(
-                name, "video_decoder.vp9", OMX_VIDEO_CodingVP9,
-                callbacks, appData, component);
-    } else {
-        CHECK(!"Unknown component");
-    }
-    return NULL;
-}
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
deleted file mode 100644
index 0aa8e9c..0000000
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_VPX_H_
-
-#define SOFT_VPX_H_
-
-#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-
-#include "vpx/vpx_decoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8dx.h"
-
-namespace android {
-
-struct ABuffer;
-
-struct SoftVPX : public SoftVideoDecoderOMXComponent {
-    SoftVPX(const char *name,
-            const char *componentRole,
-            OMX_VIDEO_CODINGTYPE codingType,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftVPX();
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onReset();
-    virtual bool supportDescribeHdrStaticInfo();
-    virtual bool supportDescribeHdr10PlusInfo();
-
-private:
-    enum {
-        kNumBuffers = 10
-    };
-
-    enum {
-        MODE_VP8,
-        MODE_VP9
-    } mMode;
-
-    enum {
-        INPUT_DATA_AVAILABLE,  // VPX component is ready to decode data.
-        INPUT_EOS_SEEN,        // VPX component saw EOS and is flushing On2 decoder.
-        OUTPUT_FRAMES_FLUSHED  // VPX component finished flushing On2 decoder.
-    } mEOSStatus;
-
-    void *mCtx;
-    bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder.
-    struct PrivInfo {
-        OMX_TICKS mTimeStamp;
-        sp<ABuffer> mHdr10PlusInfo;
-    };
-    PrivInfo mPrivInfo[kNumBuffers];
-    uint8_t mTimeStampIdx;
-    vpx_image_t *mImg;
-
-    status_t initDecoder();
-    status_t destroyDecoder();
-    bool outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset);
-    bool outputBufferSafe(OMX_BUFFERHEADERTYPE *outHeader);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
-};
-
-}  // namespace android
-
-#endif  // SOFT_VPX_H_
diff --git a/media/libstagefright/codecs/on2/dec/exports.lds b/media/libstagefright/codecs/on2/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/on2/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
deleted file mode 100644
index e85ff98..0000000
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ /dev/null
@@ -1,44 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_on2_enc_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_on2_enc_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_vpxenc",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: [
-        "SoftVPXEncoder.cpp",
-        "SoftVP8Encoder.cpp",
-        "SoftVP9Encoder.cpp",
-    ],
-
-    cflags: ["-Wall"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    shared_libs: ["libvpx"],
-    header_libs: ["libbase_headers"],
-}
diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE
deleted file mode 100644
index faed58a..0000000
--- a/media/libstagefright/codecs/on2/enc/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2013, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
deleted file mode 100644
index 9198b7c..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "SoftVP8Encoder"
-#include "SoftVP8Encoder.h"
-
-#include <utils/Log.h>
-#include <utils/misc.h>
-
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-#ifndef INT32_MAX
-#define INT32_MAX   2147483647
-#endif
-
-namespace android {
-
-static const CodecProfileLevel kVp8ProfileLevels[] = {
-    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
-    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
-    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
-    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
-};
-
-SoftVP8Encoder::SoftVP8Encoder(const char *name,
-                               const OMX_CALLBACKTYPE *callbacks,
-                               OMX_PTR appData,
-                               OMX_COMPONENTTYPE **component)
-    : SoftVPXEncoder(
-            name, callbacks, appData, component, "video_encoder.vp8",
-            OMX_VIDEO_CodingVP8, MEDIA_MIMETYPE_VIDEO_VP8, 2,
-            kVp8ProfileLevels, NELEM(kVp8ProfileLevels)),
-      mDCTPartitions(0),
-      mLevel(OMX_VIDEO_VP8Level_Version0) {
-}
-
-void SoftVP8Encoder::setCodecSpecificInterface() {
-    mCodecInterface = vpx_codec_vp8_cx();
-}
-
-void SoftVP8Encoder::setCodecSpecificConfiguration() {
-    switch (mLevel) {
-        case OMX_VIDEO_VP8Level_Version0:
-            mCodecConfiguration->g_profile = 0;
-            break;
-
-        case OMX_VIDEO_VP8Level_Version1:
-            mCodecConfiguration->g_profile = 1;
-            break;
-
-        case OMX_VIDEO_VP8Level_Version2:
-            mCodecConfiguration->g_profile = 2;
-            break;
-
-        case OMX_VIDEO_VP8Level_Version3:
-            mCodecConfiguration->g_profile = 3;
-            break;
-
-        default:
-            mCodecConfiguration->g_profile = 0;
-    }
-}
-
-vpx_codec_err_t SoftVP8Encoder::setCodecSpecificControls() {
-    vpx_codec_err_t codec_return = vpx_codec_control(mCodecContext,
-                                                     VP8E_SET_TOKEN_PARTITIONS,
-                                                     mDCTPartitions);
-    if (codec_return != VPX_CODEC_OK) {
-        ALOGE("Error setting dct partitions for vpx encoder.");
-    }
-    return codec_return;
-}
-
-OMX_ERRORTYPE SoftVP8Encoder::internalGetParameter(OMX_INDEXTYPE index,
-                                                   OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoVp8:
-            return internalGetVp8Params(
-                (OMX_VIDEO_PARAM_VP8TYPE *)param);
-
-        default:
-            return SoftVPXEncoder::internalGetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVP8Encoder::internalSetParameter(OMX_INDEXTYPE index,
-                                                   const OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoVp8:
-            return internalSetVp8Params(
-                (const OMX_VIDEO_PARAM_VP8TYPE *)param);
-
-        default:
-            return SoftVPXEncoder::internalSetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVP8Encoder::internalGetVp8Params(
-        OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
-    if (!isValidOMXParam(vp8Params)) {
-        android_errorWriteLog(0x534e4554, "273936274");
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp8Params->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain;
-    vp8Params->eLevel = mLevel;
-    vp8Params->bErrorResilientMode = mErrorResilience;
-    vp8Params->nDCTPartitions = mDCTPartitions;
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVP8Encoder::internalSetVp8Params(
-        const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
-    if (!isValidOMXParam(vp8Params)) {
-        android_errorWriteLog(0x534e4554, "273937171");
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp8Params->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) {
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 ||
-        vp8Params->eLevel == OMX_VIDEO_VP8Level_Version1 ||
-        vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 ||
-        vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) {
-        mLevel = vp8Params->eLevel;
-    } else {
-        return OMX_ErrorBadParameter;
-    }
-
-    mErrorResilience = vp8Params->bErrorResilientMode;
-    if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) {
-        mDCTPartitions = vp8Params->nDCTPartitions;
-    } else {
-        return OMX_ErrorBadParameter;
-    }
-    return OMX_ErrorNone;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h
deleted file mode 100644
index c5c2abf..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_VP8_ENCODER_H_
-
-#define SOFT_VP8_ENCODER_H_
-
-#include "SoftVPXEncoder.h"
-
-#include <OMX_VideoExt.h>
-#include <OMX_IndexExt.h>
-
-#include "vpx/vpx_encoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8cx.h"
-
-namespace android {
-
-// Exposes a vp8 encoder as an OMX Component
-//
-// In addition to the base class settings, Only following encoder settings are
-// available:
-//    - token partitioning
-struct SoftVP8Encoder : public SoftVPXEncoder {
-    SoftVP8Encoder(const char *name,
-                   const OMX_CALLBACKTYPE *callbacks,
-                   OMX_PTR appData,
-                   OMX_COMPONENTTYPE **component);
-
-protected:
-    // Returns current values for requested OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR param);
-
-    // Validates, extracts and stores relevant OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR param);
-
-    // Populates |mCodecInterface| with codec specific settings.
-    virtual void setCodecSpecificInterface();
-
-    // Sets codec specific configuration.
-    virtual void setCodecSpecificConfiguration();
-
-    // Initializes codec specific encoder settings.
-    virtual vpx_codec_err_t setCodecSpecificControls();
-
-    // Gets vp8 specific parameters.
-    OMX_ERRORTYPE internalGetVp8Params(
-        OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
-
-    // Handles vp8 specific parameters.
-    OMX_ERRORTYPE internalSetVp8Params(
-        const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
-
-private:
-    // Max value supported for DCT partitions
-    static const uint32_t kMaxDCTPartitions = 3;
-
-    // vp8 specific configuration parameter
-    // that enables token partitioning of
-    // the stream into substreams
-    int32_t mDCTPartitions;
-
-    // Encoder profile corresponding to OMX level parameter
-    //
-    // The inconsistency in the naming is caused by
-    // OMX spec referring vpx profiles (g_profile)
-    // as "levels" whereas using the name "profile" for
-    // something else.
-    OMX_VIDEO_VP8LEVELTYPE mLevel;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftVP8Encoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_VP8_ENCODER_H_
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
deleted file mode 100644
index f8495c2..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "SoftVP9Encoder"
-#include "SoftVP9Encoder.h"
-
-#include <utils/Log.h>
-#include <utils/misc.h>
-
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-namespace android {
-
-static const CodecProfileLevel kVp9ProfileLevels[] = {
-    { OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level41 },
-};
-
-SoftVP9Encoder::SoftVP9Encoder(
-        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SoftVPXEncoder(
-            name, callbacks, appData, component, "video_encoder.vp9",
-            OMX_VIDEO_CodingVP9, MEDIA_MIMETYPE_VIDEO_VP9, 4,
-            kVp9ProfileLevels, NELEM(kVp9ProfileLevels)),
-      mLevel(OMX_VIDEO_VP9Level1),
-      mTileColumns(0),
-      mFrameParallelDecoding(OMX_FALSE) {
-}
-
-void SoftVP9Encoder::setCodecSpecificInterface() {
-    mCodecInterface = vpx_codec_vp9_cx();
-}
-
-void SoftVP9Encoder::setCodecSpecificConfiguration() {
-    mCodecConfiguration->g_profile = 0;
-}
-
-vpx_codec_err_t SoftVP9Encoder::setCodecSpecificControls() {
-    vpx_codec_err_t codecReturn = vpx_codec_control(
-            mCodecContext, VP9E_SET_TILE_COLUMNS, mTileColumns);
-    if (codecReturn != VPX_CODEC_OK) {
-        ALOGE("Error setting VP9E_SET_TILE_COLUMNS to %d. vpx_codec_control() "
-              "returned %d", mTileColumns, codecReturn);
-        return codecReturn;
-    }
-    codecReturn = vpx_codec_control(
-            mCodecContext, VP9E_SET_FRAME_PARALLEL_DECODING,
-            mFrameParallelDecoding);
-    if (codecReturn != VPX_CODEC_OK) {
-        ALOGE("Error setting VP9E_SET_FRAME_PARALLEL_DECODING to %d."
-              "vpx_codec_control() returned %d", mFrameParallelDecoding,
-              codecReturn);
-        return codecReturn;
-    }
-    codecReturn = vpx_codec_control(mCodecContext, VP9E_SET_ROW_MT, 1);
-    if (codecReturn != VPX_CODEC_OK) {
-        ALOGE("Error setting VP9E_SET_ROW_MT to 1. vpx_codec_control() "
-              "returned %d", codecReturn);
-        return codecReturn;
-    }
-
-    // For VP9, we always set CPU_USED to 8 (because the realtime default is 0
-    // which is too slow).
-    codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8);
-    if (codecReturn != VPX_CODEC_OK) {
-        ALOGE("Error setting VP8E_SET_CPUUSED to 8. vpx_codec_control() "
-              "returned %d", codecReturn);
-        return codecReturn;
-    }
-    return codecReturn;
-}
-
-OMX_ERRORTYPE SoftVP9Encoder::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoVp9:
-            return internalGetVp9Params(
-                    (OMX_VIDEO_PARAM_VP9TYPE *)param);
-
-        default:
-            return SoftVPXEncoder::internalGetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVP9Encoder::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoVp9:
-            return internalSetVp9Params(
-                    (const OMX_VIDEO_PARAM_VP9TYPE *)param);
-
-        default:
-            return SoftVPXEncoder::internalSetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVP9Encoder::internalGetVp9Params(
-        OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
-    if (!isValidOMXParam(vp9Params)) {
-        android_errorWriteLog(0x534e4554, "273936553");
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp9Params->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    vp9Params->eProfile = OMX_VIDEO_VP9Profile0;
-    vp9Params->eLevel = mLevel;
-    vp9Params->bErrorResilientMode = mErrorResilience;
-    vp9Params->nTileColumns = mTileColumns;
-    vp9Params->bEnableFrameParallelDecoding = mFrameParallelDecoding;
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVP9Encoder::internalSetVp9Params(
-        const OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
-    if (!isValidOMXParam(vp9Params)) {
-        android_errorWriteLog(0x534e4554, "273937136");
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp9Params->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    if (vp9Params->eProfile != OMX_VIDEO_VP9Profile0) {
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vp9Params->eLevel == OMX_VIDEO_VP9Level1 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level11 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level2 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level21 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level3 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level31 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level4 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level41 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level5 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level51 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level52 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level6 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level61 ||
-        vp9Params->eLevel == OMX_VIDEO_VP9Level62) {
-        mLevel = vp9Params->eLevel;
-    } else {
-        return OMX_ErrorBadParameter;
-    }
-
-    mErrorResilience = vp9Params->bErrorResilientMode;
-    mTileColumns = vp9Params->nTileColumns;
-    mFrameParallelDecoding = vp9Params->bEnableFrameParallelDecoding;
-    return OMX_ErrorNone;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h
deleted file mode 100644
index 308a9ac..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_VP9_ENCODER_H_
-
-#define SOFT_VP9_ENCODER_H_
-
-#include "SoftVPXEncoder.h"
-
-#include <OMX_VideoExt.h>
-#include <OMX_IndexExt.h>
-
-#include "vpx/vpx_encoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8cx.h"
-
-namespace android {
-
-// Exposes a VP9 encoder as an OMX Component
-//
-// In addition to the base class settings, Only following encoder settings are
-// available:
-//    - tile rows
-//    - tile columns
-//    - frame parallel mode
-struct SoftVP9Encoder : public SoftVPXEncoder {
-    SoftVP9Encoder(const char *name,
-                   const OMX_CALLBACKTYPE *callbacks,
-                   OMX_PTR appData,
-                   OMX_COMPONENTTYPE **component);
-
-protected:
-    // Returns current values for requested OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR param);
-
-    // Validates, extracts and stores relevant OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR param);
-
-    // Populates |mCodecInterface| with codec specific settings.
-    virtual void setCodecSpecificInterface();
-
-    // Sets codec specific configuration.
-    virtual void setCodecSpecificConfiguration();
-
-    // Initializes codec specific encoder settings.
-    virtual vpx_codec_err_t setCodecSpecificControls();
-
-    // Gets vp9 specific parameters.
-    OMX_ERRORTYPE internalGetVp9Params(
-        OMX_VIDEO_PARAM_VP9TYPE* vp9Params);
-
-    // Handles vp9 specific parameters.
-    OMX_ERRORTYPE internalSetVp9Params(
-        const OMX_VIDEO_PARAM_VP9TYPE* vp9Params);
-
-private:
-    // Encoder profile corresponding to OMX level parameter
-    //
-    // The inconsistency in the naming is caused by
-    // OMX spec referring vpx profiles (g_profile)
-    // as "levels" whereas using the name "profile" for
-    // something else.
-    OMX_VIDEO_VP9LEVELTYPE mLevel;
-
-    int32_t mTileColumns;
-
-    OMX_BOOL mFrameParallelDecoding;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftVP9Encoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_VP9_ENCODER_H_
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
deleted file mode 100644
index cbedb72..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ /dev/null
@@ -1,799 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "SoftVPXEncoder"
-#include "SoftVPXEncoder.h"
-
-#include "SoftVP8Encoder.h"
-#include "SoftVP9Encoder.h"
-
-#include <android-base/macros.h>
-#include <utils/Log.h>
-#include <utils/misc.h>
-
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-#ifndef INT32_MAX
-#define INT32_MAX   2147483647
-#endif
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    // OMX IL 1.1.2
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 1;
-    params->nVersion.s.nRevision = 2;
-    params->nVersion.s.nStep = 0;
-}
-
-static int GetCPUCoreCount() {
-    int cpuCoreCount = 1;
-#if defined(_SC_NPROCESSORS_ONLN)
-    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
-#else
-    // _SC_NPROC_ONLN must be defined...
-    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
-#endif
-    CHECK_GE(cpuCoreCount, 1);
-    return cpuCoreCount;
-}
-
-SoftVPXEncoder::SoftVPXEncoder(const char *name,
-                               const OMX_CALLBACKTYPE *callbacks,
-                               OMX_PTR appData,
-                               OMX_COMPONENTTYPE **component,
-                               const char* role,
-                               OMX_VIDEO_CODINGTYPE codingType,
-                               const char* mimeType,
-                               int32_t minCompressionRatio,
-                               const CodecProfileLevel *profileLevels,
-                               size_t numProfileLevels)
-    : SoftVideoEncoderOMXComponent(
-            name, role, codingType, profileLevels, numProfileLevels,
-            176 /* width */, 144 /* height */,
-            callbacks, appData, component),
-      mCodecContext(NULL),
-      mCodecConfiguration(NULL),
-      mCodecInterface(NULL),
-      mBitrateUpdated(false),
-      mBitrateControlMode(VPX_VBR),
-      mErrorResilience(OMX_FALSE),
-      mKeyFrameInterval(0),
-      mMinQuantizer(0),
-      mMaxQuantizer(0),
-      mTemporalLayers(0),
-      mTemporalPatternType(OMX_VIDEO_VPXTemporalLayerPatternNone),
-      mTemporalPatternLength(0),
-      mTemporalPatternIdx(0),
-      mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
-      mConversionBuffer(NULL),
-      mKeyFrameRequested(false) {
-    memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
-    mTemporalLayerBitrateRatio[0] = 100;
-
-    const size_t kMinOutputBufferSize = 1024 * 1024; // arbitrary
-
-    initPorts(
-            kNumBuffers, kNumBuffers, kMinOutputBufferSize,
-            mimeType, minCompressionRatio);
-}
-
-SoftVPXEncoder::~SoftVPXEncoder() {
-    releaseEncoder();
-}
-
-status_t SoftVPXEncoder::initEncoder() {
-    vpx_codec_err_t codec_return;
-    status_t result = UNKNOWN_ERROR;
-
-    setCodecSpecificInterface();
-    if (mCodecInterface == NULL) {
-        goto CLEAN_UP;
-    }
-    ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
-          (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
-          mMinQuantizer, mMaxQuantizer);
-
-    mCodecConfiguration = new vpx_codec_enc_cfg_t;
-    codec_return = vpx_codec_enc_config_default(mCodecInterface,
-                                                mCodecConfiguration,
-                                                0);
-
-    if (codec_return != VPX_CODEC_OK) {
-        ALOGE("Error populating default configuration for vpx encoder.");
-        goto CLEAN_UP;
-    }
-
-    mCodecConfiguration->g_w = mWidth;
-    mCodecConfiguration->g_h = mHeight;
-    mCodecConfiguration->g_threads = GetCPUCoreCount();
-    mCodecConfiguration->g_error_resilient = mErrorResilience;
-
-    // OMX timebase unit is microsecond
-    // g_timebase is in seconds (i.e. 1/1000000 seconds)
-    mCodecConfiguration->g_timebase.num = 1;
-    mCodecConfiguration->g_timebase.den = 1000000;
-    // rc_target_bitrate is in kbps, mBitrate in bps
-    mCodecConfiguration->rc_target_bitrate = (mBitrate + 500) / 1000;
-    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
-    // Disable frame drop - not allowed in MediaCodec now.
-    mCodecConfiguration->rc_dropframe_thresh = 0;
-    // Disable lagged encoding.
-    mCodecConfiguration->g_lag_in_frames = 0;
-    if (mBitrateControlMode == VPX_CBR) {
-        // Disable spatial resizing.
-        mCodecConfiguration->rc_resize_allowed = 0;
-        // Single-pass mode.
-        mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
-        // Maximum amount of bits that can be subtracted from the target
-        // bitrate - expressed as percentage of the target bitrate.
-        mCodecConfiguration->rc_undershoot_pct = 100;
-        // Maximum amount of bits that can be added to the target
-        // bitrate - expressed as percentage of the target bitrate.
-        mCodecConfiguration->rc_overshoot_pct = 15;
-        // Initial value of the buffer level in ms.
-        mCodecConfiguration->rc_buf_initial_sz = 500;
-        // Amount of data that the encoder should try to maintain in ms.
-        mCodecConfiguration->rc_buf_optimal_sz = 600;
-        // The amount of data that may be buffered by the decoding
-        // application in ms.
-        mCodecConfiguration->rc_buf_sz = 1000;
-        // Enable error resilience - needed for packet loss.
-        mCodecConfiguration->g_error_resilient = 1;
-        // Maximum key frame interval - for CBR boost to 3000
-        mCodecConfiguration->kf_max_dist = 3000;
-        // Encoder determines optimal key frame placement automatically.
-        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
-    }
-
-    // Frames temporal pattern - for now WebRTC like pattern is only supported.
-    switch (mTemporalLayers) {
-        case 0:
-        {
-            mTemporalPatternLength = 0;
-            break;
-        }
-        case 1:
-        {
-            mCodecConfiguration->ts_number_layers = 1;
-            mCodecConfiguration->ts_rate_decimator[0] = 1;
-            mCodecConfiguration->ts_periodicity = 1;
-            mCodecConfiguration->ts_layer_id[0] = 0;
-            mTemporalPattern[0] = kTemporalUpdateLastRefAll;
-            mTemporalPatternLength = 1;
-            break;
-        }
-        case 2:
-        {
-            mCodecConfiguration->ts_number_layers = 2;
-            mCodecConfiguration->ts_rate_decimator[0] = 2;
-            mCodecConfiguration->ts_rate_decimator[1] = 1;
-            mCodecConfiguration->ts_periodicity = 2;
-            mCodecConfiguration->ts_layer_id[0] = 0;
-            mCodecConfiguration->ts_layer_id[1] = 1;
-            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
-            mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
-            mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
-            mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
-            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
-            mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
-            mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
-            mTemporalPattern[7] = kTemporalUpdateNone;
-            mTemporalPatternLength = 8;
-            break;
-        }
-        case 3:
-        {
-            mCodecConfiguration->ts_number_layers = 3;
-            mCodecConfiguration->ts_rate_decimator[0] = 4;
-            mCodecConfiguration->ts_rate_decimator[1] = 2;
-            mCodecConfiguration->ts_rate_decimator[2] = 1;
-            mCodecConfiguration->ts_periodicity = 4;
-            mCodecConfiguration->ts_layer_id[0] = 0;
-            mCodecConfiguration->ts_layer_id[1] = 2;
-            mCodecConfiguration->ts_layer_id[2] = 1;
-            mCodecConfiguration->ts_layer_id[3] = 2;
-            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
-            mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
-            mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
-            mTemporalPattern[3] = kTemporalUpdateNone;
-            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
-            mTemporalPattern[5] = kTemporalUpdateNone;
-            mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
-            mTemporalPattern[7] = kTemporalUpdateNone;
-            mTemporalPatternLength = 8;
-            break;
-        }
-        default:
-        {
-            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
-            goto CLEAN_UP;
-        }
-    }
-    // Set bitrate values for each layer
-    for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
-        mCodecConfiguration->ts_target_bitrate[i] =
-            mCodecConfiguration->rc_target_bitrate *
-            mTemporalLayerBitrateRatio[i] / 100;
-    }
-    if (mKeyFrameInterval > 0) {
-        mCodecConfiguration->kf_max_dist = mKeyFrameInterval;
-        mCodecConfiguration->kf_min_dist = mKeyFrameInterval;
-        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
-    }
-    if (mMinQuantizer > 0) {
-        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
-    }
-    if (mMaxQuantizer > 0) {
-        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
-    }
-    setCodecSpecificConfiguration();
-    mCodecContext = new vpx_codec_ctx_t;
-    codec_return = vpx_codec_enc_init(mCodecContext,
-                                      mCodecInterface,
-                                      mCodecConfiguration,
-                                      0);  // flags
-
-    if (codec_return != VPX_CODEC_OK) {
-        ALOGE("Error initializing vpx encoder");
-        goto CLEAN_UP;
-    }
-
-    // Extra CBR settings
-    if (mBitrateControlMode == VPX_CBR) {
-        codec_return = vpx_codec_control(mCodecContext,
-                                         VP8E_SET_STATIC_THRESHOLD,
-                                         1);
-        if (codec_return == VPX_CODEC_OK) {
-            uint32_t rc_max_intra_target =
-                mCodecConfiguration->rc_buf_optimal_sz * (mFramerate >> 17) / 10;
-            // Don't go below 3 times per frame bandwidth.
-            if (rc_max_intra_target < 300) {
-                rc_max_intra_target = 300;
-            }
-            codec_return = vpx_codec_control(mCodecContext,
-                                             VP8E_SET_MAX_INTRA_BITRATE_PCT,
-                                             rc_max_intra_target);
-        }
-        if (codec_return == VPX_CODEC_OK) {
-            codec_return = vpx_codec_control(mCodecContext,
-                                             VP8E_SET_CPUUSED,
-                                             -8);
-        }
-        if (codec_return != VPX_CODEC_OK) {
-            ALOGE("Error setting cbr parameters for vpx encoder.");
-            goto CLEAN_UP;
-        }
-    }
-
-    codec_return = setCodecSpecificControls();
-
-    if (codec_return != VPX_CODEC_OK) {
-        // The codec specific method would have logged the error.
-        goto CLEAN_UP;
-    }
-
-    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
-        free(mConversionBuffer);
-        mConversionBuffer = NULL;
-        if (((uint64_t)mWidth * mHeight) > ((uint64_t)INT32_MAX / 3)) {
-            ALOGE("b/25812794, Buffer size is too big, width=%d, height=%d.", mWidth, mHeight);
-            goto CLEAN_UP;
-        }
-        mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
-        if (mConversionBuffer == NULL) {
-            ALOGE("Allocating conversion buffer failed.");
-            goto CLEAN_UP;
-        }
-    }
-    return OK;
-
-CLEAN_UP:
-    releaseEncoder();
-    return result;
-}
-
-status_t SoftVPXEncoder::releaseEncoder() {
-    if (mCodecContext != NULL) {
-        vpx_codec_destroy(mCodecContext);
-        delete mCodecContext;
-        mCodecContext = NULL;
-    }
-
-    if (mCodecConfiguration != NULL) {
-        delete mCodecConfiguration;
-        mCodecConfiguration = NULL;
-    }
-
-    if (mConversionBuffer != NULL) {
-        free(mConversionBuffer);
-        mConversionBuffer = NULL;
-    }
-
-    // this one is not allocated by us
-    mCodecInterface = NULL;
-
-    return OK;
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
-                                                   OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoBitrate: {
-            OMX_VIDEO_PARAM_BITRATETYPE *bitrate =
-                (OMX_VIDEO_PARAM_BITRATETYPE *)param;
-
-            if (!isValidOMXParam(bitrate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (bitrate->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorUnsupportedIndex;
-            }
-
-            bitrate->nTargetBitrate = mBitrate;
-
-            if (mBitrateControlMode == VPX_VBR) {
-                bitrate->eControlRate = OMX_Video_ControlRateVariable;
-            } else if (mBitrateControlMode == VPX_CBR) {
-                bitrate->eControlRate = OMX_Video_ControlRateConstant;
-            } else {
-                return OMX_ErrorUnsupportedSetting;
-            }
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoAndroidVp8Encoder:
-            return internalGetAndroidVpxParams(
-                (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalGetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
-                                                   const OMX_PTR param) {
-    // can include extension index OMX_INDEXEXTTYPE
-    const int32_t indexFull = index;
-
-    switch (indexFull) {
-        case OMX_IndexParamVideoBitrate: {
-            const OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
-                (const OMX_VIDEO_PARAM_BITRATETYPE*) param;
-
-            if (!isValidOMXParam(bitRate)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            return internalSetBitrateParams(bitRate);
-        }
-
-        case OMX_IndexParamVideoAndroidVp8Encoder:
-            return internalSetAndroidVpxParams(
-                (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
-
-        default:
-            return SoftVideoEncoderOMXComponent::internalSetParameter(index, param);
-    }
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetConfig(
-        OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig) {
-    switch (index) {
-        case OMX_IndexConfigVideoIntraVOPRefresh:
-        {
-            OMX_CONFIG_INTRAREFRESHVOPTYPE *params =
-                (OMX_CONFIG_INTRAREFRESHVOPTYPE *)_params;
-
-            if (!isValidOMXParam(params)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (params->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            mKeyFrameRequested = params->IntraRefreshVOP;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexConfigVideoBitrate:
-        {
-            OMX_VIDEO_CONFIG_BITRATETYPE *params =
-                (OMX_VIDEO_CONFIG_BITRATETYPE *)_params;
-
-            if (!isValidOMXParam(params)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (params->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            if (mBitrate != params->nEncodeBitrate) {
-                mBitrate = params->nEncodeBitrate;
-                mBitrateUpdated = true;
-            }
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetConfig(index, _params, frameConfig);
-    }
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalGetBitrateParams(
-        OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
-    if (bitrate->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    bitrate->nTargetBitrate = mBitrate;
-
-    if (mBitrateControlMode == VPX_VBR) {
-        bitrate->eControlRate = OMX_Video_ControlRateVariable;
-    } else if (mBitrateControlMode == VPX_CBR) {
-        bitrate->eControlRate = OMX_Video_ControlRateConstant;
-    } else {
-        return OMX_ErrorUnsupportedSetting;
-    }
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
-        const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
-    if (bitrate->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    mBitrate = bitrate->nTargetBitrate;
-
-    if (bitrate->eControlRate == OMX_Video_ControlRateVariable) {
-        mBitrateControlMode = VPX_VBR;
-    } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) {
-        mBitrateControlMode = VPX_CBR;
-    } else {
-        return OMX_ErrorUnsupportedSetting;
-    }
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalGetAndroidVpxParams(
-        OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
-    if (!isValidOMXParam(vpxAndroidParams)) {
-        android_errorWriteLog(0x534e4554, "273936601");
-        return OMX_ErrorBadParameter;
-    }
-
-    if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    vpxAndroidParams->nKeyFrameInterval = mKeyFrameInterval;
-    vpxAndroidParams->eTemporalPattern = mTemporalPatternType;
-    vpxAndroidParams->nTemporalLayerCount = mTemporalLayers;
-    vpxAndroidParams->nMinQuantizer = mMinQuantizer;
-    vpxAndroidParams->nMaxQuantizer = mMaxQuantizer;
-    memcpy(vpxAndroidParams->nTemporalLayerBitrateRatio,
-           mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVpxParams(
-        const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
-    if (!isValidOMXParam(vpxAndroidParams)) {
-        android_errorWriteLog(0x534e4554, "273937551");
-        return OMX_ErrorBadParameter;
-    }
-    if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-    if (vpxAndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
-            vpxAndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
-        return OMX_ErrorBadParameter;
-    }
-    if (vpxAndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
-        return OMX_ErrorBadParameter;
-    }
-    if (vpxAndroidParams->nMinQuantizer > vpxAndroidParams->nMaxQuantizer) {
-        return OMX_ErrorBadParameter;
-    }
-
-    mTemporalPatternType = vpxAndroidParams->eTemporalPattern;
-    if (vpxAndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
-        mTemporalLayers = vpxAndroidParams->nTemporalLayerCount;
-    } else if (vpxAndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
-        mTemporalLayers = 0;
-    }
-    // Check the bitrate distribution between layers is in increasing order
-    if (mTemporalLayers > 1) {
-        for (size_t i = 0; i < mTemporalLayers - 1; i++) {
-            if (vpxAndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
-                    vpxAndroidParams->nTemporalLayerBitrateRatio[i]) {
-                ALOGE("Wrong bitrate ratio - should be in increasing order.");
-                return OMX_ErrorBadParameter;
-            }
-        }
-    }
-    mKeyFrameInterval = vpxAndroidParams->nKeyFrameInterval;
-    mMinQuantizer = vpxAndroidParams->nMinQuantizer;
-    mMaxQuantizer = vpxAndroidParams->nMaxQuantizer;
-    memcpy(mTemporalLayerBitrateRatio, vpxAndroidParams->nTemporalLayerBitrateRatio,
-            sizeof(mTemporalLayerBitrateRatio));
-    ALOGD("VPx: internalSetAndroidVpxParams. BRMode: %u. TS: %zu. KF: %u."
-            " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
-            (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
-            mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
-            mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
-    return OMX_ErrorNone;
-}
-
-vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
-    vpx_enc_frame_flags_t flags = 0;
-    if (mTemporalPatternLength > 0) {
-      int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
-      mTemporalPatternIdx++;
-      switch (mTemporalPattern[patternIdx]) {
-          case kTemporalUpdateLast:
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_REF_GF;
-              flags |= VP8_EFLAG_NO_REF_ARF;
-              break;
-          case kTemporalUpdateGoldenWithoutDependency:
-              flags |= VP8_EFLAG_NO_REF_GF;
-              FALLTHROUGH_INTENDED;
-          case kTemporalUpdateGolden:
-              flags |= VP8_EFLAG_NO_REF_ARF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              break;
-          case kTemporalUpdateAltrefWithoutDependency:
-              flags |= VP8_EFLAG_NO_REF_ARF;
-              flags |= VP8_EFLAG_NO_REF_GF;
-              FALLTHROUGH_INTENDED;
-          case kTemporalUpdateAltref:
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              break;
-          case kTemporalUpdateNoneNoRefAltref:
-              flags |= VP8_EFLAG_NO_REF_ARF;
-              FALLTHROUGH_INTENDED;
-          case kTemporalUpdateNone:
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              flags |= VP8_EFLAG_NO_UPD_ENTROPY;
-              break;
-          case kTemporalUpdateNoneNoRefGoldenRefAltRef:
-              flags |= VP8_EFLAG_NO_REF_GF;
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              flags |= VP8_EFLAG_NO_UPD_ENTROPY;
-              break;
-          case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
-              flags |= VP8_EFLAG_NO_REF_GF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              break;
-          case kTemporalUpdateLastRefAltRef:
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_REF_GF;
-              break;
-          case kTemporalUpdateGoldenRefAltRef:
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_LAST;
-              break;
-          case kTemporalUpdateLastAndGoldenRefAltRef:
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_REF_GF;
-              break;
-          case kTemporalUpdateLastRefAll:
-              flags |= VP8_EFLAG_NO_UPD_ARF;
-              flags |= VP8_EFLAG_NO_UPD_GF;
-              break;
-      }
-    }
-    return flags;
-}
-
-void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
-    // Initialize encoder if not already
-    if (mCodecContext == NULL) {
-        if (OK != initEncoder()) {
-            ALOGE("Failed to initialize encoder");
-            notify(OMX_EventError,
-                   OMX_ErrorUndefined,
-                   0,  // Extra notification data
-                   NULL);  // Notification data pointer
-            return;
-        }
-    }
-
-    vpx_codec_err_t codec_return;
-    List<BufferInfo *> &inputBufferInfoQueue = getPortQueue(kInputPortIndex);
-    List<BufferInfo *> &outputBufferInfoQueue = getPortQueue(kOutputPortIndex);
-
-    while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) {
-        BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin();
-        OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader;
-
-        BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin();
-        OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader;
-
-        if ((inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) &&
-                inputBufferHeader->nFilledLen == 0) {
-            inputBufferInfoQueue.erase(inputBufferInfoQueue.begin());
-            inputBufferInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inputBufferHeader);
-
-            outputBufferHeader->nFilledLen = 0;
-            outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outputBufferInfoQueue.erase(outputBufferInfoQueue.begin());
-            outputBufferInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outputBufferHeader);
-            return;
-        }
-
-        OMX_ERRORTYPE error = validateInputBuffer(inputBufferHeader);
-        if (error != OMX_ErrorNone) {
-            ALOGE("b/27569635");
-            android_errorWriteLog(0x534e4554, "27569635");
-            notify(OMX_EventError, error, 0, 0);
-            return;
-        }
-        const uint8_t *source =
-            inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
-
-        size_t frameSize = mWidth * mHeight * 3 / 2;
-        if (mInputDataIsMeta) {
-            source = extractGraphicBuffer(
-                    mConversionBuffer, frameSize,
-                    source, inputBufferHeader->nFilledLen,
-                    mWidth, mHeight);
-            if (source == NULL) {
-                ALOGE("Unable to extract gralloc buffer in metadata mode");
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                return;
-            }
-        } else {
-            if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
-                ConvertYUV420SemiPlanarToYUV420Planar(
-                        source, mConversionBuffer, mWidth, mHeight);
-
-                source = mConversionBuffer;
-            }
-        }
-        vpx_image_t raw_frame;
-        vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
-                     kInputBufferAlignment, (uint8_t *)source);
-
-        vpx_enc_frame_flags_t flags = getEncodeFlags();
-        if (mKeyFrameRequested) {
-            flags |= VPX_EFLAG_FORCE_KF;
-            mKeyFrameRequested = false;
-        }
-
-        if (mBitrateUpdated) {
-            mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
-            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
-                                                           mCodecConfiguration);
-            if (res != VPX_CODEC_OK) {
-                ALOGE("vpx encoder failed to update bitrate: %s",
-                      vpx_codec_err_to_string(res));
-                notify(OMX_EventError,
-                       OMX_ErrorUndefined,
-                       0, // Extra notification data
-                       NULL); // Notification data pointer
-            }
-            mBitrateUpdated = false;
-        }
-
-        uint32_t frameDuration;
-        if (inputBufferHeader->nTimeStamp > mLastTimestamp) {
-            frameDuration = (uint32_t)(inputBufferHeader->nTimeStamp - mLastTimestamp);
-        } else {
-            // Use default of 30 fps in case of 0 frame rate.
-            uint32_t framerate = mFramerate ?: (30 << 16);
-            frameDuration = (uint32_t)(((uint64_t)1000000 << 16) / framerate);
-        }
-        mLastTimestamp = inputBufferHeader->nTimeStamp;
-        codec_return = vpx_codec_encode(
-                mCodecContext,
-                &raw_frame,
-                inputBufferHeader->nTimeStamp,  // in timebase units
-                frameDuration,  // frame duration in timebase units
-                flags,  // frame flags
-                VPX_DL_REALTIME);  // encoding deadline
-        if (codec_return != VPX_CODEC_OK) {
-            ALOGE("vpx encoder failed to encode frame");
-            notify(OMX_EventError,
-                   OMX_ErrorUndefined,
-                   0,  // Extra notification data
-                   NULL);  // Notification data pointer
-            return;
-        }
-
-        vpx_codec_iter_t encoded_packet_iterator = NULL;
-        const vpx_codec_cx_pkt_t* encoded_packet;
-
-        while ((encoded_packet = vpx_codec_get_cx_data(
-                        mCodecContext, &encoded_packet_iterator))) {
-            if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
-                outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts;
-                outputBufferHeader->nFlags = 0;
-                if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY)
-                    outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
-                outputBufferHeader->nOffset = 0;
-                outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz;
-                if (outputBufferHeader->nFilledLen > outputBufferHeader->nAllocLen) {
-                    android_errorWriteLog(0x534e4554, "27569635");
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
-                    return;
-                }
-                memcpy(outputBufferHeader->pBuffer,
-                       encoded_packet->data.frame.buf,
-                       encoded_packet->data.frame.sz);
-                outputBufferInfo->mOwnedByUs = false;
-                outputBufferInfoQueue.erase(outputBufferInfoQueue.begin());
-                if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                    outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-                }
-                notifyFillBufferDone(outputBufferHeader);
-            }
-        }
-
-        inputBufferInfo->mOwnedByUs = false;
-        inputBufferInfoQueue.erase(inputBufferInfoQueue.begin());
-        notifyEmptyBufferDone(inputBufferHeader);
-    }
-}
-
-void SoftVPXEncoder::onReset() {
-    releaseEncoder();
-    mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-  if (!strcmp(name, "OMX.google.vp8.encoder")) {
-      return new android::SoftVP8Encoder(name, callbacks, appData, component);
-  } else if (!strcmp(name, "OMX.google.vp9.encoder")) {
-      return new android::SoftVP9Encoder(name, callbacks, appData, component);
-  } else {
-      CHECK(!"Unknown component");
-  }
-  return NULL;
-}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
deleted file mode 100644
index 7208d69..0000000
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_VPX_ENCODER_H_
-
-#define SOFT_VPX_ENCODER_H_
-
-#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
-
-#include <OMX_VideoExt.h>
-#include <OMX_IndexExt.h>
-
-#include "vpx/vpx_encoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8cx.h"
-
-namespace android {
-
-// Base class for a VPX Encoder OMX Component
-//
-// Boilerplate for callback bindings are taken care
-// by the base class SimpleSoftOMXComponent and its
-// parent SoftOMXComponent.
-//
-// Only following encoder settings are available (codec specific settings might
-// be available in the sub-classes):
-//    - target bitrate
-//    - rate control (constant / variable)
-//    - frame rate
-//    - error resilience
-//    - reconstruction & loop filters (g_profile)
-//
-// Only following color formats are recognized
-//    - YUV420Planar
-//    - YUV420SemiPlanar
-//    - AndroidOpaque
-//
-// Following settings are not configurable by the client
-//    - encoding deadline is realtime
-//    - multithreaded encoding utilizes a number of threads equal
-// to online cpu's available
-//    - the algorithm interface for encoder is decided by the sub-class in use
-//    - fractional bits of frame rate is discarded
-//    - OMX timestamps are in microseconds, therefore
-// encoder timebase is fixed to 1/1000000
-
-struct SoftVPXEncoder : public SoftVideoEncoderOMXComponent {
-    SoftVPXEncoder(const char *name,
-                   const OMX_CALLBACKTYPE *callbacks,
-                   OMX_PTR appData,
-                   OMX_COMPONENTTYPE **component,
-                   const char* role,
-                   OMX_VIDEO_CODINGTYPE codingType,
-                   const char* mimeType,
-                   int32_t minCompressionRatio,
-                   const CodecProfileLevel *profileLevels,
-                   size_t numProfileLevels);
-
-protected:
-    virtual ~SoftVPXEncoder();
-
-    // Returns current values for requested OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR param);
-
-    // Validates, extracts and stores relevant OMX
-    // parameters
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR param);
-
-    virtual OMX_ERRORTYPE internalSetConfig(
-            OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig);
-
-    // OMX callback when buffers available
-    // Note that both an input and output buffer
-    // is expected to be available to carry out
-    // encoding of the frame
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-    virtual void onReset();
-
-    // Initializes vpx encoder with available settings.
-    status_t initEncoder();
-
-    // Populates mCodecInterface with codec specific settings.
-    virtual void setCodecSpecificInterface() = 0;
-
-    // Sets codec specific configuration.
-    virtual void setCodecSpecificConfiguration() = 0;
-
-    // Sets codec specific encoder controls.
-    virtual vpx_codec_err_t setCodecSpecificControls() = 0;
-
-    // Get current encode flags.
-    virtual vpx_enc_frame_flags_t getEncodeFlags();
-
-    // Releases vpx encoder instance, with it's associated
-    // data structures.
-    //
-    // Unless called earlier, this is handled by the
-    // dtor.
-    status_t releaseEncoder();
-
-    // Get bitrate parameters.
-    virtual OMX_ERRORTYPE internalGetBitrateParams(
-        OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
-
-    // Updates bitrate to reflect port settings.
-    virtual OMX_ERRORTYPE internalSetBitrateParams(
-        const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
-
-    // Gets Android vpx specific parameters.
-    OMX_ERRORTYPE internalGetAndroidVpxParams(
-            OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams);
-
-    // Handles Android vpx specific parameters.
-    OMX_ERRORTYPE internalSetAndroidVpxParams(
-            const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams);
-
-    enum TemporalReferences {
-        // For 1 layer case: reference all (last, golden, and alt ref), but only
-        // update last.
-        kTemporalUpdateLastRefAll = 12,
-        // First base layer frame for 3 temporal layers, which updates last and
-        // golden with alt ref dependency.
-        kTemporalUpdateLastAndGoldenRefAltRef = 11,
-        // First enhancement layer with alt ref dependency.
-        kTemporalUpdateGoldenRefAltRef = 10,
-        // First enhancement layer with alt ref dependency.
-        kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
-        // Base layer with alt ref dependency.
-        kTemporalUpdateLastRefAltRef = 8,
-        // Highest enhacement layer without dependency on golden with alt ref
-        // dependency.
-        kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
-        // Second layer and last frame in cycle, for 2 layers.
-        kTemporalUpdateNoneNoRefAltref = 6,
-        // Highest enhancement layer.
-        kTemporalUpdateNone = 5,
-        // Second enhancement layer.
-        kTemporalUpdateAltref = 4,
-        // Second enhancement layer without dependency on previous frames in
-        // the second enhancement layer.
-        kTemporalUpdateAltrefWithoutDependency = 3,
-        // First enhancement layer.
-        kTemporalUpdateGolden = 2,
-        // First enhancement layer without dependency on previous frames in
-        // the first enhancement layer.
-        kTemporalUpdateGoldenWithoutDependency = 1,
-        // Base layer.
-        kTemporalUpdateLast = 0,
-    };
-    enum {
-        kMaxTemporalPattern = 8
-    };
-
-    // number of buffers allocated per port
-    static const uint32_t kNumBuffers = 4;
-
-    // OMX port indexes that refer to input and
-    // output ports respectively
-    static const uint32_t kInputPortIndex = 0;
-    static const uint32_t kOutputPortIndex = 1;
-
-    // Byte-alignment required for buffers
-    static const uint32_t kInputBufferAlignment = 1;
-    static const uint32_t kOutputBufferAlignment = 2;
-
-    // Number of supported input color formats
-    static const uint32_t kNumberOfSupportedColorFormats = 3;
-
-    // vpx specific opaque data structure that
-    // stores encoder state
-    vpx_codec_ctx_t* mCodecContext;
-
-    // vpx specific data structure that
-    // stores encoder configuration
-    vpx_codec_enc_cfg_t* mCodecConfiguration;
-
-    // vpx specific read-only data structure
-    // that specifies algorithm interface (e.g. vp8)
-    vpx_codec_iface_t* mCodecInterface;
-
-    // If a request for a change it bitrate has been received.
-    bool mBitrateUpdated;
-
-    // Bitrate control mode, either constant or variable
-    vpx_rc_mode mBitrateControlMode;
-
-    // Parameter that denotes whether error resilience
-    // is enabled in encoder
-    OMX_BOOL mErrorResilience;
-
-    // Key frame interval in frames
-    uint32_t mKeyFrameInterval;
-
-    // Minimum (best quality) quantizer
-    uint32_t mMinQuantizer;
-
-    // Maximum (worst quality) quantizer
-    uint32_t mMaxQuantizer;
-
-    // Number of coding temporal layers to be used.
-    size_t mTemporalLayers;
-
-    // Temporal layer bitrare ratio in percentage
-    uint32_t mTemporalLayerBitrateRatio[OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS];
-
-    // Temporal pattern type
-    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE mTemporalPatternType;
-
-    // Temporal pattern length
-    size_t mTemporalPatternLength;
-
-    // Temporal pattern current index
-    size_t mTemporalPatternIdx;
-
-    // Frame type temporal pattern
-    TemporalReferences mTemporalPattern[kMaxTemporalPattern];
-
-    // Last input buffer timestamp
-    OMX_TICKS mLastTimestamp;
-
-    // Conversion buffer is needed to convert semi
-    // planar yuv420 to planar format
-    // It is only allocated if input format is
-    // indeed YUV420SemiPlanar.
-    uint8_t* mConversionBuffer;
-
-    bool mKeyFrameRequested;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_VPX_ENCODER_H_
diff --git a/media/libstagefright/codecs/on2/enc/exports.lds b/media/libstagefright/codecs/on2/enc/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/on2/enc/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/opus/dec/Android.bp b/media/libstagefright/codecs/opus/dec/Android.bp
deleted file mode 100644
index 3d8af69..0000000
--- a/media/libstagefright/codecs/opus/dec/Android.bp
+++ /dev/null
@@ -1,29 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_opusdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftOpus.cpp"],
-
-    shared_libs: [
-        "libopus",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
deleted file mode 100644
index dcd8dda..0000000
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ /dev/null
@@ -1,674 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftOpus"
-#include <utils/Log.h>
-
-#include "SoftOpus.h"
-#include <OMX_AudioExt.h>
-#include <OMX_IndexExt.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-extern "C" {
-    #include <opus.h>
-    #include <opus_multistream.h>
-}
-
-namespace android {
-
-static const int kRate = 48000;
-
-// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
-// mappings for up to 8 channels. This information is part of the Vorbis I
-// Specification:
-// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
-static const int kMaxChannels = 8;
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftOpus::SoftOpus(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mInputBufferCount(0),
-      mDecoder(NULL),
-      mHeader(NULL),
-      mNumChannels(1),
-      mSamplingRate(kRate),
-      mCodecDelay(0),
-      mSeekPreRoll(0),
-      mAnchorTimeUs(0),
-      mNumFramesOutput(0),
-      mHaveEOS(false),
-      mOutputPortSettingsChange(NONE) {
-    initPorts();
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftOpus::~SoftOpus() {
-    if (mDecoder != NULL) {
-        opus_multistream_decoder_destroy(mDecoder);
-        mDecoder = NULL;
-    }
-    if (mHeader != NULL) {
-        delete mHeader;
-        mHeader = NULL;
-    }
-}
-
-void SoftOpus::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 960 * 6;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_AUDIO_OPUS);
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding =
-        (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t) * kMaxChannels;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-status_t SoftOpus::initDecoder() {
-    return OK;
-}
-
-OMX_ERRORTYPE SoftOpus::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch ((int)index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS :
-                       OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAndroidOpus:
-        {
-            OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
-                (OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
-
-            if (!isValidOMXParam(opusParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (opusParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            opusParams->nAudioBandWidth = 0;
-            opusParams->nSampleRate = mSamplingRate;
-            opusParams->nBitRate = 0;
-
-            if (!isConfigured()) {
-                opusParams->nChannels = mNumChannels;
-            } else {
-                opusParams->nChannels = mHeader->channels;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-            pcmParams->nSamplingRate = kRate;
-
-            if (!isConfigured()) {
-                pcmParams->nChannels = 1;
-            } else {
-                pcmParams->nChannels = mHeader->channels;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftOpus::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch ((int)index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.opus",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding !=
-                           (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAndroidOpus:
-        {
-            const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
-                (const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
-
-            if (!isValidOMXParam(opusParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (opusParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-            mNumChannels = opusParams->nChannels;
-            mSamplingRate = opusParams->nSampleRate;
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftOpus::isConfigured() const {
-    return mInputBufferCount >= 1;
-}
-
-static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
-                         uint32_t read_offset) {
-    if (read_offset + 1 > data_size)
-        return 0;
-    uint16_t val;
-    val = data[read_offset];
-    val |= data[read_offset + 1] << 8;
-    return val;
-}
-
-// Maximum packet size used in Xiph's opusdec.
-static const int kMaxOpusOutputPacketSizeSamples = 960 * 6;
-
-// Default audio output channel layout. Used to initialize |stream_map| in
-// OpusHeader, and passed to opus_multistream_decoder_create() when the header
-// does not contain mapping information. The values are valid only for mono and
-// stereo output: Opus streams with more than 2 channels require a stream map.
-static const int kMaxChannelsWithDefaultLayout = 2;
-static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
-
-// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
-static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
-                            OpusHeader* header) {
-    // Size of the Opus header excluding optional mapping information.
-    const size_t kOpusHeaderSize = 19;
-
-    // Offset to the channel count byte in the Opus header.
-    const size_t kOpusHeaderChannelsOffset = 9;
-
-    // Offset to the pre-skip value in the Opus header.
-    const size_t kOpusHeaderSkipSamplesOffset = 10;
-
-    // Offset to the gain value in the Opus header.
-    const size_t kOpusHeaderGainOffset = 16;
-
-    // Offset to the channel mapping byte in the Opus header.
-    const size_t kOpusHeaderChannelMappingOffset = 18;
-
-    // Opus Header contains a stream map. The mapping values are in the header
-    // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
-    // data contains stream count, coupling information, and per channel mapping
-    // values:
-    //   - Byte 0: Number of streams.
-    //   - Byte 1: Number coupled.
-    //   - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
-    //             values.
-    const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
-    const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
-    const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
-
-    if (data_size < kOpusHeaderSize) {
-        ALOGV("Header size is too small.");
-        return false;
-    }
-    header->channels = *(data + kOpusHeaderChannelsOffset);
-
-    if (header->channels <= 0 || header->channels > kMaxChannels) {
-        ALOGV("Invalid Header, wrong channel count: %d", header->channels);
-        return false;
-    }
-    header->skip_samples = ReadLE16(data, data_size,
-                                        kOpusHeaderSkipSamplesOffset);
-    header->gain_db = static_cast<int16_t>(
-                              ReadLE16(data, data_size,
-                                       kOpusHeaderGainOffset));
-    header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
-    if (!header->channel_mapping) {
-        if (header->channels > kMaxChannelsWithDefaultLayout) {
-            ALOGV("Invalid Header, missing stream map.");
-            return false;
-        }
-        header->num_streams = 1;
-        header->num_coupled = header->channels > 1;
-        header->stream_map[0] = 0;
-        header->stream_map[1] = 1;
-        return true;
-    }
-    if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
-        ALOGV("Invalid stream map; insufficient data for current channel "
-              "count: %d", header->channels);
-        return false;
-    }
-    header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
-    header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
-    if (header->num_streams + header->num_coupled != header->channels) {
-        ALOGV("Inconsistent channel mapping.");
-        return false;
-    }
-    for (int i = 0; i < header->channels; ++i)
-      header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
-    return true;
-}
-
-// Convert nanoseconds to number of samples.
-static uint64_t ns_to_samples(uint64_t ns, int kRate) {
-    return static_cast<double>(ns) * kRate / 1000000000;
-}
-
-void SoftOpus::handleEOS() {
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-    CHECK(!inQueue.empty() && !outQueue.empty());
-
-    BufferInfo *outInfo = *outQueue.begin();
-    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-    outHeader->nFilledLen = 0;
-    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-    mHaveEOS = true;
-
-    outQueue.erase(outQueue.begin());
-    outInfo->mOwnedByUs = false;
-    notifyFillBufferDone(outHeader);
-
-    BufferInfo *inInfo = *inQueue.begin();
-    OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-    inQueue.erase(inQueue.begin());
-    inInfo->mOwnedByUs = false;
-    notifyEmptyBufferDone(inHeader);
-
-    ++mInputBufferCount;
-}
-
-void SoftOpus::onQueueFilled(OMX_U32 /* portIndex */) {
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    if (mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    while (!mHaveEOS && !inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        if (mInputBufferCount < 3) {
-            const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
-            size_t size = inHeader->nFilledLen;
-
-            if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
-                handleEOS();
-                return;
-            }
-
-            if (size < sizeof(int64_t)) {
-                // The 2nd and 3rd input buffer are expected to contain
-                //  an int64_t (see below), so make sure we get at least
-                //  that much. The first input buffer must contain 19 bytes,
-                //  but that is checked already.
-                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                return;
-            }
-
-            if (mInputBufferCount == 0) {
-                delete mHeader;
-                mHeader = new OpusHeader();
-                memset(mHeader, 0, sizeof(*mHeader));
-                if (!ParseOpusHeader(data, size, mHeader)) {
-                    ALOGV("Parsing Opus Header failed.");
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-
-                uint8_t channel_mapping[kMaxChannels] = {0};
-                if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
-                    memcpy(&channel_mapping,
-                           kDefaultOpusChannelLayout,
-                           kMaxChannelsWithDefaultLayout);
-                } else {
-                    memcpy(&channel_mapping,
-                           mHeader->stream_map,
-                           mHeader->channels);
-                }
-
-                int status = OPUS_INVALID_STATE;
-                if (mDecoder != NULL) {
-                    opus_multistream_decoder_destroy(mDecoder);
-                }
-                mDecoder = opus_multistream_decoder_create(kRate,
-                                                           mHeader->channels,
-                                                           mHeader->num_streams,
-                                                           mHeader->num_coupled,
-                                                           channel_mapping,
-                                                           &status);
-                if (!mDecoder || status != OPUS_OK) {
-                    ALOGV("opus_multistream_decoder_create failed status=%s",
-                          opus_strerror(status));
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-                status =
-                    opus_multistream_decoder_ctl(mDecoder,
-                                                 OPUS_SET_GAIN(mHeader->gain_db));
-                if (status != OPUS_OK) {
-                    ALOGV("Failed to set OPUS header gain; status=%s",
-                          opus_strerror(status));
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-            } else if (mInputBufferCount == 1) {
-                mCodecDelay = ns_to_samples(
-                                  *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
-                                                               inHeader->nOffset)),
-                                  kRate);
-                mSamplesToDiscard = mCodecDelay;
-            } else {
-                mSeekPreRoll = ns_to_samples(
-                                   *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
-                                                                inHeader->nOffset)),
-                                   kRate);
-                mSamplingRate = kRate;
-                mNumChannels = mHeader->channels;
-                notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                mOutputPortSettingsChange = AWAITING_DISABLED;
-            }
-
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                handleEOS();
-                return;
-            }
-
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-            ++mInputBufferCount;
-
-            continue;
-        }
-
-        // Ignore CSD re-submissions.
-        if (mInputBufferCount >= 3 && (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                handleEOS();
-                return;
-            }
-
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-            continue;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
-            handleEOS();
-            return;
-        }
-
-        if (inHeader->nOffset == 0) {
-            mAnchorTimeUs = inHeader->nTimeStamp;
-            mNumFramesOutput = 0;
-        }
-
-        // When seeking to zero, |mCodecDelay| samples has to be discarded
-        // instead of |mSeekPreRoll| samples (as we would when seeking to any
-        // other timestamp).
-        if (inHeader->nTimeStamp == 0) {
-            mSamplesToDiscard = mCodecDelay;
-        }
-
-        const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
-        const uint32_t size = inHeader->nFilledLen;
-        size_t frameSize = kMaxOpusOutputPacketSizeSamples;
-        if (frameSize > outHeader->nAllocLen / sizeof(int16_t) / mHeader->channels) {
-            frameSize = outHeader->nAllocLen / sizeof(int16_t) / mHeader->channels;
-            android_errorWriteLog(0x534e4554, "27833616");
-        }
-
-        int numFrames = opus_multistream_decode(mDecoder,
-                                                data,
-                                                size,
-                                                (int16_t *)outHeader->pBuffer,
-                                                frameSize,
-                                                0);
-        if (numFrames < 0) {
-            ALOGE("opus_multistream_decode returned %d", numFrames);
-            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-            return;
-        }
-
-        outHeader->nOffset = 0;
-        if (mSamplesToDiscard > 0) {
-            if (mSamplesToDiscard > numFrames) {
-                mSamplesToDiscard -= numFrames;
-                numFrames = 0;
-            } else {
-                numFrames -= mSamplesToDiscard;
-                outHeader->nOffset = mSamplesToDiscard * sizeof(int16_t) *
-                                     mHeader->channels;
-                mSamplesToDiscard = 0;
-            }
-        }
-
-        outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
-
-        outHeader->nTimeStamp = mAnchorTimeUs +
-                                (mNumFramesOutput * 1000000LL) /
-                                kRate;
-
-        mNumFramesOutput += numFrames;
-
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-            mHaveEOS = true;
-        } else {
-            outHeader->nFlags = 0;
-        }
-
-        inInfo->mOwnedByUs = false;
-        inQueue.erase(inQueue.begin());
-        notifyEmptyBufferDone(inHeader);
-        ++mInputBufferCount;
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        notifyFillBufferDone(outHeader);
-    }
-}
-
-void SoftOpus::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0 && mDecoder != NULL) {
-        // Make sure that the next buffer output does not still
-        // depend on fragments from the last one decoded.
-        mNumFramesOutput = 0;
-        opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
-        mAnchorTimeUs = 0;
-        mSamplesToDiscard = mSeekPreRoll;
-        mHaveEOS = false;
-    }
-}
-
-void SoftOpus::onReset() {
-    mInputBufferCount = 0;
-    mNumFramesOutput = 0;
-    if (mDecoder != NULL) {
-        opus_multistream_decoder_destroy(mDecoder);
-        mDecoder = NULL;
-    }
-    if (mHeader != NULL) {
-        delete mHeader;
-        mHeader = NULL;
-    }
-
-    mOutputPortSettingsChange = NONE;
-    mHaveEOS = false;
-}
-
-void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftOpus(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
deleted file mode 100644
index 00058c8..0000000
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * The Opus specification is part of IETF RFC 6716:
- * http://tools.ietf.org/html/rfc6716
- */
-
-#ifndef SOFT_OPUS_H_
-
-#define SOFT_OPUS_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-struct OpusMSDecoder;
-
-namespace android {
-
-struct OpusHeader {
-  int channels;
-  int skip_samples;
-  int channel_mapping;
-  int num_streams;
-  int num_coupled;
-  int16_t gain_db;
-  uint8_t stream_map[8];
-};
-
-struct SoftOpus : public SimpleSoftOMXComponent {
-    SoftOpus(const char *name,
-             const OMX_CALLBACKTYPE *callbacks,
-             OMX_PTR appData,
-             OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftOpus();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers = 4,
-        kMaxNumSamplesPerBuffer = 960 * 6
-    };
-
-    size_t mInputBufferCount;
-
-    OpusMSDecoder *mDecoder;
-    OpusHeader *mHeader;
-
-    int32_t mNumChannels;
-    int32_t mSamplingRate;
-    int64_t mCodecDelay;
-    int64_t mSeekPreRoll;
-    int64_t mSamplesToDiscard;
-    int64_t mAnchorTimeUs;
-    int64_t mNumFramesOutput;
-    bool mHaveEOS;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    void initPorts();
-    status_t initDecoder();
-    bool isConfigured() const;
-    void handleEOS();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
-};
-
-}  // namespace android
-
-#endif  // SOFT_OPUS_H_
diff --git a/media/libstagefright/codecs/opus/dec/exports.lds b/media/libstagefright/codecs/opus/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/opus/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/raw/Android.bp b/media/libstagefright/codecs/raw/Android.bp
deleted file mode 100644
index 3673786..0000000
--- a/media/libstagefright/codecs/raw/Android.bp
+++ /dev/null
@@ -1,35 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_raw_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_raw_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_rawdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftRaw.cpp"],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/raw/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/raw/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/raw/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/raw/NOTICE b/media/libstagefright/codecs/raw/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/raw/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/raw/SoftRaw.cpp b/media/libstagefright/codecs/raw/SoftRaw.cpp
deleted file mode 100644
index 82dd171..0000000
--- a/media/libstagefright/codecs/raw/SoftRaw.cpp
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftRaw"
-#include <utils/Log.h>
-
-#include "SoftRaw.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftRaw::SoftRaw(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mSignalledError(false),
-      mChannelCount(2),
-      mSampleRate(44100),
-      mNumericalData(OMX_NumericalDataSigned),
-      mBitsPerSample(16) {
-    initPorts();
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftRaw::~SoftRaw() {
-}
-
-void SoftRaw::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 192 * 1024;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 192 * 1024;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-status_t SoftRaw::initDecoder() {
-    return OK;
-}
-
-OMX_ERRORTYPE SoftRaw::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding = OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = (OMX_NUMERICALDATATYPE)mNumericalData;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = mBitsPerSample;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            pcmParams->nChannels = mChannelCount;
-            pcmParams->nSamplingRate = mSampleRate;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftRaw::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.raw",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->eEncoding != OMX_AUDIO_CodingPCM) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            mChannelCount = pcmParams->nChannels;
-            mSampleRate = pcmParams->nSamplingRate;
-            mNumericalData = pcmParams->eNumData;
-            mBitsPerSample = pcmParams->nBitPerSample;
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-        {
-            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(
-                    index, params);
-            // In case inPort->mDef.nBufferSize changed, the output buffer size
-            // should match the input buffer size.
-            PortInfo *inPort = editPortInfo(0);
-            PortInfo *outPort = editPortInfo(1);
-            outPort->mDef.nBufferSize = inPort->mDef.nBufferSize;
-            return err;
-        }
-    }
-}
-
-void SoftRaw::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError) {
-        return;
-    }
-
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        CHECK_GE(outHeader->nAllocLen, inHeader->nFilledLen);
-        memcpy(outHeader->pBuffer,
-               inHeader->pBuffer + inHeader->nOffset,
-               inHeader->nFilledLen);
-
-        outHeader->nFlags = inHeader->nFlags;
-        outHeader->nOffset = 0;
-        outHeader->nFilledLen = inHeader->nFilledLen;
-        outHeader->nTimeStamp = inHeader->nTimeStamp;
-
-        bool sawEOS = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
-
-        inQueue.erase(inQueue.begin());
-        inInfo->mOwnedByUs = false;
-        notifyEmptyBufferDone(inHeader);
-
-        outQueue.erase(outQueue.begin());
-        outInfo->mOwnedByUs = false;
-        notifyFillBufferDone(outHeader);
-
-        if (sawEOS) {
-            break;
-        }
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftRaw(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/raw/SoftRaw.h b/media/libstagefright/codecs/raw/SoftRaw.h
deleted file mode 100644
index ebc2741..0000000
--- a/media/libstagefright/codecs/raw/SoftRaw.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_RAW_H_
-
-#define SOFT_RAW_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-struct tPVMP4AudioDecoderExternal;
-
-namespace android {
-
-struct SoftRaw : public SimpleSoftOMXComponent {
-    SoftRaw(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftRaw();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-private:
-    enum {
-        kNumBuffers = 4
-    };
-
-    bool mSignalledError;
-
-    int32_t mChannelCount;
-    int32_t mSampleRate;
-    int32_t mNumericalData;
-    int32_t mBitsPerSample;
-
-    void initPorts();
-    status_t initDecoder();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftRaw);
-};
-
-}  // namespace android
-
-#endif  // SOFT_RAW_H_
diff --git a/media/libstagefright/codecs/raw/exports.lds b/media/libstagefright/codecs/raw/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/raw/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.bp b/media/libstagefright/codecs/vorbis/dec/Android.bp
deleted file mode 100644
index 7764294..0000000
--- a/media/libstagefright/codecs/vorbis/dec/Android.bp
+++ /dev/null
@@ -1,38 +0,0 @@
-package {
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_codecs_vorbis_dec_license",
-    ],
-}
-
-// Added automatically by a large-scale-change
-// See: http://go/android-license-faq
-license {
-    name: "frameworks_av_media_libstagefright_codecs_vorbis_dec_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_vorbisdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: ["SoftVorbis.cpp"],
-
-    shared_libs: [
-        "libvorbisidec",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/vorbis/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/vorbis/dec/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/vorbis/dec/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/vorbis/dec/NOTICE b/media/libstagefright/codecs/vorbis/dec/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/vorbis/dec/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
deleted file mode 100644
index 3daed10..0000000
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ /dev/null
@@ -1,644 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftVorbis"
-#include <utils/Log.h>
-
-#include "SoftVorbis.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-
-static int kDefaultChannelCount = 1;
-static int kDefaultSamplingRate = 48000;
-
-extern "C" {
-    #include <Tremolo/codec_internal.h>
-
-    int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
-    int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
-    int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
-}
-
-namespace android {
-
-template<class T>
-static void InitOMXParams(T *params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-SoftVorbis::SoftVorbis(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mInputBufferCount(0),
-      mState(NULL),
-      mVi(NULL),
-      mAnchorTimeUs(0),
-      mNumFramesOutput(0),
-      mNumFramesLeftOnPage(-1),
-      mSawInputEos(false),
-      mSignalledOutputEos(false),
-      mSignalledError(false),
-      mOutputPortSettingsChange(NONE) {
-    initPorts();
-    CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftVorbis::~SoftVorbis() {
-    if (mState != NULL) {
-        vorbis_dsp_clear(mState);
-        delete mState;
-        mState = NULL;
-    }
-
-    if (mVi != NULL) {
-        vorbis_info_clear(mVi);
-        delete mVi;
-        mVi = NULL;
-    }
-}
-
-void SoftVorbis::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_AUDIO_VORBIS);
-
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingVORBIS;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-status_t SoftVorbis::initDecoder() {
-    return OK;
-}
-
-OMX_ERRORTYPE SoftVorbis::internalGetParameter(
-        OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamAudioPortFormat:
-        {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0)
-                    ? OMX_AUDIO_CodingVORBIS : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioVorbis:
-        {
-            OMX_AUDIO_PARAM_VORBISTYPE *vorbisParams =
-                (OMX_AUDIO_PARAM_VORBISTYPE *)params;
-
-            if (!isValidOMXParam(vorbisParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (vorbisParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            vorbisParams->nBitRate = 0;
-            vorbisParams->nMinBitRate = 0;
-            vorbisParams->nMaxBitRate = 0;
-            vorbisParams->nAudioBandWidth = 0;
-            vorbisParams->nQuality = 3;
-            vorbisParams->bManaged = OMX_FALSE;
-            vorbisParams->bDownmix = OMX_FALSE;
-
-            if (!isConfigured()) {
-                vorbisParams->nChannels = kDefaultChannelCount;
-                vorbisParams->nSampleRate = kDefaultSamplingRate;
-            } else {
-                vorbisParams->nChannels = mVi->channels;
-                vorbisParams->nSampleRate = mVi->rate;
-                vorbisParams->nBitRate = mVi->bitrate_nominal;
-                vorbisParams->nMinBitRate = mVi->bitrate_lower;
-                vorbisParams->nMaxBitRate = mVi->bitrate_upper;
-            }
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm:
-        {
-            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
-                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-
-            if (!isConfigured()) {
-                pcmParams->nChannels = kDefaultChannelCount;
-                pcmParams->nSamplingRate = kDefaultSamplingRate;
-            } else {
-                pcmParams->nChannels = mVi->channels;
-                pcmParams->nSamplingRate = mVi->rate;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftVorbis::internalSetParameter(
-        OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch (index) {
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "audio_decoder.vorbis",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat:
-        {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0
-                        && formatParams->eEncoding != OMX_AUDIO_CodingVORBIS)
-                || (formatParams->nPortIndex == 1
-                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioVorbis:
-        {
-            const OMX_AUDIO_PARAM_VORBISTYPE *vorbisParams =
-                (const OMX_AUDIO_PARAM_VORBISTYPE *)params;
-
-            if (!isValidOMXParam(vorbisParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (vorbisParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftVorbis::isConfigured() const {
-    return (mState != NULL && mVi != NULL);
-}
-
-static void makeBitReader(
-        const void *data, size_t size,
-        ogg_buffer *buf, ogg_reference *ref, oggpack_buffer *bits) {
-    buf->data = (uint8_t *)data;
-    buf->size = size;
-    buf->refcount = 1;
-    buf->ptr.owner = NULL;
-
-    ref->buffer = buf;
-    ref->begin = 0;
-    ref->length = size;
-    ref->next = NULL;
-
-    oggpack_readinit(bits, ref);
-}
-
-void SoftVorbis::handleEOS() {
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    CHECK(!inQueue.empty() && !outQueue.empty());
-
-    mSawInputEos = true;
-
-    BufferInfo *outInfo = *outQueue.begin();
-    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-    outHeader->nFilledLen = 0;
-    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-    outQueue.erase(outQueue.begin());
-    outInfo->mOwnedByUs = false;
-    notifyFillBufferDone(outHeader);
-    mSignalledOutputEos = true;
-
-    BufferInfo *inInfo = *inQueue.begin();
-    OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-    inQueue.erase(inQueue.begin());
-    inInfo->mOwnedByUs = false;
-    notifyEmptyBufferDone(inHeader);
-    ++mInputBufferCount;
-}
-
-void SoftVorbis::onQueueFilled(OMX_U32 /* portIndex */) {
-    List<BufferInfo *> &inQueue = getPortQueue(0);
-    List<BufferInfo *> &outQueue = getPortQueue(1);
-
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        return;
-    }
-
-    while (!mSignalledOutputEos && (!inQueue.empty() || mSawInputEos) && !outQueue.empty()) {
-        BufferInfo *inInfo = NULL;
-        OMX_BUFFERHEADERTYPE *inHeader = NULL;
-        if (!inQueue.empty()) {
-            inInfo = *inQueue.begin();
-            inHeader = inInfo->mHeader;
-        }
-
-        BufferInfo *outInfo = *outQueue.begin();
-        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
-        int32_t numPageSamples = 0;
-
-        if (inHeader) {
-            // Assume the very first 2 buffers are always codec config (in this case mState is NULL)
-            // After flush, handle CSD
-            if (mInputBufferCount < 2 &&
-                    (mState == NULL || (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG))) {
-                const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
-                size_t size = inHeader->nFilledLen;
-
-                if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
-                    handleEOS();
-                    return;
-                }
-
-                if (size < 7) {
-                    ALOGE("Too small input buffer: %zu bytes", size);
-                    android_errorWriteLog(0x534e4554, "27833616");
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    mSignalledError = true;
-                    return;
-                }
-
-                ogg_buffer buf;
-                ogg_reference ref;
-                oggpack_buffer bits;
-
-                makeBitReader((const uint8_t *)data + 7, size - 7, &buf, &ref, &bits);
-
-                // Assume very first frame is identification header - or reset identification
-                // header after flush, but allow only specifying setup header after flush if
-                // identification header was already set up.
-                if (mInputBufferCount == 0 &&
-                        (mVi == NULL || data[0] == 1 /* identification header */)) {
-                    // remove any prior state
-                    if (mVi != NULL) {
-                        // also clear mState as it may refer to the old mVi
-                        if (mState != NULL) {
-                            vorbis_dsp_clear(mState);
-                            delete mState;
-                            mState = NULL;
-                        }
-                        vorbis_info_clear(mVi);
-                        delete mVi;
-                        mVi = NULL;
-                    }
-
-                    CHECK(mVi == NULL);
-                    mVi = new vorbis_info;
-                    vorbis_info_init(mVi);
-
-                    int ret = _vorbis_unpack_info(mVi, &bits);
-                    if (ret != 0) {
-                        notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
-                        mSignalledError = true;
-                        return;
-                    }
-                } else {
-                    // remove any prior state
-                    if (mState != NULL) {
-                        vorbis_dsp_clear(mState);
-                        delete mState;
-                        mState = NULL;
-                    }
-
-                    int ret = _vorbis_unpack_books(mVi, &bits);
-                    if (ret != 0 || mState != NULL) {
-                        notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
-                        mSignalledError = true;
-                        return;
-                    }
-
-                    CHECK(mState == NULL);
-                    mState = new vorbis_dsp_state;
-                    CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
-
-                    if (mVi->rate != kDefaultSamplingRate ||
-                            mVi->channels != kDefaultChannelCount) {
-                        ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
-                        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                        mOutputPortSettingsChange = AWAITING_DISABLED;
-                    }
-                    mInputBufferCount = 1;
-                }
-
-                if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                    handleEOS();
-                    return;
-                }
-
-                inQueue.erase(inQueue.begin());
-                inInfo->mOwnedByUs = false;
-                notifyEmptyBufferDone(inHeader);
-                ++mInputBufferCount;
-
-                continue;
-            }
-
-            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-                mSawInputEos = true;
-            }
-
-            if (inHeader->nFilledLen || !mSawInputEos) {
-                if (inHeader->nFilledLen < sizeof(numPageSamples)) {
-                    notify(OMX_EventError, OMX_ErrorBadParameter, 0, NULL);
-                    mSignalledError = true;
-                    ALOGE("onQueueFilled, input header has nFilledLen %u, expected %zu",
-                            inHeader->nFilledLen, sizeof(numPageSamples));
-                    return;
-                }
-                memcpy(&numPageSamples,
-                       inHeader->pBuffer + inHeader->nOffset + inHeader->nFilledLen - 4,
-                       sizeof(numPageSamples));
-
-                if (inHeader->nOffset == 0) {
-                    mAnchorTimeUs = inHeader->nTimeStamp;
-                    mNumFramesOutput = 0;
-                }
-
-                inHeader->nFilledLen -= sizeof(numPageSamples);;
-            }
-        }
-
-        if (numPageSamples >= 0) {
-            mNumFramesLeftOnPage = numPageSamples;
-        }
-
-        ogg_buffer buf;
-        buf.data = inHeader ? inHeader->pBuffer + inHeader->nOffset : NULL;
-        buf.size = inHeader ? inHeader->nFilledLen : 0;
-        buf.refcount = 1;
-        buf.ptr.owner = NULL;
-
-        ogg_reference ref;
-        ref.buffer = &buf;
-        ref.begin = 0;
-        ref.length = buf.size;
-        ref.next = NULL;
-
-        ogg_packet pack;
-        pack.packet = &ref;
-        pack.bytes = ref.length;
-        pack.b_o_s = 0;
-        pack.e_o_s = 0;
-        pack.granulepos = 0;
-        pack.packetno = 0;
-
-        int numFrames = 0;
-
-        outHeader->nFlags = 0;
-
-        if (mState == nullptr || mVi == nullptr) {
-            notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
-            mSignalledError = true;
-            ALOGE("onQueueFilled, input does not have CSD");
-            return;
-        }
-
-        int err = vorbis_dsp_synthesis(mState, &pack, 1);
-        if (err != 0) {
-            // FIXME temporary workaround for log spam
-#if !defined(__arm__) && !defined(__aarch64__)
-            ALOGV("vorbis_dsp_synthesis returned %d", err);
-#else
-            ALOGW("vorbis_dsp_synthesis returned %d", err);
-#endif
-        } else {
-            size_t numSamplesPerBuffer = kMaxNumSamplesPerBuffer;
-            if (numSamplesPerBuffer > outHeader->nAllocLen / sizeof(int16_t)) {
-                numSamplesPerBuffer = outHeader->nAllocLen / sizeof(int16_t);
-                android_errorWriteLog(0x534e4554, "27833616");
-            }
-            numFrames = vorbis_dsp_pcmout(
-                    mState, (int16_t *)outHeader->pBuffer,
-                    (numSamplesPerBuffer / mVi->channels));
-
-            if (numFrames < 0) {
-                ALOGE("vorbis_dsp_pcmout returned %d", numFrames);
-                numFrames = 0;
-            }
-        }
-
-        if (mNumFramesLeftOnPage >= 0) {
-            if (numFrames > mNumFramesLeftOnPage) {
-                ALOGV("discarding %d frames at end of page",
-                     numFrames - mNumFramesLeftOnPage);
-                numFrames = mNumFramesLeftOnPage;
-                if (mSawInputEos) {
-                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                    mSignalledOutputEos = true;
-                }
-            }
-            mNumFramesLeftOnPage -= numFrames;
-        }
-
-        outHeader->nFilledLen = numFrames * sizeof(int16_t) * mVi->channels;
-        outHeader->nOffset = 0;
-
-        outHeader->nTimeStamp =
-            mAnchorTimeUs
-                + (mNumFramesOutput * 1000000LL) / mVi->rate;
-
-        mNumFramesOutput += numFrames;
-
-        if (inHeader) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            notifyEmptyBufferDone(inHeader);
-            ++mInputBufferCount;
-        }
-
-        outInfo->mOwnedByUs = false;
-        outQueue.erase(outQueue.begin());
-        notifyFillBufferDone(outHeader);
-    }
-}
-
-void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0) {
-        mInputBufferCount = 0;
-        mNumFramesOutput = 0;
-        mSawInputEos = false;
-        mSignalledOutputEos = false;
-        mNumFramesLeftOnPage = -1;
-        if (mState != NULL) {
-            // Make sure that the next buffer output does not still
-            // depend on fragments from the last one decoded.
-            vorbis_dsp_restart(mState);
-        }
-    }
-}
-
-void SoftVorbis::onReset() {
-    mInputBufferCount = 0;
-    mNumFramesOutput = 0;
-    if (mState != NULL) {
-        vorbis_dsp_clear(mState);
-        delete mState;
-        mState = NULL;
-    }
-
-    if (mVi != NULL) {
-        vorbis_info_clear(mVi);
-        delete mVi;
-        mVi = NULL;
-    }
-
-    mSawInputEos = false;
-    mSignalledOutputEos = false;
-    mSignalledError = false;
-    mNumFramesLeftOnPage = -1;
-    mOutputPortSettingsChange = NONE;
-}
-
-void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED:
-        {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default:
-        {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent *createSoftOMXComponent(
-        const char *name, const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftVorbis(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
deleted file mode 100644
index 5ff8ea4..0000000
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_VORBIS_H_
-
-#define SOFT_VORBIS_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-struct vorbis_dsp_state;
-struct vorbis_info;
-
-namespace android {
-
-struct SoftVorbis : public SimpleSoftOMXComponent {
-    SoftVorbis(const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftVorbis();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers = 4,
-        kMaxNumSamplesPerBuffer = 8192 * 2
-    };
-
-    size_t mInputBufferCount;
-
-    vorbis_dsp_state *mState;
-    vorbis_info *mVi;
-
-    int64_t mAnchorTimeUs;
-    int64_t mNumFramesOutput;
-    int32_t mNumFramesLeftOnPage;
-    bool mSawInputEos;
-    bool mSignalledOutputEos;
-    bool mSignalledError;
-
-    enum {
-        NONE,
-        AWAITING_DISABLED,
-        AWAITING_ENABLED
-    } mOutputPortSettingsChange;
-
-    void initPorts();
-    status_t initDecoder();
-    bool isConfigured() const;
-    void handleEOS();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftVorbis);
-};
-
-}  // namespace android
-
-#endif  // SOFT_VORBIS_H_
-
diff --git a/media/libstagefright/codecs/vorbis/dec/exports.lds b/media/libstagefright/codecs/vorbis/dec/exports.lds
deleted file mode 100644
index e24f3fa..0000000
--- a/media/libstagefright/codecs/vorbis/dec/exports.lds
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    global:
-        _Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPEPvPP17OMX_COMPONENTTYPE;
-    local: *;
-};
diff --git a/media/libstagefright/codecs/xaacdec/Android.bp b/media/libstagefright/codecs/xaacdec/Android.bp
deleted file mode 100644
index 1d03c16..0000000
--- a/media/libstagefright/codecs/xaacdec/Android.bp
+++ /dev/null
@@ -1,36 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_shared {
-    name: "libstagefright_soft_xaacdec",
-    defaults: ["libstagefright_softomx-defaults"],
-
-    srcs: [
-        "SoftXAAC.cpp",
-    ],
-
-    cflags: [
-        "-DENABLE_MPEG_D_DRC"
-    ],
-
-    sanitize: {
-        // integer_overflow: true,
-        misc_undefined: [ "signed-integer-overflow", "unsigned-integer-overflow", ],
-        cfi: true,
-        config: {
-            cfi_assembly_support: true,
-        },
-    },
-
-    static_libs: ["libxaacdec"],
-
-    shared_libs: [
-        "libcutils",
-    ],
-}
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
deleted file mode 100644
index a478642..0000000
--- a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
+++ /dev/null
@@ -1,1702 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftXAAC"
-#include <utils/Log.h>
-
-#include "SoftXAAC.h"
-
-#include <OMX_AudioExt.h>
-#include <OMX_IndexExt.h>
-#include <cutils/properties.h>
-#include <math.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <utils/misc.h>
-
-/* 64*-0.25dB = -16 dB below full scale for mobile conf */
-#define DRC_DEFAULT_MOBILE_REF_LEVEL 64
-/* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_CUT 127
-/* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_BOOST 127
-/* switch for heavy compression for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1
-/* encoder target level; -1 => the value is unknown,
- * otherwise dB step value (e.g. 64 for -16 dB) */
-#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1)
-
-/* Default Effect type is "Limited playback" */
-#define DRC_KEY_AAC_DRC_EFFECT_TYPE (3)
-
-/* REF_LEVEL of 64 pairs well with EFFECT_TYPE of 3. */
-/* Default loudness value for MPEG-D DRC */
-#define DRC_DEFAULT_MOBILE_LOUDNESS_LEVEL (64)
-
-#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
-#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
-#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
-#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
-#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
-#define PROP_DRC_OVERRIDE_EFFECT_TYPE "ro.aac_drc_effect_type"
-
-/* maximum number of audio channels that can be decoded */
-#define MAX_CHANNEL_COUNT 8
-
-#define RETURN_IF_FATAL(retval, str)                       \
-    if (retval & IA_FATAL_ERROR) {                         \
-        ALOGE("Error in %s: Returned: %d", str, retval);   \
-        return retval;                                     \
-    } else if (retval != IA_NO_ERROR) {                    \
-        ALOGW("Warning in %s: Returned: %d", str, retval); \
-    }
-
-namespace android {
-
-template <class T>
-static void InitOMXParams(T* params) {
-    params->nSize = sizeof(T);
-    params->nVersion.s.nVersionMajor = 1;
-    params->nVersion.s.nVersionMinor = 0;
-    params->nVersion.s.nRevision = 0;
-    params->nVersion.s.nStep = 0;
-}
-
-static const OMX_U32 kSupportedProfiles[] = {
-    OMX_AUDIO_AACObjectLC, OMX_AUDIO_AACObjectHE,  OMX_AUDIO_AACObjectHE_PS,
-    OMX_AUDIO_AACObjectLD, OMX_AUDIO_AACObjectELD, OMX_AUDIO_AACObjectXHE
-};
-
-SoftXAAC::SoftXAAC(const char* name, const OMX_CALLBACKTYPE* callbacks, OMX_PTR appData,
-                   OMX_COMPONENTTYPE** component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mIsADTS(false),
-      mInputBufferCount(0),
-      mOutputBufferCount(0),
-      mSignalledError(false),
-      mLastInHeader(NULL),
-      mPrevTimestamp(0),
-      mCurrentTimestamp(0),
-      mOutputPortSettingsChange(NONE),
-      mXheaacCodecHandle(NULL),
-      mMpegDDrcHandle(NULL),
-      mInputBufferSize(0),
-      mOutputFrameLength(1024),
-      mInputBuffer(NULL),
-      mOutputBuffer(NULL),
-      mSampFreq(0),
-      mNumChannels(0),
-      mPcmWdSz(0),
-      mChannelMask(0),
-      mIsCodecInitialized(false),
-      mIsCodecConfigFlushRequired(false),
-      mMpegDDRCPresent(0),
-      mDRCFlag(0)
-
-{
-    initPorts();
-    mMemoryVec.clear();
-    mDrcMemoryVec.clear();
-
-    CHECK_EQ(initDecoder(), IA_NO_ERROR);
-}
-
-SoftXAAC::~SoftXAAC() {
-    IA_ERRORCODE err_code = deInitXAACDecoder();
-    if (IA_NO_ERROR != err_code) {
-        ALOGE("deInitXAACDecoder() failed %d", err_code);
-    }
-
-    err_code = deInitMPEGDDDrc();
-    if (IA_NO_ERROR != err_code) {
-        ALOGE("deInitMPEGDDDrc() failed %d", err_code);
-    }
-    mIsCodecInitialized = false;
-    mIsCodecConfigFlushRequired = false;
-}
-
-void SoftXAAC::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumInputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 8192;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.audio.cMIMEType = const_cast<char*>("audio/aac");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumOutputBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = 4096 * MAX_CHANNEL_COUNT;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainAudio;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.audio.cMIMEType = const_cast<char*>("audio/raw");
-    def.format.audio.pNativeRender = NULL;
-    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
-    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
-
-    addPort(def);
-}
-
-IA_ERRORCODE SoftXAAC::initDecoder() {
-    int ui_drc_val;
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-    int loop = 0;
-
-    err_code = initXAACDecoder();
-    if (err_code != IA_NO_ERROR) {
-        ALOGE("initXAACDecoder failed with error %d", err_code);
-        deInitXAACDecoder();
-        return err_code;
-    }
-
-    mEndOfInput = false;
-    mEndOfOutput = false;
-
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
-        ui_drc_val = atoi(value);
-        ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d",
-              ui_drc_val, DRC_DEFAULT_MOBILE_REF_LEVEL);
-    } else {
-        ui_drc_val = DRC_DEFAULT_MOBILE_REF_LEVEL;
-    }
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL, &ui_drc_val);
-
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
-#ifdef ENABLE_MPEG_D_DRC
-    /* Use ui_drc_val from PROP_DRC_OVERRIDE_REF_LEVEL or DRC_DEFAULT_MOBILE_REF_LEVEL
-     * for IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS too */
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &ui_drc_val);
-
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
-#endif
-
-    if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
-        ui_drc_val = atoi(value);
-        ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", ui_drc_val,
-              DRC_DEFAULT_MOBILE_DRC_CUT);
-    } else {
-        ui_drc_val = DRC_DEFAULT_MOBILE_DRC_CUT;
-    }
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT, &ui_drc_val);
-
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
-
-    if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
-        ui_drc_val = atoi(value);
-        ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", ui_drc_val,
-              DRC_DEFAULT_MOBILE_DRC_BOOST);
-    } else {
-        ui_drc_val = DRC_DEFAULT_MOBILE_DRC_BOOST;
-    }
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST, &ui_drc_val);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
-
-    if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
-        ui_drc_val = atoi(value);
-        ALOGV("AAC decoder using desired Heavy compression factor of %d instead of %d", ui_drc_val,
-              DRC_DEFAULT_MOBILE_DRC_HEAVY);
-    } else {
-        ui_drc_val = DRC_DEFAULT_MOBILE_DRC_HEAVY;
-    }
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP, &ui_drc_val);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
-
-#ifdef ENABLE_MPEG_D_DRC
-    if (property_get(PROP_DRC_OVERRIDE_EFFECT_TYPE, value, NULL)) {
-        ui_drc_val = atoi(value);
-        ALOGV("AAC decoder using desired DRC effect type of %d instead of %d", ui_drc_val,
-              DRC_KEY_AAC_DRC_EFFECT_TYPE);
-    } else {
-        ui_drc_val = DRC_KEY_AAC_DRC_EFFECT_TYPE;
-    }
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &ui_drc_val);
-
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
-
-#endif
-    return IA_NO_ERROR;
-}
-
-OMX_ERRORTYPE SoftXAAC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params) {
-    switch ((OMX_U32)index) {
-        case OMX_IndexParamAudioPortFormat: {
-            OMX_AUDIO_PARAM_PORTFORMATTYPE* formatParams = (OMX_AUDIO_PARAM_PORTFORMATTYPE*)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            formatParams->eEncoding =
-                (formatParams->nPortIndex == 0) ? OMX_AUDIO_CodingAAC : OMX_AUDIO_CodingPCM;
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac: {
-            OMX_AUDIO_PARAM_AACPROFILETYPE* aacParams = (OMX_AUDIO_PARAM_AACPROFILETYPE*)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            aacParams->nBitRate = 0;
-            aacParams->nAudioBandWidth = 0;
-            aacParams->nAACtools = 0;
-            aacParams->nAACERtools = 0;
-            aacParams->eAACProfile = OMX_AUDIO_AACObjectMain;
-
-            aacParams->eAACStreamFormat =
-                mIsADTS ? OMX_AUDIO_AACStreamFormatMP4ADTS : OMX_AUDIO_AACStreamFormatMP4FF;
-
-            aacParams->eChannelMode = OMX_AUDIO_ChannelModeStereo;
-
-            if (!isConfigured()) {
-                aacParams->nChannels = 1;
-                aacParams->nSampleRate = 44100;
-                aacParams->nFrameLength = 0;
-            } else {
-                aacParams->nChannels = mNumChannels;
-                aacParams->nSampleRate = mSampFreq;
-                aacParams->nFrameLength = mOutputFrameLength;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm: {
-            OMX_AUDIO_PARAM_PCMMODETYPE* pcmParams = (OMX_AUDIO_PARAM_PCMMODETYPE*)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            pcmParams->eNumData = OMX_NumericalDataSigned;
-            pcmParams->eEndian = OMX_EndianBig;
-            pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
-            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
-            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
-            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
-            pcmParams->eChannelMapping[2] = OMX_AUDIO_ChannelCF;
-            pcmParams->eChannelMapping[3] = OMX_AUDIO_ChannelLFE;
-            pcmParams->eChannelMapping[4] = OMX_AUDIO_ChannelLS;
-            pcmParams->eChannelMapping[5] = OMX_AUDIO_ChannelRS;
-
-            if (!isConfigured()) {
-                pcmParams->nChannels = 1;
-                pcmParams->nSamplingRate = 44100;
-            } else {
-                pcmParams->nChannels = mNumChannels;
-                pcmParams->nSamplingRate = mSampFreq;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioProfileQuerySupported: {
-            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE* profileParams =
-                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE*)params;
-
-            if (!isValidOMXParam(profileParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (profileParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
-                return OMX_ErrorNoMore;
-            }
-
-            profileParams->eProfile = kSupportedProfiles[profileParams->nProfileIndex];
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
-    }
-}
-
-OMX_ERRORTYPE SoftXAAC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
-    switch ((int)index) {
-        case OMX_IndexParamStandardComponentRole: {
-            const OMX_PARAM_COMPONENTROLETYPE* roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE*)params;
-
-            if (!isValidOMXParam(roleParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (strncmp((const char*)roleParams->cRole, "audio_decoder.aac",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPortFormat: {
-            const OMX_AUDIO_PARAM_PORTFORMATTYPE* formatParams =
-                (const OMX_AUDIO_PARAM_PORTFORMATTYPE*)params;
-
-            if (!isValidOMXParam(formatParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if ((formatParams->nPortIndex == 0 && formatParams->eEncoding != OMX_AUDIO_CodingAAC) ||
-                (formatParams->nPortIndex == 1 && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAac: {
-            const OMX_AUDIO_PARAM_AACPROFILETYPE* aacParams =
-                (const OMX_AUDIO_PARAM_AACPROFILETYPE*)params;
-
-            if (!isValidOMXParam(aacParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (aacParams->nPortIndex != 0) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (aacParams->eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4FF) {
-                mIsADTS = false;
-            } else if (aacParams->eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4ADTS) {
-                mIsADTS = true;
-            } else {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioAndroidAacDrcPresentation: {
-            const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE* aacPresParams =
-                (const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE*)params;
-
-            if (!isValidOMXParam(aacPresParams)) {
-                ALOGE("set OMX_ErrorBadParameter");
-                return OMX_ErrorBadParameter;
-            }
-
-            // for the following parameters of the OMX_AUDIO_PARAM_AACPROFILETYPE structure,
-            // a value of -1 implies the parameter is not set by the application:
-            //   nMaxOutputChannels     -1 by default
-            //   nDrcCut                uses default platform properties, see initDecoder()
-            //   nDrcBoost                idem
-            //   nHeavyCompression        idem
-            //   nTargetReferenceLevel    idem
-            //   nEncodedTargetLevel      idem
-            if (aacPresParams->nMaxOutputChannels >= 0) {
-                int max;
-                if (aacPresParams->nMaxOutputChannels >= 8) {
-                    max = 8;
-                } else if (aacPresParams->nMaxOutputChannels >= 6) {
-                    max = 6;
-                } else if (aacPresParams->nMaxOutputChannels >= 2) {
-                    max = 2;
-                } else {
-                    // -1 or 0: disable downmix,  1: mono
-                    max = aacPresParams->nMaxOutputChannels;
-                }
-            }
-            /* Apply DRC Changes */
-            IA_ERRORCODE err_code = setXAACDRCInfo(aacPresParams->nDrcCut, aacPresParams->nDrcBoost,
-                                                   aacPresParams->nTargetReferenceLevel,
-                                                   aacPresParams->nHeavyCompression
-#ifdef ENABLE_MPEG_D_DRC
-                                                   ,
-                                                   aacPresParams->nDrcEffectType
-#endif
-            );  // TOD0 : Revert this change
-            if (err_code != IA_NO_ERROR) {
-                ALOGE("Error in OMX_IndexParamAudioAndroidAacDrcPresentation");
-                return OMX_ErrorBadParameter;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamAudioPcm: {
-            const OMX_AUDIO_PARAM_PCMMODETYPE* pcmParams = (OMX_AUDIO_PARAM_PCMMODETYPE*)params;
-
-            if (!isValidOMXParam(pcmParams)) {
-                return OMX_ErrorBadParameter;
-            }
-
-            if (pcmParams->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
-    }
-}
-
-bool SoftXAAC::isConfigured() const {
-    return mInputBufferCount > 0;
-}
-
-void SoftXAAC::onQueueFilled(OMX_U32 /* portIndex */) {
-    if (mSignalledError || mOutputPortSettingsChange != NONE) {
-        ALOGE("onQueueFilled do not process %d %d", mSignalledError, mOutputPortSettingsChange);
-        return;
-    }
-
-    uint8_t* inBuffer = NULL;
-    uint32_t inBufferLength = 0;
-
-    List<BufferInfo*>& inQueue = getPortQueue(0);
-    List<BufferInfo*>& outQueue = getPortQueue(1);
-
-    signed int numOutBytes = 0;
-
-    /* If decoder call fails in between, then mOutputFrameLength is used  */
-    /* Decoded output for AAC is 1024/2048 samples / channel             */
-    /* TODO: For USAC mOutputFrameLength can go up to 4096                 */
-    /* Note: entire buffer logic to save and retrieve assumes 2 bytes per*/
-    /* sample currently                                                  */
-    if (mIsCodecInitialized) {
-        numOutBytes = mOutputFrameLength * (mPcmWdSz / 8) * mNumChannels;
-    }
-
-    while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
-        if (!inQueue.empty()) {
-            BufferInfo* inInfo = *inQueue.begin();
-            OMX_BUFFERHEADERTYPE* inHeader = inInfo->mHeader;
-
-            /* No need to check inHeader != NULL, as inQueue is not empty */
-            mEndOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
-
-            if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
-                ALOGW("first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
-                inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
-            }
-            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
-                inBuffer = inHeader->pBuffer + inHeader->nOffset;
-                inBufferLength = inHeader->nFilledLen;
-
-                /* GA header configuration sent to Decoder! */
-                IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
-                if (IA_NO_ERROR != err_code) {
-                    ALOGW("configXAACDecoder err_code = %d", err_code);
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
-                    return;
-                }
-                mInputBufferCount++;
-                mOutputBufferCount++;  // fake increase of outputBufferCount to keep the counters
-                                       // aligned
-
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                mLastInHeader = NULL;
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-
-                // Only send out port settings changed event if both sample rate
-                // and mNumChannels are valid.
-                if (mSampFreq && mNumChannels && !mIsCodecConfigFlushRequired) {
-                    ALOGV("Configuring decoder: %d Hz, %d channels", mSampFreq, mNumChannels);
-                    notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                    mOutputPortSettingsChange = AWAITING_DISABLED;
-                }
-
-                return;
-            }
-
-            if (inHeader->nFilledLen == 0) {
-                inInfo->mOwnedByUs = false;
-                inQueue.erase(inQueue.begin());
-                mLastInHeader = NULL;
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-                continue;
-            }
-
-            // Restore Offset and Length for Port reconfig case
-            size_t tempOffset = inHeader->nOffset;
-            size_t tempFilledLen = inHeader->nFilledLen;
-            if (mIsADTS) {
-                size_t adtsHeaderSize = 0;
-                // skip 30 bits, aac_frame_length follows.
-                // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
-
-                const uint8_t* adtsHeader = inHeader->pBuffer + inHeader->nOffset;
-
-                bool signalError = false;
-                if (inHeader->nFilledLen < 7) {
-                    ALOGE(
-                        "Audio data too short to contain even the ADTS header. "
-                        "Got %d bytes.",
-                        inHeader->nFilledLen);
-                    hexdump(adtsHeader, inHeader->nFilledLen);
-                    signalError = true;
-                } else {
-                    bool protectionAbsent = (adtsHeader[1] & 1);
-
-                    unsigned aac_frame_length =
-                        ((adtsHeader[3] & 3) << 11) | (adtsHeader[4] << 3) | (adtsHeader[5] >> 5);
-
-                    if (inHeader->nFilledLen < aac_frame_length) {
-                        ALOGE(
-                            "Not enough audio data for the complete frame. "
-                            "Got %d bytes, frame size according to the ADTS "
-                            "header is %u bytes.",
-                            inHeader->nFilledLen, aac_frame_length);
-                        hexdump(adtsHeader, inHeader->nFilledLen);
-                        signalError = true;
-                    } else {
-                        adtsHeaderSize = (protectionAbsent ? 7 : 9);
-                        if (aac_frame_length < adtsHeaderSize) {
-                            signalError = true;
-                        } else {
-                            inBuffer = (uint8_t*)adtsHeader + adtsHeaderSize;
-                            inBufferLength = aac_frame_length - adtsHeaderSize;
-
-                            inHeader->nOffset += adtsHeaderSize;
-                            inHeader->nFilledLen -= adtsHeaderSize;
-                        }
-                    }
-                }
-
-                if (signalError) {
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
-                    return;
-                }
-
-                // insert buffer size and time stamp
-                if (mLastInHeader != inHeader) {
-                    mCurrentTimestamp = inHeader->nTimeStamp;
-                    mLastInHeader = inHeader;
-                } else {
-                    mCurrentTimestamp = mPrevTimestamp + mOutputFrameLength * 1000000LL / mSampFreq;
-                }
-            } else {
-                inBuffer = inHeader->pBuffer + inHeader->nOffset;
-                inBufferLength = inHeader->nFilledLen;
-                mLastInHeader = inHeader;
-                mCurrentTimestamp = inHeader->nTimeStamp;
-            }
-
-            int numLoops = 0;
-            signed int prevSampleRate = mSampFreq;
-            signed int prevNumChannels = mNumChannels;
-
-            /* XAAC decoder expects first frame to be fed via configXAACDecoder API */
-            /* which should initialize the codec. Once this state is reached, call the  */
-            /* decodeXAACStream API with same frame to decode!                        */
-            if (!mIsCodecInitialized) {
-                IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
-                if (IA_NO_ERROR != err_code) {
-                    ALOGW("configXAACDecoder Failed 2 err_code = %d", err_code);
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
-                    return;
-                }
-            }
-
-            if (!mSampFreq || !mNumChannels) {
-                if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
-                    ALOGW("Invalid AAC stream");
-                    ALOGW("mSampFreq %d mNumChannels %d ", mSampFreq, mNumChannels);
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-            } else if ((mSampFreq != prevSampleRate) || (mNumChannels != prevNumChannels)) {
-                ALOGV("Reconfiguring decoder: %d->%d Hz, %d->%d channels", prevSampleRate,
-                      mSampFreq, prevNumChannels, mNumChannels);
-                inHeader->nOffset = tempOffset;
-                inHeader->nFilledLen = tempFilledLen;
-                notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
-                mOutputPortSettingsChange = AWAITING_DISABLED;
-                return;
-            }
-
-            signed int bytesConsumed = 0;
-            int errorCode = 0;
-            if (mIsCodecInitialized) {
-                mIsCodecConfigFlushRequired = true;
-                errorCode =
-                    decodeXAACStream(inBuffer, inBufferLength, &bytesConsumed, &numOutBytes);
-            } else if (!mIsCodecConfigFlushRequired) {
-                ALOGW("Assumption that first frame after header initializes decoder failed!");
-                mSignalledError = true;
-                notify(OMX_EventError, OMX_ErrorUndefined, -1, NULL);
-                return;
-            }
-            inHeader->nFilledLen -= bytesConsumed;
-            inHeader->nOffset += bytesConsumed;
-
-            if (inHeader->nFilledLen != 0) {
-                ALOGE("All data not consumed");
-            }
-
-            /* In case of error, decoder would have given out empty buffer */
-            if ((0 != errorCode) && (0 == numOutBytes) && mIsCodecInitialized) {
-                numOutBytes = mOutputFrameLength * (mPcmWdSz / 8) * mNumChannels;
-            }
-            numLoops++;
-
-            if (0 == bytesConsumed) {
-                ALOGW("bytesConsumed is zero");
-            }
-
-            if (errorCode) {
-                /* Clear buffer for output buffer is done inside XAAC codec */
-                /* TODO - Check if below memset is on top of reset inside codec */
-                memset(mOutputBuffer, 0, numOutBytes);  // TODO: check for overflow, ASAN
-                // Discard input buffer.
-                inHeader->nFilledLen = 0;
-                // fall through
-            }
-
-            if (inHeader->nFilledLen == 0) {
-                inInfo->mOwnedByUs = false;
-                mInputBufferCount++;
-                inQueue.erase(inQueue.begin());
-                mLastInHeader = NULL;
-                inInfo = NULL;
-                notifyEmptyBufferDone(inHeader);
-                inHeader = NULL;
-            } else {
-                ALOGV("inHeader->nFilledLen = %d", inHeader->nFilledLen);
-            }
-
-            if (!outQueue.empty() && numOutBytes) {
-                BufferInfo* outInfo = *outQueue.begin();
-                OMX_BUFFERHEADERTYPE* outHeader = outInfo->mHeader;
-
-                if (outHeader->nOffset != 0) {
-                    ALOGE("outHeader->nOffset != 0 is not handled");
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-
-                signed short* outBuffer =
-                    reinterpret_cast<signed short*>(outHeader->pBuffer + outHeader->nOffset);
-                int samplesize = mNumChannels * sizeof(int16_t);
-                if (outHeader->nOffset + mOutputFrameLength * samplesize > outHeader->nAllocLen) {
-                    ALOGE("buffer overflow");
-                    mSignalledError = true;
-                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
-                    return;
-                }
-                memcpy(outBuffer, mOutputBuffer, numOutBytes);
-                outHeader->nFilledLen = numOutBytes;
-
-                if (mEndOfInput && !outQueue.empty()) {
-                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                    mEndOfOutput = true;
-                } else {
-                    outHeader->nFlags = 0;
-                }
-                outHeader->nTimeStamp = mCurrentTimestamp;
-                mPrevTimestamp = mCurrentTimestamp;
-
-                mOutputBufferCount++;
-                outInfo->mOwnedByUs = false;
-                outQueue.erase(outQueue.begin());
-                outInfo = NULL;
-                notifyFillBufferDone(outHeader);
-                outHeader = NULL;
-            }
-        }
-
-        if (mEndOfInput) {
-            if (!outQueue.empty()) {
-                if (!mEndOfOutput) {
-                    ALOGV(" empty block signaling EOS");
-                    // send partial or empty block signaling EOS
-                    mEndOfOutput = true;
-                    BufferInfo* outInfo = *outQueue.begin();
-                    OMX_BUFFERHEADERTYPE* outHeader = outInfo->mHeader;
-
-                    outHeader->nFilledLen = 0;
-                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-                    outHeader->nTimeStamp = mPrevTimestamp;
-
-                    mOutputBufferCount++;
-                    outInfo->mOwnedByUs = false;
-                    outQueue.erase(outQueue.begin());
-                    outInfo = NULL;
-                    notifyFillBufferDone(outHeader);
-                    outHeader = NULL;
-                }
-                break;  // if outQueue not empty but no more output
-            }
-        }
-    }
-}
-
-void SoftXAAC::onPortFlushCompleted(OMX_U32 portIndex) {
-    if (portIndex == 0) {
-        // Make sure that the next buffer output does not still
-        // depend on fragments from the last one decoded.
-        // drain all existing data
-        if (mIsCodecInitialized) {
-            IA_ERRORCODE err_code = configflushDecode();
-            if (err_code != IA_NO_ERROR) {
-                ALOGE("Error in configflushDecode: Error %d", err_code);
-            }
-        }
-        drainDecoder();
-        mLastInHeader = NULL;
-        mEndOfInput = false;
-    } else {
-        mEndOfOutput = false;
-    }
-}
-
-IA_ERRORCODE SoftXAAC::configflushDecode() {
-    IA_ERRORCODE err_code;
-    UWORD32 ui_init_done;
-    uint32_t inBufferLength = 8203;
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_FLUSH_MEM, NULL);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM");
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_INPUT_BYTES, 0, &inBufferLength);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_FLUSH_MEM, NULL);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM");
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_DONE_QUERY,
-                                &ui_init_done);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY");
-
-    if (ui_init_done) {
-        err_code = getXAACStreamInfo();
-        RETURN_IF_FATAL(err_code, "getXAACStreamInfo");
-
-        ALOGV(
-            "Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz "
-            "%d\nchannelMask %d\noutputFrameLength %d",
-            mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
-
-        mIsCodecInitialized = true;
-    }
-    return IA_NO_ERROR;
-}
-IA_ERRORCODE SoftXAAC::drainDecoder() {
-    return IA_NO_ERROR;
-}
-
-void SoftXAAC::onReset() {
-    drainDecoder();
-
-    // reset the "configured" state
-    mInputBufferCount = 0;
-    mOutputBufferCount = 0;
-    mEndOfInput = false;
-    mEndOfOutput = false;
-    mLastInHeader = NULL;
-
-    mSignalledError = false;
-    mOutputPortSettingsChange = NONE;
-}
-
-void SoftXAAC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
-    if (portIndex != 1) {
-        return;
-    }
-
-    switch (mOutputPortSettingsChange) {
-        case NONE:
-            break;
-
-        case AWAITING_DISABLED: {
-            CHECK(!enabled);
-            mOutputPortSettingsChange = AWAITING_ENABLED;
-            break;
-        }
-
-        default: {
-            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
-            CHECK(enabled);
-            mOutputPortSettingsChange = NONE;
-            break;
-        }
-    }
-}
-
-IA_ERRORCODE SoftXAAC::initXAACDecoder() {
-    LOOPIDX i;
-
-    /* Error code */
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-
-    /* First part                                        */
-    /* Error Handler Init                                */
-    /* Get Library Name, Library Version and API Version */
-    /* Initialize API structure + Default config set     */
-    /* Set config params from user                       */
-    /* Initialize memory tables                          */
-    /* Get memory information and allocate memory        */
-
-    /* Memory variables */
-    UWORD32 ui_proc_mem_tabs_size;
-    /* API size */
-    UWORD32 pui_api_size;
-    pVOID pv_alloc_ptr;
-
-    mInputBufferSize = 0;
-    mInputBuffer = 0;
-    mOutputBuffer = 0;
-
-    /* Process struct initing end */
-    /* ******************************************************************/
-    /* Initialize API structure and set config params to default        */
-    /* ******************************************************************/
-
-    /* Get the API size */
-    err_code = ixheaacd_dec_api(NULL, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
-
-    /* Allocate memory for API */
-    mXheaacCodecHandle = memalign(4, pui_api_size);
-    if (mXheaacCodecHandle == NULL) {
-        ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4);
-        return IA_FATAL_ERROR;
-    }
-    mMemoryVec.push(mXheaacCodecHandle);
-
-    /* Set the config params to default values */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT,
-                                IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS, NULL);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
-#ifdef ENABLE_MPEG_D_DRC
-    /* Get the API size */
-    err_code = ia_drc_dec_api(NULL, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size);
-
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
-
-    /* Allocate memory for API */
-    mMpegDDrcHandle = memalign(4, pui_api_size);
-
-    if (mMpegDDrcHandle == NULL) {
-        ALOGE("malloc for drc api structure Failed");
-        return IA_FATAL_ERROR;
-    }
-    mMemoryVec.push(mMpegDDrcHandle);
-
-    memset(mMpegDDrcHandle, 0, pui_api_size);
-
-    /* Set the config params to default values */
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                              IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS, NULL);
-
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
-#endif
-
-    /* ******************************************************************/
-    /* Set config parameters                                            */
-    /* ******************************************************************/
-    UWORD32 ui_mp4_flag = 1;
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4, &ui_mp4_flag);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4");
-
-    /* ******************************************************************/
-    /* Initialize Memory info tables                                    */
-    /* ******************************************************************/
-
-    /* Get memory info tables size */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_MEMTABS_SIZE, 0,
-                                &ui_proc_mem_tabs_size);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEMTABS_SIZE");
-
-    pv_alloc_ptr = memalign(4, ui_proc_mem_tabs_size);
-    if (pv_alloc_ptr == NULL) {
-        ALOGE("Malloc for size (ui_proc_mem_tabs_size + 4) = %d failed!",
-              ui_proc_mem_tabs_size + 4);
-        return IA_FATAL_ERROR;
-    }
-    mMemoryVec.push(pv_alloc_ptr);
-
-    /* Set pointer for process memory tables    */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_MEMTABS_PTR, 0, pv_alloc_ptr);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR");
-
-    /* initialize the API, post config, fill memory tables  */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT,
-                                IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, NULL);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
-
-    /* ******************************************************************/
-    /* Allocate Memory with info from library                           */
-    /* ******************************************************************/
-    /* There are four different types of memories, that needs to be allocated */
-    /* persistent,scratch,input and output */
-    for (i = 0; i < 4; i++) {
-        int ui_size = 0, ui_alignment = 0, ui_type = 0;
-        pVOID pv_alloc_ptr;
-
-        /* Get memory size */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
-
-        /* Get memory alignment */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i,
-                                    &ui_alignment);
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
-
-        /* Get memory type */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
-
-        pv_alloc_ptr = memalign(ui_alignment, ui_size);
-        if (pv_alloc_ptr == NULL) {
-            ALOGE("Malloc for size (ui_size + ui_alignment) = %d failed!", ui_size + ui_alignment);
-            return IA_FATAL_ERROR;
-        }
-        mMemoryVec.push(pv_alloc_ptr);
-
-        /* Set the buffer pointer */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
-        if (ui_type == IA_MEMTYPE_INPUT) {
-            mInputBuffer = (pWORD8)pv_alloc_ptr;
-            mInputBufferSize = ui_size;
-        }
-
-        if (ui_type == IA_MEMTYPE_OUTPUT) {
-            mOutputBuffer = (pWORD8)pv_alloc_ptr;
-        }
-    }
-    /* End first part */
-
-    return IA_NO_ERROR;
-}
-
-IA_ERRORCODE SoftXAAC::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) {
-    UWORD32 ui_init_done;
-    int32_t i_bytes_consumed;
-
-    if (mInputBufferSize < inBufferLength) {
-        ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize,
-              inBufferLength);
-        return false;
-    }
-
-    /* Copy the buffer passed by Android plugin to codec input buffer */
-    memcpy(mInputBuffer, inBuffer, inBufferLength);
-
-    /* Set number of bytes to be processed */
-    IA_ERRORCODE err_code =
-        ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_INPUT_BYTES, 0, &inBufferLength);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
-
-    if (mIsCodecConfigFlushRequired) {
-        /* If codec is already initialized, then GA header is passed again */
-        /* Need to call the Flush API instead of INIT_PROCESS */
-        mIsCodecInitialized = false; /* Codec needs to be Reinitialized after flush */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_GA_HDR, NULL);
-        RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_GA_HDR");
-    } else {
-        /* Initialize the process */
-        err_code =
-            ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_PROCESS, NULL);
-        RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS");
-    }
-
-    /* Checking for end of initialization */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_DONE_QUERY,
-                                &ui_init_done);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY");
-
-    /* How much buffer is used in input buffers */
-    err_code =
-        ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CURIDX_INPUT_BUF, 0, &i_bytes_consumed);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
-
-    if (ui_init_done) {
-        err_code = getXAACStreamInfo();
-        RETURN_IF_FATAL(err_code, "getXAACStreamInfo");
-
-        ALOGI(
-            "Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz "
-            "%d\nchannelMask %d\noutputFrameLength %d",
-            mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
-        mIsCodecInitialized = true;
-
-#ifdef ENABLE_MPEG_D_DRC
-        err_code = configMPEGDDrc();
-        RETURN_IF_FATAL(err_code, "configMPEGDDrc");
-#endif
-    }
-
-    return IA_NO_ERROR;
-}
-IA_ERRORCODE SoftXAAC::initMPEGDDDrc() {
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-    int i;
-
-    for (i = 0; i < (WORD32)2; i++) {
-        WORD32 ui_size, ui_alignment, ui_type;
-        pVOID pv_alloc_ptr;
-
-        /* Get memory size */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
-
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
-
-        /* Get memory alignment */
-        err_code =
-            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i, &ui_alignment);
-
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
-
-        /* Get memory type */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
-
-        pv_alloc_ptr = memalign(4, ui_size);
-        if (pv_alloc_ptr == NULL) {
-            ALOGE(" Cannot create requested memory  %d", ui_size);
-            return IA_FATAL_ERROR;
-        }
-        mDrcMemoryVec.push(pv_alloc_ptr);
-
-        /* Set the buffer pointer */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
-
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
-    }
-
-    WORD32 ui_size;
-    ui_size = 8192 * 2;
-
-    mDrcInBuf = (int8_t*)memalign(4, ui_size);
-    if (mDrcInBuf == NULL) {
-        ALOGE(" Cannot create requested memory  %d", ui_size);
-        return IA_FATAL_ERROR;
-    }
-    mDrcMemoryVec.push(mDrcInBuf);
-
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 2, mDrcInBuf);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
-
-    mDrcOutBuf = (int8_t*)memalign(4, ui_size);
-    if (mDrcOutBuf == NULL) {
-        ALOGE(" Cannot create requested memory  %d", ui_size);
-        return IA_FATAL_ERROR;
-    }
-    mDrcMemoryVec.push(mDrcOutBuf);
-
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 3, mDrcOutBuf);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
-
-    return IA_NO_ERROR;
-}
-IA_ERRORCODE SoftXAAC::configMPEGDDrc() {
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-    int i_effect_type;
-    int i_loud_norm;
-    int i_target_loudness;
-    unsigned int i_sbr_mode;
-    int i;
-    int ui_proc_mem_tabs_size = 0;
-    pVOID pv_alloc_ptr = NULL;
-
-#ifdef ENABLE_MPEG_D_DRC
-    {
-        /* Sampling Frequency */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ, &mSampFreq);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ");
-        /* Total Number of Channels */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS");
-
-        /* PCM word size  */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ");
-
-        /*Set Effect Type*/
-
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
-
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
-
-        /*Set target loudness */
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS,
-                                    &i_target_loudness);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
-
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
-
-        /*Set loud_norm_flag*/
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
-
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                  IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
-        RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
-
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, &i_sbr_mode);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
-
-        /* Get memory info tables size */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEMTABS_SIZE, 0,
-                                  &ui_proc_mem_tabs_size);
-
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEMTABS_SIZE");
-
-        pv_alloc_ptr = memalign(4, ui_proc_mem_tabs_size);
-
-        if (pv_alloc_ptr == NULL) {
-            ALOGE("Cannot create requested memory  %d", ui_proc_mem_tabs_size);
-            return IA_FATAL_ERROR;
-        }
-
-        memset(pv_alloc_ptr, 0, ui_proc_mem_tabs_size);
-
-        mMemoryVec.push(pv_alloc_ptr);
-
-        /* Set pointer for process memory tables */
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEMTABS_PTR, 0,
-                                  pv_alloc_ptr);
-
-        RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR");
-
-        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                  IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, NULL);
-
-        RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
-
-        /* Free any memory that is allocated for MPEG D Drc so far */
-        deInitMPEGDDDrc();
-
-        err_code = initMPEGDDDrc();
-        if (err_code != IA_NO_ERROR) {
-            ALOGE("initMPEGDDDrc failed with error %d", err_code);
-            deInitMPEGDDDrc();
-            return err_code;
-        }
-
-        /* DRC buffers
-            buf[0] - contains extension element pay load loudness related
-            buf[1] - contains extension element pay load*/
-        {
-            VOID* p_array[2][16];
-            WORD32 ii;
-            WORD32 buf_sizes[2][16];
-            WORD32 num_elements;
-            WORD32 num_config_ext;
-            WORD32 bit_str_fmt = 1;
-
-            WORD32 uo_num_chan;
-
-            memset(buf_sizes, 0, 32 * sizeof(WORD32));
-
-            err_code =
-                ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                 IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES, &buf_sizes[0][0]);
-            RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES");
-
-            err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                        IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR, &p_array);
-            RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR");
-
-            err_code =
-                ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_SET_BUFF_PTR, 0);
-            RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_SET_BUFF_PTR");
-
-            err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                        IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE, &num_elements);
-            RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE");
-
-            err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                        IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT, &num_config_ext);
-            RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT");
-
-            for (ii = 0; ii < num_config_ext; ii++) {
-                /*copy loudness bitstream*/
-                if (buf_sizes[0][ii] > 0) {
-                    memcpy(mDrcInBuf, p_array[0][ii], buf_sizes[0][ii]);
-
-                    /*Set bitstream_split_format */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    /* Set number of bytes to be processed */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IL_BS, 0,
-                                              &buf_sizes[0][ii]);
-                    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IL_BS");
-
-                    /* Execute process */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                              IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF, NULL);
-                    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF");
-
-                    mDRCFlag = 1;
-                }
-            }
-
-            for (ii = 0; ii < num_elements; ii++) {
-                /*copy config bitstream*/
-                if (buf_sizes[1][ii] > 0) {
-                    memcpy(mDrcInBuf, p_array[1][ii], buf_sizes[1][ii]);
-                    /* Set number of bytes to be processed */
-
-                    /*Set bitstream_split_format */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IC_BS, 0,
-                                              &buf_sizes[1][ii]);
-                    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IC_BS");
-
-                    /* Execute process */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                              IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF, NULL);
-
-                    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF");
-
-                    mDRCFlag = 1;
-                }
-            }
-
-            if (mDRCFlag == 1) {
-                mMpegDDRCPresent = 1;
-            } else {
-                mMpegDDRCPresent = 0;
-            }
-
-            /*Read interface buffer config file bitstream*/
-            if (mMpegDDRCPresent == 1) {
-                WORD32 interface_is_present = 1;
-                WORD32 frame_length;
-
-                if (i_sbr_mode != 0) {
-                    if (i_sbr_mode == 1) {
-                        frame_length = 2048;
-                    } else if (i_sbr_mode == 3) {
-                        frame_length = 4096;
-                    } else {
-                        frame_length = 1024;
-                    }
-                } else {
-                    frame_length = 4096;
-                }
-
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                          IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE, &frame_length);
-                RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE");
-
-                err_code =
-                    ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                   IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT, &interface_is_present);
-                RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT");
-
-                /* Execute process */
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                          IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF, NULL);
-                RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF");
-
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                          IA_CMD_TYPE_INIT_PROCESS, NULL);
-                RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS");
-
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                          IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &uo_num_chan);
-                RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS");
-            }
-        }
-    }
-#endif
-
-    return IA_NO_ERROR;
-}
-IA_ERRORCODE SoftXAAC::decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength,
-                                        int32_t* bytesConsumed, int32_t* outBytes) {
-    if (mInputBufferSize < inBufferLength) {
-        ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize,
-              inBufferLength);
-        return -1;
-    }
-
-    /* Copy the buffer passed by Android plugin to codec input buffer */
-    memcpy(mInputBuffer, inBuffer, inBufferLength);
-
-    /* Set number of bytes to be processed */
-    IA_ERRORCODE err_code =
-        ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_INPUT_BYTES, 0, &inBufferLength);
-    RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
-
-    /* Execute process */
-    err_code =
-        ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_EXECUTE, IA_CMD_TYPE_DO_EXECUTE, NULL);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE");
-
-    UWORD32 ui_exec_done;
-    WORD32 i_num_preroll = 0;
-    /* Checking for end of processing */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_EXECUTE, IA_CMD_TYPE_DONE_QUERY,
-                                &ui_exec_done);
-    RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DONE_QUERY");
-
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                              IA_ENHAACPLUS_DEC_CONFIG_GET_NUM_PRE_ROLL_FRAMES,
-                              &i_num_preroll);
-
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GET_NUM_PRE_ROLL_FRAMES");
-    {
-        int32_t pi_preroll_frame_offset = 0;
-        do {
-#ifdef ENABLE_MPEG_D_DRC
-            if (ui_exec_done != 1) {
-                VOID* p_array;        // ITTIAM:buffer to handle gain payload
-                WORD32 buf_size = 0;  // ITTIAM:gain payload length
-                WORD32 bit_str_fmt = 1;
-                WORD32 gain_stream_flag = 1;
-
-                err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                            IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN, &buf_size);
-                RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN");
-
-                err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                            IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF, &p_array);
-                RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF");
-
-                if (buf_size > 0) {
-                    /*Set bitstream_split_format */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    memcpy(mDrcInBuf, p_array, buf_size);
-                    /* Set number of bytes to be processed */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_BS,
-                                              0, &buf_size);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                              IA_DRC_DEC_CONFIG_GAIN_STREAM_FLAG,
-                                              &gain_stream_flag);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    /* Execute process */
-                    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
-                                              IA_CMD_TYPE_INIT_CPY_BSF_BUFF, NULL);
-                    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
-
-                    mMpegDDRCPresent = 1;
-                }
-            }
-#endif
-            /* How much buffer is used in input buffers */
-            err_code =
-                ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CURIDX_INPUT_BUF,
-                                 0, bytesConsumed);
-            RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
-
-            /* Get the output bytes */
-            err_code = ixheaacd_dec_api(mXheaacCodecHandle,
-                                        IA_API_CMD_GET_OUTPUT_BYTES, 0, outBytes);
-            RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_OUTPUT_BYTES");
-#ifdef ENABLE_MPEG_D_DRC
-
-            if (mMpegDDRCPresent == 1) {
-                memcpy(mDrcInBuf, mOutputBuffer + pi_preroll_frame_offset, *outBytes);
-                pi_preroll_frame_offset += *outBytes;
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES,
-                                          0, outBytes);
-                RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
-
-                err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_EXECUTE,
-                                          IA_CMD_TYPE_DO_EXECUTE, NULL);
-                RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE");
-
-                memcpy(mOutputBuffer, mDrcOutBuf, *outBytes);
-            }
-#endif
-            i_num_preroll--;
-        } while (i_num_preroll > 0);
-    }
-    return IA_NO_ERROR;
-}
-
-IA_ERRORCODE SoftXAAC::deInitXAACDecoder() {
-    ALOGI("deInitXAACDecoder");
-
-    /* Tell that the input is over in this buffer */
-    IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INPUT_OVER, 0, NULL);
-
-    /* Irrespective of error returned in IA_API_CMD_INPUT_OVER, free allocated memory */
-    for (void* buf : mMemoryVec) {
-        free(buf);
-    }
-    mMemoryVec.clear();
-    return err_code;
-}
-
-IA_ERRORCODE SoftXAAC::deInitMPEGDDDrc() {
-    ALOGI("deInitMPEGDDDrc");
-
-    for (void* buf : mDrcMemoryVec) {
-        free(buf);
-    }
-    mDrcMemoryVec.clear();
-    return IA_NO_ERROR;
-}
-
-IA_ERRORCODE SoftXAAC::getXAACStreamInfo() {
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-
-    /* Sampling frequency */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ, &mSampFreq);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ");
-
-    /* Total Number of Channels */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS");
-    if (mNumChannels > MAX_CHANNEL_COUNT) {
-        ALOGE(" No of channels are more than max channels\n");
-        return IA_FATAL_ERROR;
-    }
-
-    /* PCM word size */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ");
-    if ((mPcmWdSz / 8) != 2) {
-        ALOGE("Invalid Number of bytes per sample: %d, Expected is 2", mPcmWdSz);
-        return IA_FATAL_ERROR;
-    }
-
-    /* channel mask to tell the arrangement of channels in bit stream */
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK, &mChannelMask);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK");
-
-    /* Channel mode to tell MONO/STEREO/DUAL-MONO/NONE_OF_THESE */
-    UWORD32 ui_channel_mode;
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE, &ui_channel_mode);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE");
-    if (ui_channel_mode == 0)
-        ALOGV("Channel Mode: MONO_OR_PS\n");
-    else if (ui_channel_mode == 1)
-        ALOGV("Channel Mode: STEREO\n");
-    else if (ui_channel_mode == 2)
-        ALOGV("Channel Mode: DUAL-MONO\n");
-    else
-        ALOGV("Channel Mode: NONE_OF_THESE or MULTICHANNEL\n");
-
-    /* Channel mode to tell SBR PRESENT/NOT_PRESENT */
-    UWORD32 ui_sbr_mode;
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, &ui_sbr_mode);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
-    if (ui_sbr_mode == 0)
-        ALOGV("SBR Mode: NOT_PRESENT\n");
-    else if (ui_sbr_mode == 1)
-        ALOGV("SBR Mode: PRESENT\n");
-    else
-        ALOGV("SBR Mode: ILLEGAL\n");
-
-    /* mOutputFrameLength = 1024 * (1 + SBR_MODE) for AAC */
-    /* For USAC it could be 1024 * 3 , support to query  */
-    /* not yet added in codec                            */
-    mOutputFrameLength = 1024 * (1 + ui_sbr_mode);
-
-    ALOGI("mOutputFrameLength %d ui_sbr_mode %d", mOutputFrameLength, ui_sbr_mode);
-
-    return IA_NO_ERROR;
-}
-
-IA_ERRORCODE SoftXAAC::setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, int32_t drcRefLevel,
-                                      int32_t drcHeavyCompression
-#ifdef ENABLE_MPEG_D_DRC
-                                      ,
-                                      int32_t drEffectType
-#endif
-) {
-    IA_ERRORCODE err_code = IA_NO_ERROR;
-
-    int32_t ui_drc_enable = 1;
-    int32_t i_effect_type, i_target_loudness, i_loud_norm;
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE, &ui_drc_enable);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE");
-    if (drcCut != -1) {
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT, &drcCut);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
-    }
-
-    if (drcBoost != -1) {
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST, &drcBoost);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
-    }
-
-    if (drcRefLevel != -1) {
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL, &drcRefLevel);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
-    }
-#ifdef ENABLE_MPEG_D_DRC
-    if (drcRefLevel != -1) {
-        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                    IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &drcRefLevel);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
-    }
-#endif
-    if (drcHeavyCompression != -1) {
-        err_code =
-            ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                             IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP, &drcHeavyCompression);
-        RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
-    }
-
-#ifdef ENABLE_MPEG_D_DRC
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &drEffectType);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
-#endif
-
-#ifdef ENABLE_MPEG_D_DRC
-    /*Set Effect Type*/
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
-
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                              IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
-
-    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
-
-    /*Set target loudness */
-    err_code =
-        ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                         IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, &i_target_loudness);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
-
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                              IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness);
-    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
-
-    /*Set loud_norm_flag*/
-    err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
-                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm);
-    RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
-
-    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
-                              IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
-
-    RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
-
-#endif
-
-    return IA_NO_ERROR;
-}
-
-}  // namespace android
-
-__attribute__((cfi_canonical_jump_table))
-android::SoftOMXComponent* createSoftOMXComponent(const char* name,
-                                                  const OMX_CALLBACKTYPE* callbacks,
-                                                  OMX_PTR appData, OMX_COMPONENTTYPE** component) {
-    ALOGI("createSoftOMXComponent for SoftXAACDEC");
-    return new android::SoftXAAC(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.h b/media/libstagefright/codecs/xaacdec/SoftXAAC.h
deleted file mode 100644
index a62a797..0000000
--- a/media/libstagefright/codecs/xaacdec/SoftXAAC.h
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFTXAAC_H_
-#define SOFTXAAC_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-#include <string.h>
-#include <stdlib.h>
-#include <stdio.h>
-
-#include "ixheaacd_type_def.h"
-#include "ixheaacd_error_standards.h"
-#include "ixheaacd_error_handler.h"
-#include "ixheaacd_apicmd_standards.h"
-#include "ixheaacd_memory_standards.h"
-#include "ixheaacd_aac_config.h"
-
-#include "impd_apicmd_standards.h"
-#include "impd_drc_config_params.h"
-
-extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
-                                         pVOID pv_value);
-extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
-                                       pVOID pv_value);
-extern "C" IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj, pWORD32 pi_samp_freq,
-                                                  pWORD32 pi_num_chan, pWORD32 pi_pcm_wd_sz,
-                                                  pWORD32 pi_channel_mask);
-
-namespace android {
-
-struct SoftXAAC : public SimpleSoftOMXComponent {
-    SoftXAAC(const char* name, const OMX_CALLBACKTYPE* callbacks, OMX_PTR appData,
-             OMX_COMPONENTTYPE** component);
-
-   protected:
-    virtual ~SoftXAAC();
-
-    virtual OMX_ERRORTYPE internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-    virtual void onPortFlushCompleted(OMX_U32 portIndex);
-    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-    virtual void onReset();
-
-   private:
-    enum {
-        kNumInputBuffers = 4,
-        kNumOutputBuffers = 4,
-        kNumDelayBlocksMax = 8,
-    };
-
-    bool mIsADTS;
-    size_t mInputBufferCount;
-    size_t mOutputBufferCount;
-    bool mSignalledError;
-    OMX_BUFFERHEADERTYPE* mLastInHeader;
-    int64_t mPrevTimestamp;
-    int64_t mCurrentTimestamp;
-    uint32_t mBufSize;
-
-    enum { NONE, AWAITING_DISABLED, AWAITING_ENABLED } mOutputPortSettingsChange;
-
-    void initPorts();
-    IA_ERRORCODE initDecoder();
-    bool isConfigured() const;
-    IA_ERRORCODE drainDecoder();
-    IA_ERRORCODE initXAACDecoder();
-    IA_ERRORCODE deInitXAACDecoder();
-    IA_ERRORCODE initMPEGDDDrc();
-    IA_ERRORCODE deInitMPEGDDDrc();
-    IA_ERRORCODE configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
-    IA_ERRORCODE configMPEGDDrc();
-    IA_ERRORCODE decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength,
-                                  int32_t* bytesConsumed, int32_t* outBytes);
-
-    IA_ERRORCODE configflushDecode();
-    IA_ERRORCODE getXAACStreamInfo();
-    IA_ERRORCODE setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, int32_t drcRefLevel,
-                                int32_t drcHeavyCompression
-#ifdef ENABLE_MPEG_D_DRC
-                                ,
-                                int32_t drEffectType
-#endif
-    );
-
-    bool mEndOfInput;
-    bool mEndOfOutput;
-
-    void* mXheaacCodecHandle;
-    void* mMpegDDrcHandle;
-    uint32_t mInputBufferSize;
-    uint32_t mOutputFrameLength;
-    int8_t* mInputBuffer;
-    int8_t* mOutputBuffer;
-    int32_t mSampFreq;
-    int32_t mNumChannels;
-    int32_t mPcmWdSz;
-    int32_t mChannelMask;
-    bool mIsCodecInitialized;
-    bool mIsCodecConfigFlushRequired;
-    int8_t* mDrcInBuf;
-    int8_t* mDrcOutBuf;
-    int32_t mMpegDDRCPresent;
-    int32_t mDRCFlag;
-    Vector<void*> mMemoryVec;
-    Vector<void*> mDrcMemoryVec;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftXAAC);
-};
-
-}  // namespace android
-
-#endif  // SOFTXAAC_H_
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index c79ac5c..4f45817 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -420,12 +420,12 @@
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
-            <Limit name="size" min="2x2" max="1920x1920" />
+            <Limit name="size" min="16x16" max="1920x1920" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
             <Limit name="bitrate" range="1-240000000" />
-            <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="bitrate-modes" value="VBR" />
             <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index 94c201f..fd49010 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -120,6 +120,7 @@
     sp<ALooper> mAsyncLooper;
     bool mHaveMoreInputs;
     bool mFirstSample;
+    bool mSourceStopped;
     bool mHandleOutputBufferAsyncDone;
     sp<Surface> mSurface;
     std::mutex mMutex;
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index c6087b0..dab6a11 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -31,6 +31,7 @@
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/ResourceInfo.h>
 #include <system/graphics.h>
 #include <utils/NativeHandle.h>
 
@@ -218,6 +219,10 @@
          * @param updatedMetrics metrics need to be updated.
          */
         virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) = 0;
+        /**
+         * Notify MediaCodec that there is a change in the required resources.
+         */
+        virtual void onRequiredResourcesChanged() = 0;
     };
 
     /**
@@ -328,6 +333,13 @@
      */
     virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names);
 
+    /**
+     * Get the required resources for the compomemt at the current
+     * configuration.
+     *
+     */
+    virtual std::vector<InstanceResourceInfo> getRequiredSystemResources();
+
     typedef CodecBase *(*CreateCodecFunc)(void);
     typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index df1ebd7..f03a2a0 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -29,10 +29,12 @@
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/Mutexed.h>
 #include <media/stagefright/CodecErrorLog.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaHistogram.h>
 #include <media/stagefright/PlaybackDurationAccumulator.h>
+#include <media/stagefright/ResourceInfo.h>
 #include <media/stagefright/VideoRenderQualityTracker.h>
 #include <utils/Vector.h>
 
@@ -120,7 +122,7 @@
         CB_OUTPUT_AVAILABLE = 2,
         CB_ERROR = 3,
         CB_OUTPUT_FORMAT_CHANGED = 4,
-        CB_RESOURCE_RECLAIMED = 5,
+        CB_RESOURCE_RECLAIMED = 5,      // deprecated and not used
         CB_CRYPTO_ERROR = 6,
         CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
 
@@ -155,64 +157,6 @@
     static sp<PersistentSurface> CreatePersistentInputSurface();
 
     /**
-     * Abstraction for the Global Codec resources.
-     * This encapsulates all the available codec resources on the device.
-     */
-    struct GlobalResourceInfo {
-        /**
-         * Name of the Resource type.
-         */
-        std::string mName;
-        /**
-         * Total count/capacity of resources of this type.
-         */
-        int mCapacity;
-        /**
-         * Available count of this resource type.
-         */
-        int mAvailable;
-
-        GlobalResourceInfo(const std::string& name, int capacity, int available) :
-                mName(name),
-                mCapacity(capacity),
-                mAvailable(available) {}
-
-        GlobalResourceInfo(const GlobalResourceInfo& info) :
-                mName(info.mName),
-                mCapacity(info.mCapacity),
-                mAvailable(info.mAvailable) {}
-    };
-
-    /**
-     * Abstraction for the resources associated with a codec instance.
-     * This encapsulates the required codec resources for a configured codec instance.
-     */
-    struct InstanceResourceInfo {
-        /**
-         * Name of the Resource type.
-         */
-        std::string mName;
-        /**
-         * Required resource count of this type.
-         */
-        int mStaticCount;
-        /**
-         * Per frame resource requirement of this resource type.
-         */
-        int mPerFrameCount;
-
-        InstanceResourceInfo(const std::string& name, int staticCount, int perFrameCount) :
-                mName(name),
-                mStaticCount(staticCount),
-                mPerFrameCount(perFrameCount) {}
-
-        InstanceResourceInfo(const InstanceResourceInfo& info) :
-                mName(info.mName),
-                mStaticCount(info.mStaticCount),
-                mPerFrameCount(info.mPerFrameCount) {}
-    };
-
-    /**
      * Get a list of Globally available device codec resources.
      *
      * It will return INVALID_OPERATION if:
@@ -437,6 +381,15 @@
                                              uint32_t flags,
                                              status_t* err);
 
+    // Get the required system resources for the current configuration.
+    bool getRequiredSystemResources();
+    // Convert all dynamic (non-constant) resource types into
+    // constant resource counts.
+    std::vector<InstanceResourceInfo> computeDynamicResources(
+            const std::vector<InstanceResourceInfo>& resources);
+    void updateResourceUsage(const std::vector<InstanceResourceInfo>& oldResources,
+                             const std::vector<InstanceResourceInfo>& newResources);
+
 private:
     enum State {
         UNINITIALIZED,
@@ -594,6 +547,7 @@
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
     void updateCodecImportance(const sp<AMessage>& msg);
+    void updatePictureProfile(const sp<AMessage>& msg, bool applyDefaultProfile);
     void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
@@ -773,7 +727,7 @@
     void onCryptoError(const sp<AMessage> &msg);
     void onError(status_t err, int32_t actionCode, const char *detail = NULL);
     void onOutputFormatChanged();
-    void onRequiredResourcesChanged(const std::vector<InstanceResourceInfo>& resourceInfo);
+    void onRequiredResourcesChanged();
 
     status_t onSetParameters(const sp<AMessage> &params);
 
@@ -804,6 +758,8 @@
 
     void onReleaseCrypto(const sp<AMessage>& msg);
 
+    void stopCryptoAsync();
+
     // managing time-of-flight aka latency
     typedef struct {
             int64_t presentationUs;
@@ -874,7 +830,10 @@
 
     CodecErrorLog mErrorLog;
     // Required resource info for this codec.
-    std::vector<InstanceResourceInfo> mRequiredResourceInfo;
+    Mutexed<std::vector<InstanceResourceInfo>> mRequiredResourceInfo;
+
+    // Default frame-rate.
+    float mFrameRate = 30.0;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index b0f671d..af1e6dd 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -593,9 +593,9 @@
 
 inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
     switch (i) {
-        case APVProfile422_10:           return "APVProfile422_10";
-        case APVProfile422_10HDR10:      return "APVProfile422_10HDR10";
-        case APVProfile422_10HDR10Plus:  return "APVProfile422_10HDR10Plus";
+        case APVProfile422_10:           return "422_10";
+        case APVProfile422_10HDR10:      return "422_10HDR10";
+        case APVProfile422_10HDR10Plus:  return "422_10HDR10Plus";
         default:                        return def;
     }
 }
@@ -719,6 +719,54 @@
     }
 }
 
+// Profiles and levels for AC-4 Codec, corresponding to the definitions in
+// "The MIME codecs parameter", Annex E.13
+// found at https://www.etsi.org/deliver/etsi_ts/103100_103199/10319002/01.02.01_60/ts_10319002v010201p.pdf
+// profile = ((1 << bitstream_version) << 8) | (1 << presentation_version);
+// level = 1 << mdcompat;
+
+inline constexpr int32_t AC4BitstreamVersion0 = 0x01;
+inline constexpr int32_t AC4BitstreamVersion1 = 0x02;
+inline constexpr int32_t AC4BitstreamVersion2 = 0x04;
+
+inline constexpr int32_t AC4PresentationVersion0 = 0x01;
+inline constexpr int32_t AC4PresentationVersion1 = 0x02;
+inline constexpr int32_t AC4PresentationVersion2 = 0x04;
+
+inline constexpr int32_t AC4Profile00 = AC4BitstreamVersion0 << 8 | AC4PresentationVersion0;
+inline constexpr int32_t AC4Profile10 = AC4BitstreamVersion1 << 8 | AC4PresentationVersion0;
+inline constexpr int32_t AC4Profile11 = AC4BitstreamVersion1 << 8 | AC4PresentationVersion1;
+inline constexpr int32_t AC4Profile21 = AC4BitstreamVersion2 << 8 | AC4PresentationVersion1;
+inline constexpr int32_t AC4Profile22 = AC4BitstreamVersion2 << 8 | AC4PresentationVersion2;
+
+inline static const char *asString_AC4Profile(int32_t profile, const char *def = "??") {
+    switch (profile) {
+        case AC4Profile00: return "00.00";
+        case AC4Profile10: return "01.00";
+        case AC4Profile11: return "01.01";
+        case AC4Profile21: return "02.01";
+        case AC4Profile22: return "02.02";
+        default:           return def;
+    }
+}
+
+inline constexpr int32_t AC4Level0 = 0x01;
+inline constexpr int32_t AC4Level1 = 0x02;
+inline constexpr int32_t AC4Level2 = 0x04;
+inline constexpr int32_t AC4Level3 = 0x08;
+inline constexpr int32_t AC4Level4 = 0x10;
+
+inline static const char *asString_AC4Level(int32_t level, const char *def = "??") {
+    switch (level) {
+        case AC4Level0: return "00";
+        case AC4Level1: return "01";
+        case AC4Level2: return "02";
+        case AC4Level3: return "03";
+        case AC4Level4: return "04";
+        default:        return def;
+    }
+}
+
 inline constexpr int32_t BITRATE_MODE_CBR = 2;
 inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -854,15 +902,19 @@
 }
 
 inline constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
+inline constexpr char FEATURE_DetachedSurface[]        = "detached-surface";
+inline constexpr char FEATURE_DynamicColorAspects[]    = "dynamic-color-aspects";
 inline constexpr char FEATURE_DynamicTimestamp[]       = "dynamic-timestamp";
 inline constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
 inline constexpr char FEATURE_FrameParsing[]           = "frame-parsing";
 inline constexpr char FEATURE_HdrEditing[]             = "hdr-editing";
+inline constexpr char FEATURE_HlgEditing[]             = "hlg-editing";
 inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
 inline constexpr char FEATURE_LowLatency[]             = "low-latency";
 inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
 inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
 inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
+inline constexpr char FEATURE_Roi[]                    = "region-of-interest";
 inline constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
 inline constexpr char FEATURE_SpecialCodec[]           = "special-codec";
 inline constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
@@ -1020,6 +1072,8 @@
 inline constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
 inline constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
 inline constexpr char KEY_WIDTH[] = "width";
+inline constexpr char KEY_PICTURE_PROFILE_HANDLE[] = "picture-profile-handle";
+inline constexpr char KEY_PICTURE_PROFILE_ID[] = "picture-profile-id";
 
 // from MediaCodec.java
 inline constexpr int32_t ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 08a5324..34d6a35 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -90,11 +90,7 @@
         void binderDied(const wp<IBinder> &the_late_who __unused);
     };
 
-    static sp<BinderDeathObserver> sBinderDeathObserver;
-    static sp<IBinder> sMediaPlayer;
-
-    static sp<IMediaCodecList> sCodecList;
-    static sp<IMediaCodecList> sRemoteList;
+    class InstanceCache;
 
     status_t mInitCheck{NO_INIT};
 
diff --git a/media/libstagefright/include/media/stagefright/MediaSync.h b/media/libstagefright/include/media/stagefright/MediaSync.h
index ef8cb23..f6f36bb 100644
--- a/media/libstagefright/include/media/stagefright/MediaSync.h
+++ b/media/libstagefright/include/media/stagefright/MediaSync.h
@@ -17,7 +17,13 @@
 #ifndef MEDIA_SYNC_H
 #define MEDIA_SYNC_H
 
+#include <com_android_graphics_libgui_flags.h>
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+#include <gui/BufferItemConsumer.h>
+#else
 #include <gui/IConsumerListener.h>
+#endif
 #include <gui/IProducerListener.h>
 
 #include <media/AudioResamplerPublic.h>
@@ -34,7 +40,9 @@
 class BufferItem;
 class Fence;
 class GraphicBuffer;
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
 class IGraphicBufferConsumer;
+#endif
 class IGraphicBufferProducer;
 struct MediaClock;
 struct VideoFrameScheduler;
@@ -140,14 +148,19 @@
 
     // This is a thin wrapper class that lets us listen to
     // IConsumerListener::onFrameAvailable from mInput.
-    class InputListener : public BnConsumerListener,
-                          public IBinder::DeathRecipient {
-    public:
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    class InputListener : public BufferItemConsumer::FrameAvailableListener {
+#else
+    class InputListener : public IConsumerListener, public IBinder::DeathRecipient {
+#endif
+      public:
         InputListener(const sp<MediaSync> &sync);
         virtual ~InputListener();
 
-        // From IConsumerListener
-        virtual void onFrameAvailable(const BufferItem &item);
+        // From FrameAvailableListener
+        virtual void onFrameAvailable(const BufferItem&) override;
+
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
 
         // From IConsumerListener
         // We don't care about released buffers because we detach each buffer as
@@ -161,8 +174,9 @@
 
         // From IBinder::DeathRecipient
         virtual void binderDied(const wp<IBinder> &who);
+#endif
 
-    private:
+      private:
         sp<MediaSync> mSync;
     };
 
@@ -193,7 +207,12 @@
     mutable Mutex mMutex;
     Condition mReleaseCondition;
     size_t mNumOutstandingBuffers;
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
+    sp<BufferItemConsumer> mInput;
+    sp<InputListener> mListener;  // listener for mInput, so the reference isn't dropped.
+#else
     sp<IGraphicBufferConsumer> mInput;
+#endif
     sp<IGraphicBufferProducer> mOutput;
     int mUsageFlagsFromOutput;
     uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
diff --git a/media/libstagefright/include/media/stagefright/ResourceInfo.h b/media/libstagefright/include/media/stagefright/ResourceInfo.h
new file mode 100644
index 0000000..545d94e
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/ResourceInfo.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESOURCE_INFO_H_
+#define RESOURCE_INFO_H_
+
+#include <string>
+
+namespace android {
+/**
+ * Abstraction for the Global Codec resources.
+ * This encapsulates all the available codec resources on the device.
+ */
+struct GlobalResourceInfo {
+    /**
+     * Name of the Resource type.
+     */
+    std::string mName;
+    /**
+     * Total count/capacity of resources of this type.
+     */
+    uint64_t mCapacity;
+    /**
+     * Available count of this resource type.
+     */
+    uint64_t mAvailable;
+
+    GlobalResourceInfo(const std::string& name, uint64_t capacity, uint64_t available) :
+            mName(name),
+            mCapacity(capacity),
+            mAvailable(available) {}
+
+    GlobalResourceInfo(const GlobalResourceInfo& info) :
+            mName(info.mName),
+            mCapacity(info.mCapacity),
+            mAvailable(info.mAvailable) {}
+};
+
+/**
+ * Abstraction for the resources associated with a codec instance.
+ * This encapsulates the required codec resources for a configured codec instance.
+ */
+struct InstanceResourceInfo {
+    /**
+     * Name of the Resource type.
+     */
+    std::string mName;
+    /**
+     * Required resource count of this type.
+     */
+    uint64_t mStaticCount;
+    /**
+     * Per frame resource requirement of this resource type.
+     */
+    uint64_t mPerFrameCount;
+
+    InstanceResourceInfo(const std::string& name, uint64_t staticCount, uint64_t perFrameCount) :
+            mName(name),
+            mStaticCount(staticCount),
+            mPerFrameCount(perFrameCount) {}
+
+    InstanceResourceInfo(const InstanceResourceInfo& info) :
+            mName(info.mName),
+            mStaticCount(info.mStaticCount),
+            mPerFrameCount(info.mPerFrameCount) {}
+};
+
+}  // namespace android
+
+#endif // RESOURCE_INFO_H_
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 630817c..6ba7896 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -179,39 +179,6 @@
     },
 }
 
-cc_defaults {
-    name: "libstagefright_softomx-defaults",
-    // TODO (b/316432618) Software OMX codecs are no longer used, disable building them till
-    // this code is removed completely.
-    enabled: false,
-    vendor_available: true,
-
-    cflags: [
-        "-Werror",
-    ],
-
-    header_libs: [
-        "media_plugin_headers"
-    ],
-
-    shared_libs: [
-        "libstagefright_softomx",
-        "libstagefright_foundation",
-        "libutils",
-        "liblog",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    compile_multilib: "32",
-}
-
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
@@ -241,5 +208,8 @@
         ],
         cfi: true,
     },
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 }
diff --git a/media/libstagefright/tests/extractorFactory/AndroidTest.xml b/media/libstagefright/tests/extractorFactory/AndroidTest.xml
index f1d4201..f1ea5c2 100644
--- a/media/libstagefright/tests/extractorFactory/AndroidTest.xml
+++ b/media/libstagefright/tests/extractorFactory/AndroidTest.xml
@@ -26,13 +26,13 @@
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="extractor-1.5" />
+        <option name="media-folder-name" value="/data/local/tmp/extractorFactory-1.5" />
         <option name="dynamic-config-module" value="ExtractorFactoryTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="ExtractorFactoryTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/extractor-1.5/" />
+        <option name="native-test-flag" value="-P /data/local/tmp/extractorFactory-1.5/" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/extractorFactory/DynamicConfig.xml b/media/libstagefright/tests/extractorFactory/DynamicConfig.xml
index 7bce77f..11b8289 100644
--- a/media/libstagefright/tests/extractorFactory/DynamicConfig.xml
+++ b/media/libstagefright/tests/extractorFactory/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/libstagefright/tests/extractorFactory/extractor-1.5.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/libstagefright/tests/extractorFactory/extractorFactory-1.5.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/libstagefright/tests/extractorFactory/README.md b/media/libstagefright/tests/extractorFactory/README.md
index aae247a..1350ff9 100644
--- a/media/libstagefright/tests/extractorFactory/README.md
+++ b/media/libstagefright/tests/extractorFactory/README.md
@@ -19,16 +19,16 @@
 
 adb push ${OUT}/data/nativetest/ExtractorFactoryTest/ExtractorFactoryTest /data/local/tmp/
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/libstagefright/tests/extractorFactory/extractor-1.5.zip).
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/libstagefright/tests/extractorFactory/extractorFactory-1.5.zip).
 Download, unzip and push these files into device for testing.
 
 ```
-adb push extractor-1.5 /data/local/tmp/
+adb push extractorFactory-1.5 /data/local/tmp/
 ```
 
 usage: ExtractorFactoryTest -P \<path_to_res_folder\>
 ```
-adb shell /data/local/tmp/ExtractorFactoryTest -P /data/local/tmp/extractor-1.5/
+adb shell /data/local/tmp/ExtractorFactoryTest -P /data/local/tmp/extractorFactory-1.5/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 151ce7c..e49d2ef 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -284,7 +284,7 @@
 
     // Max file size limit is set
     if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
-        size = mMaxFileSizeLimitBytes * 6 / 1000;
+        size = mMaxFileSizeLimitBytes / 1000 * 6;
     }
 
     // Max file duration limit is set
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index bd11326..f88bfd3 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -515,11 +515,6 @@
 
     std::call_once(sCheckOnce, [&](){
         mysdk = android_get_device_api_level();
-
-        // work around main development branch being on same SDK as the last dessert release.
-        if (__ANDROID_API__ == __ANDROID_API_FUTURE__) {
-            mysdk++;
-        }
     });
 #endif  // __ANDROID_API_U__
 }
diff --git a/media/module/bufferpool/2.0/include/bufferpool/ClientManager.h b/media/module/bufferpool/2.0/include/bufferpool/ClientManager.h
index 24b61f4..7efb74d 100644
--- a/media/module/bufferpool/2.0/include/bufferpool/ClientManager.h
+++ b/media/module/bufferpool/2.0/include/bufferpool/ClientManager.h
@@ -21,6 +21,7 @@
 #include <hidl/MQDescriptor.h>
 #include <hidl/Status.h>
 #include <memory>
+#include <mutex>
 #include "BufferPoolTypes.h"
 
 namespace android {
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index effd24a..f0aff32 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -52,6 +52,7 @@
 
     shared_libs: [
         "server_configurable_flags",
+        "libbase",
     ],
 
     host_supported: true,
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index f062491..0695ceb 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -22,12 +22,17 @@
 #include <algorithm>
 #include <map>
 #include <memory>
+#include <numeric>
 #include <stdint.h>
 #include <stdlib.h>
 #include <string.h>
 
 #include <utils/Log.h>
 
+#include <android-base/properties.h>
+#ifdef __ANDROID__
+#include <android/api-level.h>
+#endif  //__ANDROID__
 #include "AC4Parser.h"
 #include "MPEG4Extractor.h"
 #include "SampleTable.h"
@@ -84,6 +89,22 @@
     kMaxAtomSize = 64 * 1024 * 1024,
 };
 
+static bool isAtLeastRelease([[maybe_unused]] int version,
+                             [[maybe_unused]] const std::string codeName) {
+#ifdef __ANDROID__
+    static std::once_flag sCheckOnce;
+    static std::string sDeviceCodeName;
+    static int sDeviceApiLevel = 0;
+    std::call_once(sCheckOnce, [&]() {
+        sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+        sDeviceApiLevel = android_get_device_api_level();
+    });
+    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
+#else   //__ANDROID__
+    return true;
+#endif  //__ANDROID__
+}
+
 class MPEG4Source : public MediaTrackHelper {
 static const size_t  kMaxPcmFrameSize = 8192;
 public:
@@ -191,6 +212,8 @@
     size_t getNALLengthSizeFromAvcCsd(const uint8_t *data, const size_t size) const;
     size_t getNALLengthSizeFromHevcCsd(const uint8_t *data, const size_t size) const;
 
+    int64_t rescaleTime(int64_t value, int64_t scale, int64_t originScale) const;
+
     struct TrackFragmentHeaderInfo {
         enum Flags {
             kBaseDataOffsetPresent         = 0x01,
@@ -369,7 +392,9 @@
             return MEDIA_MIMETYPE_VIDEO_HEVC;
 
         case FOURCC("apv1"):
-            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv()) {
+            // Enable APV codec support from Android Baklava
+            if (!(isAtLeastRelease(36, "Baklava") &&
+                  com::android::media::extractor::flags::extractor_mp4_enable_apv())) {
                 ALOGV("APV support not enabled");
                 return "application/octet-stream";
             }
@@ -2633,11 +2658,10 @@
             break;
         }
 
-        case FOURCC("apvC"):
-        case FOURCC("av1C"):
-        {
-            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
-                chunk_type == FOURCC("apvC")) {
+        case FOURCC("apvC"): {
+            // Enable APV codec support from Android Baklava
+            if (!(isAtLeastRelease(36, "Baklava") &&
+                  com::android::media::extractor::flags::extractor_mp4_enable_apv())) {
                 ALOGV("APV support not enabled");
                 *offset += chunk_size;
                 break;
@@ -2650,6 +2674,39 @@
                 return NO_MEMORY;
             }
 
+            if (mDataSource->readAt(data_offset, buffer.get(), chunk_data_size) < chunk_data_size) {
+                return ERROR_IO;
+            }
+
+            if (mLastTrack == NULL)
+                return ERROR_MALFORMED;
+
+            int bytes_to_skip = 4;
+            if (chunk_data_size < bytes_to_skip) {
+                return ERROR_MALFORMED;
+            }
+            // apvC extends FullBox so first 4 bytes of version and flag should be zero.
+            for (int i = 0; i < bytes_to_skip; i++) {
+                if (buffer[i] != 0) {
+                    return ERROR_MALFORMED;
+                }
+            }
+
+            // Advance the buffer pointer by 4 bytes as it contains 4 bytes of flag and version.
+            AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+                                   buffer.get() + bytes_to_skip, chunk_data_size - bytes_to_skip);
+
+            *offset += chunk_size;
+            break;
+        }
+        case FOURCC("av1C"): {
+            auto buffer = heapbuffer<uint8_t>(chunk_data_size);
+
+            if (buffer.get() == NULL) {
+                ALOGE("b/28471206");
+                return NO_MEMORY;
+            }
+
             if (mDataSource->readAt(
                         data_offset, buffer.get(), chunk_data_size) < chunk_data_size) {
                 return ERROR_IO;
@@ -5206,8 +5263,12 @@
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
-    mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
-             !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
+    // Enable APV codec support from Android Baklava
+    mIsAPV = false;
+    if (isAtLeastRelease(36, "Baklava")) {
+        mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+                 !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
+    }
     mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
     mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
@@ -6200,6 +6261,63 @@
     return 1 + (data[14 + 7] & 3);
 }
 
+int64_t MPEG4Source::rescaleTime(int64_t value, int64_t scale, int64_t originScale) const {
+    // Rescale time: calculate value * scale / originScale
+    if (value == 0 || scale == 0) {
+        return 0;
+    }
+
+    CHECK(value > 0);
+    CHECK(scale > 0);
+    CHECK(originScale > 0);
+
+    if (originScale >= scale && (originScale % scale) == 0) {
+        int64_t factor = originScale / scale;
+        return value / factor;
+    } else if (originScale < scale && (scale % originScale) == 0) {
+        int64_t factor = scale / originScale;
+        if (__builtin_mul_overflow(value, factor, &value)) {
+            return std::numeric_limits<int64_t>::max();
+        }
+        return value;
+    } else if (originScale >= value && (originScale % value) == 0) {
+        int64_t factor = originScale / value;
+        return scale / factor;
+    } else if (originScale < value && (value % originScale) == 0) {
+        int64_t factor = value / originScale;
+        if (__builtin_mul_overflow(scale, factor, &value)) {
+            return std::numeric_limits<int64_t>::max();
+        }
+        return value;
+    } else {
+        int64_t rescaleValue;
+        if (!__builtin_mul_overflow(value, scale, &rescaleValue)) {
+            return rescaleValue / originScale;
+        } else {
+            // Divide the max gcd before calc scale/originScale
+            int64_t gcdOfScaleAndOriginScale = std::gcd(scale, originScale);
+            int64_t simpleScale = scale / gcdOfScaleAndOriginScale;
+            int64_t simpleOriginScale = originScale / gcdOfScaleAndOriginScale;
+            // Divide the max gcd before calc value/simpleOriginScale
+            int64_t gcdOfValueAndSimpleOriginScale = std::gcd(value, simpleOriginScale);
+            int64_t simpleValue = value / gcdOfValueAndSimpleOriginScale;
+            simpleOriginScale /= gcdOfValueAndSimpleOriginScale;
+
+            if (!__builtin_mul_overflow(simpleValue, simpleScale, &simpleValue)) {
+                return simpleValue / simpleOriginScale;
+            } else {
+                // Fallback using long double to calculate the rescale value
+                long double rescale = (long double)value / originScale * scale;
+                if (rescale > std::numeric_limits<int64_t>::max()) {
+                    return std::numeric_limits<int64_t>::max();
+                }
+
+                return rescale;
+            }
+        }
+    }
+}
+
 media_status_t MPEG4Source::read(
         MediaBufferHelper **out, const ReadOptions *options) {
     Mutex::Autolock autoLock(mLock);
@@ -6264,16 +6382,26 @@
             if( mode != ReadOptions::SEEK_FRAME_INDEX) {
                 int64_t elstInitialEmptyEditUs = 0, elstShiftStartUs = 0;
                 if (mElstInitialEmptyEditTicks > 0) {
-                    elstInitialEmptyEditUs = ((long double)mElstInitialEmptyEditTicks * 1000000) /
-                                             mTimescale;
+                    elstInitialEmptyEditUs = rescaleTime(mElstInitialEmptyEditTicks, 1000000,
+                            mTimescale);
+
                     /* Sample's composition time from ctts/stts entries are non-negative(>=0).
                      * Hence, lower bound on seekTimeUs is 0.
                      */
-                    seekTimeUs = std::max(seekTimeUs - elstInitialEmptyEditUs, (int64_t)0);
+                    if (__builtin_sub_overflow(seekTimeUs, elstInitialEmptyEditUs,
+                            &seekTimeUs) || seekTimeUs < 0) {
+                        ALOGW("seekTimeUs:%" PRId64 " would be a bogus value, set to 0",
+                                seekTimeUs);
+                        seekTimeUs = 0;
+                    }
                 }
                 if (mElstShiftStartTicks > 0) {
-                    elstShiftStartUs = ((long double)mElstShiftStartTicks * 1000000) / mTimescale;
-                    seekTimeUs += elstShiftStartUs;
+                    elstShiftStartUs = rescaleTime(mElstShiftStartTicks, 1000000, mTimescale);
+
+                    if (__builtin_add_overflow(seekTimeUs, elstShiftStartUs, &seekTimeUs)) {
+                        ALOGW("seek + elst shift start would be overflow, round to max");
+                        seekTimeUs = std::numeric_limits<int64_t>::max();
+                    }
                 }
                 ALOGV("shifted seekTimeUs:%" PRId64 ", elstInitialEmptyEditUs:%" PRIu64
                       ", elstShiftStartUs:%" PRIu64, seekTimeUs, elstInitialEmptyEditUs,
@@ -6711,16 +6839,26 @@
         ALOGV("seekTimeUs:%" PRId64, seekTimeUs);
         int64_t elstInitialEmptyEditUs = 0, elstShiftStartUs = 0;
         if (mElstInitialEmptyEditTicks > 0) {
-            elstInitialEmptyEditUs = ((long double)mElstInitialEmptyEditTicks * 1000000) /
-                                     mTimescale;
+            elstInitialEmptyEditUs = rescaleTime(mElstInitialEmptyEditTicks, 1000000,
+                    mTimescale);
+
             /* Sample's composition time from ctts/stts entries are non-negative(>=0).
              * Hence, lower bound on seekTimeUs is 0.
              */
-            seekTimeUs = std::max(seekTimeUs - elstInitialEmptyEditUs, (int64_t)0);
+            if (__builtin_sub_overflow(seekTimeUs, elstInitialEmptyEditUs,
+                    &seekTimeUs) || seekTimeUs < 0) {
+                ALOGW("seekTimeUs:%" PRId64 " would be a bogus value, set to 0",
+                        seekTimeUs);
+                seekTimeUs = 0;
+            }
         }
-        if (mElstShiftStartTicks > 0){
-            elstShiftStartUs = ((long double)mElstShiftStartTicks * 1000000) / mTimescale;
-            seekTimeUs += elstShiftStartUs;
+        if (mElstShiftStartTicks > 0) {
+            elstShiftStartUs = rescaleTime(mElstShiftStartTicks, 1000000, mTimescale);
+
+            if (__builtin_add_overflow(seekTimeUs, elstShiftStartUs, &seekTimeUs)) {
+                ALOGW("seek + elst shift start would be overflow, round to max");
+                seekTimeUs = std::numeric_limits<int64_t>::max();
+            }
         }
         ALOGV("shifted seekTimeUs:%" PRId64 ", elstInitialEmptyEditUs:%" PRIu64
               ", elstShiftStartUs:%" PRIu64, seekTimeUs, elstInitialEmptyEditUs,
diff --git a/media/module/extractors/mpeg2/Android.bp b/media/module/extractors/mpeg2/Android.bp
index c68ea51..74a90a8 100644
--- a/media/module/extractors/mpeg2/Android.bp
+++ b/media/module/extractors/mpeg2/Android.bp
@@ -64,9 +64,7 @@
         "libcutils",
         "libhidlbase",
         "libhidlmemory",
-        "libjsoncpp",
         "libmedia_helper",
-        "libprocessgroup",
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
         "libstagefright_mpeg2extractor",
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index 5f0f4fa..0cb332d 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -50,6 +50,7 @@
 
         "libstagefright_id3",
         "libstagefright_flacdec",
+        "libstagefright_foundation",
         "libstagefright_esds",
         "libstagefright_mpeg2support",
         "libstagefright_foundation_colorutils_ndk",
@@ -75,7 +76,6 @@
         "libmediandk",
         "libmedia",
         "libstagefright",
-        "libstagefright_foundation",
         "libcrypto",
         "libhidlmemory",
         "libhidlbase",
@@ -104,4 +104,6 @@
             "signed-integer-overflow",
         ],
     },
+
+    min_sdk_version: "29",
 }
diff --git a/media/module/foundation/include/media/stagefright/foundation/AString.h b/media/module/foundation/include/media/stagefright/foundation/AString.h
index 517774b..7ab6b7c 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AString.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AString.h
@@ -67,6 +67,9 @@
     void insert(const AString &from, size_t insertionPos);
     void insert(const char *from, size_t size, size_t insertionPos);
 
+    // Returns the index of the first occurrence of substring in the string, or -1 if not found.
+    // If start is specified, the search is limited to the substring starting at that position.
+    // The start parameter MUST NOT be greater than the string size.
     ssize_t find(const char *substring, size_t start = 0) const;
 
     size_t hash() const;
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
index dbda81b..27c1d22 100644
--- a/media/module/libapexcodecs/Android.bp
+++ b/media/module/libapexcodecs/Android.bp
@@ -19,7 +19,12 @@
 }
 
 cc_defaults {
-    name: "libapexcodecs-defaults",
+    name: "libcom.android.media.swcodec.apexcodecs-defaults",
+
+    defaults: [
+        "libcodec2-internal-defaults",
+    ],
+
     header_libs: [
         "libbase_headers",
     ],
@@ -31,8 +36,14 @@
         "libnativewindow",
     ],
 
+    static_libs: [
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
+
     export_include_dirs: ["include"],
 
+    local_include_dirs: ["private"],
+
     export_shared_lib_headers: [
         "libbase",
         "libnativewindow",
@@ -41,7 +52,7 @@
 }
 
 cc_library_headers {
-    name: "libapexcodecs-header",
+    name: "libcom.android.media.swcodec.apexcodecs-header",
     visibility: [
         "//frameworks/av/apex:__subpackages__",
         "//frameworks/av/media/codec2/hal/client",
@@ -50,8 +61,10 @@
 }
 
 cc_library {
-    name: "libapexcodecs-testing",
-    defaults: ["libapexcodecs-defaults"],
+    name: "libcom.android.media.swcodec.apexcodecs-testing",
+    defaults: ["libcom.android.media.swcodec.apexcodecs-defaults"],
+
+    srcs: ["tests/ApexCodecsStoreTestImpl.cpp"],
 
     visibility: [
         ":__subpackages__",
@@ -59,18 +72,20 @@
 }
 
 cc_library {
-    name: "libapexcodecs",
-    defaults: ["libapexcodecs-defaults"],
+    name: "libcom.android.media.swcodec.apexcodecs",
+    defaults: ["libcom.android.media.swcodec.apexcodecs-defaults"],
 
     visibility: [
         "//frameworks/av/apex:__subpackages__",
         "//frameworks/av/media/codec2/hal/client",
     ],
 
+    srcs: ["ApexCodecsStoreImpl.cpp"],
+
     min_sdk_version: "apex_inherit",
-    version_script: "libapexcodecs.map.txt",
+    version_script: "libcom.android.media.swcodec.apexcodecs.map.txt",
     stubs: {
-        symbol_file: "libapexcodecs.map.txt",
+        symbol_file: "libcom.android.media.swcodec.apexcodecs.map.txt",
         versions: ["36"],
     },
 
diff --git a/media/module/libapexcodecs/ApexCodecs.cpp b/media/module/libapexcodecs/ApexCodecs.cpp
index 7101677..8dec439 100644
--- a/media/module/libapexcodecs/ApexCodecs.cpp
+++ b/media/module/libapexcodecs/ApexCodecs.cpp
@@ -14,17 +14,89 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "ApexCodecs"
+// #define LOG_NDEBUG 0
+#include <android-base/logging.h>
+
 #include <new>
+#include <map>
+#include <vector>
+
+#include <C2ParamInternal.h>
+#include <android_media_swcodec_flags.h>
 
 #include <android-base/no_destructor.h>
 #include <apex/ApexCodecs.h>
+#include <apex/ApexCodecsImpl.h>
+#include <apex/ApexCodecsParam.h>
 
 // TODO: remove when we have real implementations
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wunused-parameter"
 
+using ::android::apexcodecs::ApexComponentIntf;
+using ::android::apexcodecs::ApexComponentStoreIntf;
+using ::android::base::ERROR;
+
+struct ApexCodec_Component {
+    explicit ApexCodec_Component(std::unique_ptr<ApexComponentIntf> &&comp)
+        : mComponent(std::move(comp)) {
+    }
+
+    ApexCodec_Status start() {
+        return mComponent->start();
+    }
+
+    ApexCodec_Status flush() {
+        return mComponent->flush();
+    }
+
+    ApexCodec_Status reset() {
+        return mComponent->reset();
+    }
+
+private:
+    std::unique_ptr<ApexComponentIntf> mComponent;
+};
+
 struct ApexCodec_ComponentStore {
-    ApexCodec_ComponentStore() = default;
+    ApexCodec_ComponentStore() : mStore((ApexComponentStoreIntf *)GetApexComponentStore()) {
+        if (mStore == nullptr) {
+            return;
+        }
+        mC2Traits = mStore->listComponents();
+        mTraits.reserve(mC2Traits.size());
+        for (const std::shared_ptr<const C2Component::Traits> &trait : mC2Traits) {
+            mTraits.push_back(ApexCodec_ComponentTraits{
+                trait->name.c_str(),                // name
+                trait->mediaType.c_str(),           // mediaType
+                (ApexCodec_Kind)trait->kind,        // kind
+                (ApexCodec_Domain)trait->domain,    // domain
+            });
+        }
+    }
+
+    ApexCodec_ComponentTraits *getTraits(size_t index) {
+        if (mStore == nullptr) {
+            return nullptr;
+        }
+        if (index < mTraits.size()) {
+            return mTraits.data() + index;
+        } else {
+            return nullptr;
+        }
+    }
+
+    std::unique_ptr<ApexComponentIntf> createComponent(const char *name) {
+        if (mStore == nullptr) {
+            return nullptr;
+        }
+        return mStore->createComponent(name);
+    }
+private:
+    ApexComponentStoreIntf *mStore;
+    std::vector<std::shared_ptr<const C2Component::Traits>> mC2Traits;
+    std::vector<ApexCodec_ComponentTraits> mTraits;
 };
 
 ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
@@ -34,27 +106,61 @@
 
 ApexCodec_ComponentTraits *ApexCodec_Traits_get(
         ApexCodec_ComponentStore *store, size_t index) {
-    return nullptr;
+    if (!android::media::swcodec::flags::apexcodecs_base()) {
+        return nullptr;
+    }
+    return store->getTraits(index);
 }
 
 ApexCodec_Status ApexCodec_Component_create(
         ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+    if (!android::media::swcodec::flags::apexcodecs_base()) {
+        return APEXCODEC_STATUS_NOT_FOUND;
+    }
+    if (store == nullptr) {
+        LOG(ERROR) << "ApexCodec_Component_create: store is nullptr";
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    if (name == nullptr) {
+        LOG(ERROR) << "ApexCodec_Component_create: name is nullptr";
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    if (comp == nullptr) {
+        LOG(ERROR) << "ApexCodec_Component_create: comp is nullptr";
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
     *comp = nullptr;
-    return APEXCODEC_STATUS_NOT_FOUND;
+    std::unique_ptr<ApexComponentIntf> compIntf = store->createComponent(name);
+    if (compIntf == nullptr) {
+        return APEXCODEC_STATUS_NOT_FOUND;
+    }
+    *comp = new ApexCodec_Component(std::move(compIntf));
+    return APEXCODEC_STATUS_OK;
 }
 
-void ApexCodec_Component_destroy(ApexCodec_Component *comp) {}
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {
+    delete comp;
+}
 
 ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
-    return APEXCODEC_STATUS_OMITTED;
+    if (comp == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return comp->start();
 }
 
 ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
-    return APEXCODEC_STATUS_OMITTED;
+    if (comp == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return comp->flush();
 }
 
 ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
-    return APEXCODEC_STATUS_OMITTED;
+    if (comp == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return comp->reset();
 }
 
 ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
@@ -62,16 +168,461 @@
     return nullptr;
 }
 
+// Implementation of the opaque ApexCodec_Buffer handle declared in ApexCodecs.h.
+// A buffer starts EMPTY and is bound at most once to either a linear or a
+// graphic payload; clear() returns it to the EMPTY state for reuse.
+struct ApexCodec_Buffer {
+public:
+    ApexCodec_Buffer()
+          : mType(APEXCODEC_BUFFER_TYPE_EMPTY) {
+    }
+
+    ~ApexCodec_Buffer() {
+    }
+
+    // Returns the buffer to its default-constructed (EMPTY) state.
+    void clear() {
+        mType = APEXCODEC_BUFFER_TYPE_EMPTY;
+        mBufferInfo.reset();
+        mLinearBuffer = {};
+        mGraphicBuffer = nullptr;
+        mConfigUpdates.reset();
+        mOwnedConfigUpdates.reset();
+    }
+
+    ApexCodec_BufferType getType() const {
+        return mType;
+    }
+
+    // Overwrites any previously set info.
+    void setBufferInfo(ApexCodec_BufferFlags flags, uint64_t frameIndex, uint64_t timestampUs) {
+        mBufferInfo.emplace(BufferInfo{flags, frameIndex, timestampUs});
+    }
+
+    // Binds a linear payload; only allowed while the buffer is EMPTY.
+    // A null |linearBuffer| binds an empty (nullptr/0) linear region.
+    ApexCodec_Status setLinearBuffer(const ApexCodec_LinearBuffer *linearBuffer) {
+        if (mType != APEXCODEC_BUFFER_TYPE_EMPTY) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        mType = APEXCODEC_BUFFER_TYPE_LINEAR;
+        if (linearBuffer == nullptr) {
+            mLinearBuffer.data = nullptr;
+            mLinearBuffer.size = 0;
+        } else {
+            mLinearBuffer = *linearBuffer;
+        }
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // Binds a graphic payload; only allowed while the buffer is EMPTY.
+    ApexCodec_Status setGraphicBuffer(AHardwareBuffer *graphicBuffer) {
+        if (mType != APEXCODEC_BUFFER_TYPE_EMPTY) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        mType = APEXCODEC_BUFFER_TYPE_GRAPHIC;
+        mGraphicBuffer = graphicBuffer;
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // Records borrowed config updates; rejected if updates were already set.
+    ApexCodec_Status setConfigUpdates(const ApexCodec_LinearBuffer *configUpdates) {
+        if (configUpdates == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (mConfigUpdates.has_value()) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        mOwnedConfigUpdates.reset();
+        mConfigUpdates.emplace(*configUpdates);
+        return APEXCODEC_STATUS_OK;
+    }
+
+    ApexCodec_Status getBufferInfo(
+            ApexCodec_BufferFlags *outFlags,
+            uint64_t *outFrameIndex,
+            uint64_t *outTimestampUs) const {
+        if (!mBufferInfo.has_value()) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        *outFlags = mBufferInfo->flags;
+        *outFrameIndex = mBufferInfo->frameIndex;
+        *outTimestampUs = mBufferInfo->timestampUs;
+        return APEXCODEC_STATUS_OK;
+    }
+
+    ApexCodec_Status getLinearBuffer(ApexCodec_LinearBuffer *outLinearBuffer) const {
+        if (mType != APEXCODEC_BUFFER_TYPE_LINEAR) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        *outLinearBuffer = mLinearBuffer;
+        return APEXCODEC_STATUS_OK;
+    }
+
+    ApexCodec_Status getGraphicBuffer(AHardwareBuffer **outGraphicBuffer) const {
+        if (mType != APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+            return APEXCODEC_STATUS_BAD_STATE;
+        }
+        *outGraphicBuffer = mGraphicBuffer;
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // NOTE(review): |*outOwnedByClient| is set when the updates live in this
+    // object's own storage (setOwnedConfigUpdates) — confirm the intended
+    // polarity of the "owned by client" name against the header docs.
+    ApexCodec_Status getConfigUpdates(
+            ApexCodec_LinearBuffer *outConfigUpdates,
+            bool *outOwnedByClient) const {
+        if (!mConfigUpdates.has_value()) {
+            return APEXCODEC_STATUS_NOT_FOUND;
+        }
+        *outConfigUpdates = mConfigUpdates.value();
+        *outOwnedByClient = mOwnedConfigUpdates.has_value();
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // Takes ownership of |configUpdates| and exposes it via getConfigUpdates.
+    void setOwnedConfigUpdates(std::vector<uint8_t> &&configUpdates) {
+        mOwnedConfigUpdates = std::move(configUpdates);
+        // Point at the vector we now own; reading data()/size() from the
+        // moved-from parameter here would be a use-after-move (its contents
+        // are unspecified after the move).
+        mConfigUpdates.emplace(ApexCodec_LinearBuffer{
+                mOwnedConfigUpdates->data(), mOwnedConfigUpdates->size() });
+    }
+
+private:
+    struct BufferInfo {
+        ApexCodec_BufferFlags flags;
+        uint64_t frameIndex;
+        uint64_t timestampUs;
+    };
+
+    ApexCodec_BufferType mType;
+    std::optional<BufferInfo> mBufferInfo;
+    // Zero-initialized so a freshly constructed buffer never carries garbage
+    // payload pointers (the original left these members uninitialized).
+    ApexCodec_LinearBuffer mLinearBuffer{};
+    AHardwareBuffer *mGraphicBuffer{nullptr};
+    std::optional<ApexCodec_LinearBuffer> mConfigUpdates;
+    std::optional<std::vector<uint8_t>> mOwnedConfigUpdates;
+};
+
+// Allocates an EMPTY buffer object; caller frees it with ApexCodec_Buffer_destroy.
+ApexCodec_Buffer *ApexCodec_Buffer_create() {
+    return new ApexCodec_Buffer;
+}
+
+// Frees a buffer created by ApexCodec_Buffer_create; safe on nullptr.
+void ApexCodec_Buffer_destroy(ApexCodec_Buffer *buffer) {
+    delete buffer;
+}
+
+// Resets |buffer| to the EMPTY state so it can be reused; no-op on nullptr.
+void ApexCodec_Buffer_clear(ApexCodec_Buffer *buffer) {
+    if (buffer == nullptr) {
+        return;
+    }
+    buffer->clear();
+}
+
+// Returns the buffer's current type; a null handle reads as EMPTY.
+ApexCodec_BufferType ApexCodec_Buffer_getType(ApexCodec_Buffer *buffer) {
+    if (buffer == nullptr) {
+        return APEXCODEC_BUFFER_TYPE_EMPTY;
+    }
+    return buffer->getType();
+}
+
+// Attaches flags/frame-index/timestamp metadata; silently ignores a null buffer.
+void ApexCodec_Buffer_setBufferInfo(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_BufferFlags flags,
+        uint64_t frameIndex,
+        uint64_t timestampUs) {
+    if (buffer == nullptr) {
+        return;
+    }
+    buffer->setBufferInfo(flags, frameIndex, timestampUs);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::setLinearBuffer.
+ApexCodec_Status ApexCodec_Buffer_setLinearBuffer(
+        ApexCodec_Buffer *buffer,
+        const ApexCodec_LinearBuffer *linearBuffer) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->setLinearBuffer(linearBuffer);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::setGraphicBuffer.
+ApexCodec_Status ApexCodec_Buffer_setGraphicBuffer(
+        ApexCodec_Buffer *buffer,
+        AHardwareBuffer *graphicBuffer) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->setGraphicBuffer(graphicBuffer);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::setConfigUpdates.
+ApexCodec_Status ApexCodec_Buffer_setConfigUpdates(
+        ApexCodec_Buffer *buffer,
+        const ApexCodec_LinearBuffer *configUpdates) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->setConfigUpdates(configUpdates);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::getBufferInfo.
+ApexCodec_Status ApexCodec_Buffer_getBufferInfo(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_BufferFlags *outFlags,
+        uint64_t *outFrameIndex,
+        uint64_t *outTimestampUs) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->getBufferInfo(outFlags, outFrameIndex, outTimestampUs);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::getLinearBuffer.
+ApexCodec_Status ApexCodec_Buffer_getLinearBuffer(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_LinearBuffer *outLinearBuffer) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->getLinearBuffer(outLinearBuffer);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::getGraphicBuffer.
+ApexCodec_Status ApexCodec_Buffer_getGraphicBuffer(
+        ApexCodec_Buffer *buffer,
+        AHardwareBuffer **outGraphicBuffer) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->getGraphicBuffer(outGraphicBuffer);
+}
+
+// Thin null-checked C shim over ApexCodec_Buffer::getConfigUpdates.
+ApexCodec_Status ApexCodec_Buffer_getConfigUpdates(
+        ApexCodec_Buffer *buffer,
+        ApexCodec_LinearBuffer *outConfigUpdates,
+        bool *outOwnedByClient) {
+    if (buffer == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return buffer->getConfigUpdates(outConfigUpdates, outOwnedByClient);
+}
+
+// Implementation of the opaque ApexCodec_SupportedValues handle: a snapshot of
+// a Codec2 C2FieldSupportedValues converted into the C-API value list.
+// The vector returned by getTypeAndValues points into this object and stays
+// valid only for the object's lifetime.
+struct ApexCodec_SupportedValues {
+public:
+    ApexCodec_SupportedValues(
+            const C2FieldSupportedValues &supportedValues,
+            const C2Value::type_t &numberType) {
+        mType = (ApexCodec_SupportedValuesType)supportedValues.type;
+        mNumberType = (ApexCodec_SupportedValuesNumberType)numberType;
+        switch (supportedValues.type) {
+            case C2FieldSupportedValues::RANGE: {
+                // Fixed layout: [min, max, step, num, denom]. Values are
+                // zero-initialized first; if ToApexCodecValue fails (e.g.
+                // NO_INIT number type) the slot stays zero.
+                mValues.insert(mValues.end(), 5, ApexCodec_Value{});
+                ToApexCodecValue(supportedValues.range.min,   numberType, &mValues[0]);
+                ToApexCodecValue(supportedValues.range.max,   numberType, &mValues[1]);
+                ToApexCodecValue(supportedValues.range.step,  numberType, &mValues[2]);
+                ToApexCodecValue(supportedValues.range.num,   numberType, &mValues[3]);
+                ToApexCodecValue(supportedValues.range.denom, numberType, &mValues[4]);
+                break;
+            }
+            case C2FieldSupportedValues::VALUES:
+            case C2FieldSupportedValues::FLAGS: {
+                for (size_t i = 0; i < supportedValues.values.size(); ++i) {
+                    mValues.emplace_back();
+                    ToApexCodecValue(supportedValues.values[i], numberType, &mValues[i]);
+                }
+                break;
+            }
+            default:
+                // Unrecognized type; initialize as empty.
+                mType = APEXCODEC_SUPPORTED_VALUES_EMPTY;
+                break;
+        }
+    }
+
+    ~ApexCodec_SupportedValues() {
+    }
+
+    // Exposes the converted type/number-type and a borrowed view of mValues.
+    ApexCodec_Status getTypeAndValues(
+            ApexCodec_SupportedValuesType *type,
+            ApexCodec_SupportedValuesNumberType *numberType,
+            ApexCodec_Value **values,
+            uint32_t *numValues) {
+        if (type == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (numberType == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (values == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (numValues == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        *type = mType;
+        *numberType = mNumberType;
+        switch (mType) {
+            case APEXCODEC_SUPPORTED_VALUES_EMPTY: {
+                *values = nullptr;
+                *numValues = 0;
+                break;
+            }
+            case APEXCODEC_SUPPORTED_VALUES_RANGE:
+            case APEXCODEC_SUPPORTED_VALUES_VALUES:
+            case APEXCODEC_SUPPORTED_VALUES_FLAGS: {
+                // A non-empty type with no stored values indicates a
+                // construction-time inconsistency.
+                if (mValues.empty()) {
+                    return APEXCODEC_STATUS_BAD_STATE;
+                }
+                *values = mValues.data();
+                *numValues = mValues.size();
+                break;
+            }
+            default:
+                return APEXCODEC_STATUS_BAD_STATE;
+        }
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // Copies one C2 primitive into the matching union member of |outValue|;
+    // returns false (leaving |outValue| untouched) for unsupported types.
+    static bool ToApexCodecValue(
+            const C2Value::Primitive &value,
+            const C2Value::type_t &type,
+            ApexCodec_Value *outValue) {
+        switch (type) {
+            case C2Value::NO_INIT:
+                return false;
+            case C2Value::INT32:
+                outValue->i32 = value.i32;
+                return true;
+            case C2Value::UINT32:
+                outValue->u32 = value.u32;
+                return true;
+            case C2Value::INT64:
+                outValue->i64 = value.i64;
+                return true;
+            case C2Value::UINT64:
+                outValue->u64 = value.u64;
+                return true;
+            case C2Value::FLOAT:
+                outValue->f = value.fp;
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    // Resolves the numeric type of |field| by matching its offset (and size)
+    // against the reflected struct descriptor; NO_INIT when not resolvable.
+    static C2Value::type_t GetFieldType(
+            const std::shared_ptr<C2ParamReflector> &reflector,
+            const C2ParamField& field) {
+        std::unique_ptr<C2StructDescriptor> desc = reflector->describe(
+                _C2ParamInspector::GetIndex(field));
+
+        for (const C2FieldDescriptor &fieldDesc : *desc) {
+            if (_C2ParamInspector::GetOffset(fieldDesc) == _C2ParamInspector::GetOffset(field)) {
+                if (_C2ParamInspector::GetSize(fieldDesc) != _C2ParamInspector::GetSize(field)) {
+                    // Size doesn't match.
+                    return C2Value::NO_INIT;
+                }
+                switch (fieldDesc.type()) {
+                    case C2FieldDescriptor::INT32:
+                    case C2FieldDescriptor::UINT32:
+                    case C2FieldDescriptor::INT64:
+                    case C2FieldDescriptor::UINT64:
+                    case C2FieldDescriptor::FLOAT:
+                        return (C2Value::type_t)fieldDesc.type();
+                    default:
+                        // Unrecognized type.
+                        return C2Value::NO_INIT;
+                }
+            }
+        }
+        return C2Value::NO_INIT;
+    }
+
+private:
+    ApexCodec_SupportedValuesType mType;
+    ApexCodec_SupportedValuesNumberType mNumberType;
+    std::vector<ApexCodec_Value> mValues;
+};
+
 ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
         ApexCodec_SupportedValues *supportedValues,
         ApexCodec_SupportedValuesType *type,
         ApexCodec_SupportedValuesNumberType *numberType,
         ApexCodec_Value **values,
         uint32_t *numValues) {
-    return APEXCODEC_STATUS_OMITTED;
+    // Null handle check here; the member validates the remaining out-params.
+    if (supportedValues == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return supportedValues->getTypeAndValues(type, numberType, values, numValues);
 }
 
-void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {}
+// Frees a supported-values object; safe on nullptr. Pointers previously
+// returned by getTypeAndValues become invalid after this call.
+void ApexCodec_SupportedValues_destroy(ApexCodec_SupportedValues *values) {
+    delete values;
+}
+
+// Implementation of the opaque ApexCodec_SettingResults handle: converts a
+// list of Codec2 C2SettingResult objects into C-API entries. All pointers
+// handed out by getResultAtIndex reference storage owned by this object and
+// remain valid only until it is destroyed.
+struct ApexCodec_SettingResults {
+public:
+    explicit ApexCodec_SettingResults(
+            const std::shared_ptr<C2ParamReflector> &reflector,
+            const std::vector<C2SettingResult> &results) : mReflector(reflector) {
+        for (const C2SettingResult &c2Result : results) {
+            mResults.emplace_back();
+            Entry &entry = mResults.back();
+            entry.failure = (ApexCodec_SettingResultFailure)c2Result.failure;
+            entry.field.index = _C2ParamInspector::GetIndex(c2Result.field.paramOrField);
+            entry.field.offset = _C2ParamInspector::GetOffset(c2Result.field.paramOrField);
+            entry.field.size = _C2ParamInspector::GetSize(c2Result.field.paramOrField);
+            if (c2Result.field.values) {
+                // fieldValues owns the converted object; field.values borrows it.
+                // The heap object stays put even if mResults reallocates.
+                entry.fieldValues = std::make_unique<ApexCodec_SupportedValues>(
+                        *c2Result.field.values,
+                        ApexCodec_SupportedValues::GetFieldType(mReflector,
+                                                                c2Result.field.paramOrField));
+                entry.field.values = entry.fieldValues.get();
+            } else {
+                entry.field.values = nullptr;
+            }
+            for (const C2ParamFieldValues &c2Conflict : c2Result.conflicts) {
+                entry.conflicts.emplace_back();
+                ApexCodec_ParamFieldValues &conflict = entry.conflicts.back();
+                conflict.index = _C2ParamInspector::GetIndex(c2Conflict.paramOrField);
+                conflict.offset = _C2ParamInspector::GetOffset(c2Conflict.paramOrField);
+                conflict.size = _C2ParamInspector::GetSize(c2Conflict.paramOrField);
+                if (c2Conflict.values) {
+                    // Same ownership pattern as fieldValues above.
+                    entry.conflictValues.emplace_back(std::make_unique<ApexCodec_SupportedValues>(
+                            *c2Conflict.values,
+                            ApexCodec_SupportedValues::GetFieldType(mReflector,
+                                                                    c2Conflict.paramOrField)));
+                    conflict.values = entry.conflictValues.back().get();
+                } else {
+                    conflict.values = nullptr;
+                }
+            }
+        }
+    }
+
+    ~ApexCodec_SettingResults() {
+    }
+
+    // Copies entry |index| into the out-params; conflicts are returned as a
+    // borrowed view into this object's storage.
+    ApexCodec_Status getResultAtIndex(
+            size_t index,
+            ApexCodec_SettingResultFailure *failure,
+            ApexCodec_ParamFieldValues *field,
+            ApexCodec_ParamFieldValues **conflicts,
+            size_t *numConflicts) {
+        if (failure == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (field == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (conflicts == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (numConflicts == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (index >= mResults.size()) {
+            return APEXCODEC_STATUS_NOT_FOUND;
+        }
+        *failure = mResults[index].failure;
+        *field = mResults[index].field;
+        *conflicts = mResults[index].conflicts.data();
+        *numConflicts = mResults[index].conflicts.size();
+        return APEXCODEC_STATUS_OK;
+    }
+private:
+    std::shared_ptr<C2ParamReflector> mReflector;
+    struct Entry {
+        ApexCodec_SettingResultFailure failure;
+        ApexCodec_ParamFieldValues field;
+        std::vector<ApexCodec_ParamFieldValues> conflicts;
+        // Owning storage backing the raw pointers inside |field|/|conflicts|.
+        std::unique_ptr<ApexCodec_SupportedValues> fieldValues;
+        std::vector<std::unique_ptr<ApexCodec_SupportedValues>> conflictValues;
+    };
+    std::vector<Entry> mResults;
+};
 
 ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
         ApexCodec_SettingResults *results,
@@ -80,10 +631,15 @@
         ApexCodec_ParamFieldValues *field,
         ApexCodec_ParamFieldValues **conflicts,
         size_t *numConflicts) {
-    return APEXCODEC_STATUS_OMITTED;
+    if (results == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return results->getResultAtIndex(index, failure, field, conflicts, numConflicts);
 }
 
-void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {}
+// Frees a setting-results object; safe on nullptr. Pointers previously
+// returned by getResultAtIndex become invalid after this call.
+void ApexCodec_SettingResults_destroy(ApexCodec_SettingResults *results) {
+    delete results;
+}
 
 ApexCodec_Status ApexCodec_Component_process(
         ApexCodec_Component *comp,
@@ -106,15 +662,94 @@
         uint32_t indices[],
         size_t numIndices,
         ApexCodec_LinearBuffer *config,
-        size_t *written) {
+        size_t *writtenOrRequired) {
     return APEXCODEC_STATUS_OMITTED;
 }
 
+// Implementation of the opaque ApexCodec_ParamDescriptors handle: an indexed
+// snapshot of Codec2 C2ParamDescriptor entries. Pointers returned by the
+// getters reference this object's storage and live only as long as it does.
+struct ApexCodec_ParamDescriptors {
+public:
+    explicit ApexCodec_ParamDescriptors(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>> &paramDescriptors) {
+        for (const std::shared_ptr<C2ParamDescriptor> &c2Descriptor : paramDescriptors) {
+            if (!c2Descriptor) {
+                continue;
+            }
+            uint32_t index = c2Descriptor->index();
+            // NOTE(review): operator[] reuses the entry when the same index
+            // appears twice, so dependencies would be appended again and
+            // mIndices would contain a duplicate — presumably inputs are
+            // unique per index; confirm with the caller.
+            Entry &entry = mDescriptors[index];
+            entry.index = index;
+            entry.attr = (ApexCodec_ParamAttribute)_C2ParamInspector::GetAttrib(*c2Descriptor);
+            entry.name = c2Descriptor->name();
+            for (const C2Param::Index &dependency : c2Descriptor->dependencies()) {
+                entry.dependencies.emplace_back((uint32_t)dependency);
+            }
+            mIndices.push_back(entry.index);
+        }
+    }
+
+    ~ApexCodec_ParamDescriptors() {
+    }
+
+    // Returns a borrowed view of all known parameter indices.
+    ApexCodec_Status getIndices(uint32_t **indices, size_t *numIndices) {
+        if (indices == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (numIndices == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        *indices = mIndices.data();
+        *numIndices = mIndices.size();
+        return APEXCODEC_STATUS_OK;
+    }
+
+    // Looks up one descriptor by index; out-pointers borrow this object's
+    // storage (name as a C string, dependencies as a uint32_t array).
+    ApexCodec_Status getDescriptor(
+            uint32_t index,
+            ApexCodec_ParamAttribute *attr,
+            const char **name,
+            uint32_t **dependencies,
+            size_t *numDependencies) {
+        if (attr == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (name == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (dependencies == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        if (numDependencies == nullptr) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        auto it = mDescriptors.find(index);
+        if (it == mDescriptors.end()) {
+            return APEXCODEC_STATUS_BAD_VALUE;
+        }
+        const Entry &entry = it->second;
+        *attr = entry.attr;
+        *name = entry.name.c_str();
+        // const_cast only adapts to the non-const C API signature; the data
+        // is not meant to be written through this pointer.
+        *dependencies = const_cast<uint32_t *>(entry.dependencies.data());
+        *numDependencies = entry.dependencies.size();
+        return APEXCODEC_STATUS_OK;
+    }
+
+private:
+    struct Entry {
+        uint32_t index;
+        ApexCodec_ParamAttribute attr;
+        C2String name;
+        std::vector<uint32_t> dependencies;
+    };
+    std::map<uint32_t, Entry> mDescriptors;
+    std::vector<uint32_t> mIndices;
+};
+
 ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
         ApexCodec_ParamDescriptors *descriptors,
         uint32_t **indices,
         size_t *numIndices) {
-    return APEXCODEC_STATUS_OMITTED;
+    // Null handle check here; the member validates the out-params.
+    if (descriptors == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return descriptors->getIndices(indices, numIndices);
 }
 
 ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
@@ -124,12 +759,14 @@
         const char **name,
         uint32_t **dependencies,
         size_t *numDependencies) {
-    return APEXCODEC_STATUS_OMITTED;
+    if (descriptors == nullptr) {
+        return APEXCODEC_STATUS_BAD_VALUE;
+    }
+    return descriptors->getDescriptor(index, attr, name, dependencies, numDependencies);
 }
 
-ApexCodec_Status ApexCodec_ParamDescriptors_release(
-        ApexCodec_ParamDescriptors *descriptors) {
-    return APEXCODEC_STATUS_OMITTED;
+// Frees a param-descriptors object; safe on nullptr. Pointers previously
+// returned by the getters become invalid after this call.
+void ApexCodec_ParamDescriptors_destroy(ApexCodec_ParamDescriptors *descriptors) {
+    delete descriptors;
 }
 
 ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
diff --git a/media/module/libapexcodecs/ApexCodecsImpl.cpp b/media/module/libapexcodecs/ApexCodecsImpl.cpp
new file mode 100644
index 0000000..a737c57
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecsImpl.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecsImpl.h>
+
+namespace android::apexcodecs {
+
+// Skeleton implementation of ApexComponentIntf wrapping a C2Component.
+// NOTE(review): every method is redeclared pure virtual, so despite the
+// "Impl" name this class is still abstract and cannot be instantiated —
+// presumably a placeholder to be filled in; confirm intent. Also consider
+// marking the single-argument constructor explicit and confirming the base
+// interface declares a virtual destructor.
+class ApexComponentImpl : public ApexComponentIntf {
+public:
+    ApexComponentImpl(const std::shared_ptr<C2Component> &comp) : mComponent(comp) {}
+    virtual ApexCodec_Status start() = 0;
+    virtual ApexCodec_Status flush() = 0;
+    virtual ApexCodec_Status reset() = 0;
+    virtual ApexCodec_Configurable *getConfigurable() = 0;
+    virtual ApexCodec_Status process(
+            const ApexCodec_Buffer *input,
+            ApexCodec_Buffer *output,
+            size_t *consumed,
+            size_t *produced) = 0;
+private:
+    // The wrapped Codec2 component; shared ownership with the creator.
+    std::shared_ptr<C2Component> mComponent;
+};
+
+}  // namespace android::apexcodecs
\ No newline at end of file
diff --git a/media/module/libapexcodecs/ApexCodecsStoreImpl.cpp b/media/module/libapexcodecs/ApexCodecsStoreImpl.cpp
new file mode 100644
index 0000000..3beb510
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecsStoreImpl.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecsImpl.h>
+
+namespace android::apexcodecs {
+
+// Stub store implementation: advertises no components and creates none.
+class ApexComponentStoreImpl : public ApexComponentStoreIntf {
+public:
+    ApexComponentStoreImpl() = default;
+
+    std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() const override {
+        return {};
+    }
+    // NOTE(review): declared `virtual` rather than `override`, unlike
+    // listComponents above — verify it actually overrides the base signature.
+    virtual std::unique_ptr<ApexComponentIntf> createComponent(const char *name [[maybe_unused]]) {
+        return nullptr;
+    }
+};
+
+}  // namespace android::apexcodecs
+
+// Returns the process-wide component store singleton as an opaque pointer.
+// NoDestructor keeps the instance alive through shutdown, avoiding
+// static-destruction-order issues; the returned pointer is never freed.
+extern "C" void *GetApexComponentStore() {
+    static ::android::base::NoDestructor<::android::apexcodecs::ApexComponentStoreImpl> sStore;
+    return sStore.get();
+}
\ No newline at end of file
diff --git a/media/module/libapexcodecs/TEST_MAPPING b/media/module/libapexcodecs/TEST_MAPPING
new file mode 100644
index 0000000..6ff6a24
--- /dev/null
+++ b/media/module/libapexcodecs/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "postsubmit": [
+    {
+      "name": "libcom.android.media.swcodec.apexcodecs-tests"
+    }
+  ]
+}
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecs.h b/media/module/libapexcodecs/include/apex/ApexCodecs.h
index b9f2e83..8dfee97 100644
--- a/media/module/libapexcodecs/include/apex/ApexCodecs.h
+++ b/media/module/libapexcodecs/include/apex/ApexCodecs.h
@@ -23,6 +23,7 @@
 #include <android/api-level.h>
 #include <android/hardware_buffer.h>
 #include <android/versioning.h>
+#include <apex/ApexCodecsParam.h>
 
 __BEGIN_DECLS
 
@@ -129,13 +130,13 @@
  */
 typedef struct ApexCodec_ComponentTraits {
     /**
-     * The name of the component.
+     * The name of the component in ASCII encoding.
      */
-    const char *name;
+    const char *_Nonnull name;
     /**
-     * The supported media type of the component.
+     * The supported media type of the component in ASCII encoding.
      */
-    const char *mediaType;
+    const char *_Nonnull mediaType;
     /**
      * The kind of the component.
      */
@@ -158,7 +159,7 @@
  *
  * \return component store object.
  */
-ApexCodec_ComponentStore *ApexCodec_GetComponentStore()
+ApexCodec_ComponentStore *_Nullable ApexCodec_GetComponentStore()
         __INTRODUCED_IN(36);
 
 /**
@@ -174,8 +175,8 @@
  * \param index index of the traits object to query
  * \return traits object at the index, or nullptr if the index is out of bounds.
  */
-ApexCodec_ComponentTraits *ApexCodec_Traits_get(
-        ApexCodec_ComponentStore *store, size_t index) __INTRODUCED_IN(36);
+ApexCodec_ComponentTraits *_Nullable ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *_Nonnull store, size_t index) __INTRODUCED_IN(36);
 
 /**
  * An opaque struct that represents a codec.
@@ -186,14 +187,16 @@
  * Create a component by the name.
  *
  * \param store the component store
- * \param name the name of the component
- * \param component out-param to be filled with the component; must not be null
+ * \param name the name of the component in ASCII encoding
+ * \param outComponent out-param to be filled with the component; must not be null
  * \return  APEXCODEC_STATUS_OK         if successful
- *          APEXCODEC_STATUS_NOT_FOUND  if the name is not found
+ * \return  APEXCODEC_STATUS_NOT_FOUND  if the name is not found
+ * \return  APEXCODEC_STATUS_CORRUPTED  if an unexpected error occurs
  */
 ApexCodec_Status ApexCodec_Component_create(
-        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp)
-        __INTRODUCED_IN(36);
+        ApexCodec_ComponentStore *_Nonnull store,
+        const char *_Nonnull name,
+        ApexCodec_Component *_Nullable *_Nonnull outComponent) __INTRODUCED_IN(36);
 
 /**
  * Destroy the component by the handle. It is invalid to call component methods on the handle
@@ -201,23 +204,29 @@
  *
  * \param comp the handle for the component
  */
-void ApexCodec_Component_destroy(ApexCodec_Component *comp) __INTRODUCED_IN(36);
+void ApexCodec_Component_destroy(ApexCodec_Component *_Nullable comp) __INTRODUCED_IN(36);
 
 /**
  * Start the component. The component is ready to process buffers after this call.
  *
  * \param comp the handle for the component
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if the component is already started or released
+ * \return  APEXCODEC_STATUS_CORRUPTED  if an unexpected error occurs
  */
 ApexCodec_Status ApexCodec_Component_start(
-        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+        ApexCodec_Component *_Nonnull comp) __INTRODUCED_IN(36);
 
 /**
  * Flush the component's internal states. This operation preserves the existing configurations.
  *
  * \param comp the handle for the component
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if the component is not started
+ * \return  APEXCODEC_STATUS_CORRUPTED  if an unexpected error occurs
  */
 ApexCodec_Status ApexCodec_Component_flush(
-        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+        ApexCodec_Component *_Nonnull comp) __INTRODUCED_IN(36);
 
 /**
  * Resets the component to the initial state, right after creation. Note that the configuration
@@ -225,9 +234,11 @@
  * set again to use the component.
  *
  * \param comp the handle for the component
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_CORRUPTED  if an unexpected error occurs
  */
 ApexCodec_Status ApexCodec_Component_reset(
-        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+        ApexCodec_Component *_Nonnull comp) __INTRODUCED_IN(36);
 
 /**
  * An opaque struct that represents a configurable part of the component.
@@ -243,8 +254,8 @@
  * \param comp the handle for the component
  * \return the configurable object handle
  */
-ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
-        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+ApexCodec_Configurable *_Nonnull ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *_Nonnull comp) __INTRODUCED_IN(36);
 
 /**
  * Enum that represents the flags for ApexCodec_Buffer.
@@ -267,7 +278,7 @@
  * Introduced in API 36.
  */
 typedef enum ApexCodec_BufferType : uint32_t {
-    APEXCODEC_BUFFER_TYPE_INVALID,
+    APEXCODEC_BUFFER_TYPE_EMPTY,
     APEXCODEC_BUFFER_TYPE_LINEAR,
     APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS,
     APEXCODEC_BUFFER_TYPE_GRAPHIC,
@@ -285,7 +296,7 @@
     /**
      * A pointer to the start of the buffer. This is not aligned.
      */
-    uint8_t *data;
+    uint8_t *_Nullable data;
     /**
      * Size of the buffer. The memory region between |data| (inclusive) and
      * |data + size| (exclusive) is assumed to be valid for read/write.
@@ -294,112 +305,211 @@
 } ApexCodec_LinearBuffer;
 
 /**
- * Struct that represents a buffer for ApexCodec_Component.
+ * Opaque struct that represents a buffer for ApexCodec_Component.
+ *
+ * The buffer object is used to pass data between the client and the component.
+ * The buffer object is created by ApexCodec_Buffer_create and destroyed by
+ * ApexCodec_Buffer_destroy. The main usage is to pass the buffer to
+ * ApexCodec_Component_process.
+ *
+ * The buffer object is empty by default. The client can set the buffer to be
+ * either linear or graphic by calling ApexCodec_Buffer_setLinearBuffer or
+ * ApexCodec_Buffer_setGraphicBuffer.
+ *
+ * The buffer object can be reused after it is cleared by
+ * ApexCodec_Buffer_clear. The client should set the buffer again before using
+ * it.
  *
  * Introduced in API 36.
  */
-typedef struct ApexCodec_Buffer {
-    /**
-     * Flags associated with the buffer.
-     */
-    ApexCodec_BufferFlags flags;
-    /**
-     * For input buffers client assign a unique sequential index for each buffer. For output buffers
-     * it is the same as the associated input buffer's frame index.
-     */
-    uint64_t frameIndex;
-    /**
-     * A timestamp associated with the buffer in microseconds.
-     */
-    uint64_t timestampUs;
-    /**
-     * The type of the buffer. The component may reject request to process a buffer with the wrong
-     * type. For example, a video decoder will reject an input buffer with type BUFFER_TYPE_GRAPHIC,
-     * or an output buffer with type BUFFER_TYPE_LINEAR.
-     */
-    ApexCodec_BufferType type;
-    /**
-     * The actual memory for the buffer.
-     */
-    union {
-        ApexCodec_LinearBuffer linear;
-        AHardwareBuffer *graphic;
-    } memory;
-    /**
-     * Config updates associated with the buffer. For input buffers these are sent to the component
-     * at the specific input frame. For output buffers these are config updates as a result of
-     * processing the buffer.
-     */
-    ApexCodec_LinearBuffer configUpdates;
-} ApexCodec_Buffer;
+typedef struct ApexCodec_Buffer ApexCodec_Buffer;
 
 /**
- * Enum that represents the query type for the supported values.
+ * Create an empty buffer object, with no underlying memory or buffer info set.
+ * ApexCodec_Buffer_getType will return APEXCODEC_BUFFER_TYPE_EMPTY, and other getters
+ * will throw APEXCODEC_STATUS_BAD_STATE.
  *
- * Introduced in API 36.
+ * \return the buffer object handle
  */
-typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
-    /** Query all possible supported values regardless of current configuration */
-    APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
-    /** Query supported values at current configuration */
-    APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
-} ApexCodec_SupportedValuesQueryType;
+ApexCodec_Buffer *_Nonnull ApexCodec_Buffer_create() __INTRODUCED_IN(36);
 
 /**
- * Enum that represents the type of the supported values.
+ * Destroy the buffer object. No-op if |buffer| is nullptr. Note that ApexCodec_Buffer does not own
+ * objects that are set from the client including linear buffer, graphic buffer, and config updates.
+ * The client therefore is responsible for freeing them if needed.
  *
- * Introduced in API 36.
+ * The exception is the config updates that are owned by the buffer object, which will be
+ * freed when the buffer object is destroyed.
+ *
+ * \param buffer the buffer object
  */
-typedef enum ApexCodec_SupportedValuesType : uint32_t {
-    /** The supported values are empty. */
-    APEXCODEC_SUPPORTED_VALUES_EMPTY,
-    /**
-     * The supported values are represented by a range defined with {min, max, step, num, den}.
-     *
-     * If step is 0 and num and denom are both 1, the supported values are any value, for which
-     * min <= value <= max.
-     *
-     * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
-     * successive supported values can be derived from previous values (starting at min), using the
-     * following formula:
-     *  v[0] = min
-     *  v[i] = v[i-1] * num / denom + step for i >= 1, while min < v[i] <= max.
-     */
-    APEXCODEC_SUPPORTED_VALUES_RANGE,
-    /** The supported values are represented by a list of values. */
-    APEXCODEC_SUPPORTED_VALUES_VALUES,
-    /** The supported values are represented by a list of flags. */
-    APEXCODEC_SUPPORTED_VALUES_FLAGS,
-} ApexCodec_SupportedValuesType;
+void ApexCodec_Buffer_destroy(ApexCodec_Buffer *_Nullable buffer) __INTRODUCED_IN(36);
 
 /**
- * Enum that represents numeric types of the supported values.
+ * Clear the buffer object to be the empty state; i.e. the same as the buffer object created by
+ * ApexCodec_Buffer_create.
  *
- * Introduced in API 36.
+ * Similarly to ApexCodec_Buffer_destroy, The client is responsible for freeing objects set to the
+ * buffer if needed.
+ *
+ * \param buffer the buffer object
  */
-typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
-    APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   = 0,
-    APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  = 1,
-    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
-    // RESERVED                            = 3,
-    APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  = 4,
-    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
-    // RESERVED                            = 6,
-    APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  = 7,
-} ApexCodec_SupportedValuesNumberType;
+void ApexCodec_Buffer_clear(ApexCodec_Buffer *_Nonnull buffer) __INTRODUCED_IN(36);
 
 /**
- * Union of primitive types.
+ * Set the buffer info to the buffer object.
  *
- * Introduced in API 36.
+ * For input buffers the buffer info is required; otherwise
+ * ApexCodec_Component_process will return APEXCODEC_STATUS_BAD_VALUE.
+ * For output buffers the buffer info is optional.
+ *
+ * When called multiple times, the last set values will be used.
+ *
+ * \param buffer            the buffer object
+ * \param flags             the flags associated with the buffer
+ * \param frameIndex        the frame index for the buffer
+ * \param timestampUs       the timestamp for the buffer in microseconds
  */
-typedef union {
-    int32_t i32;
-    uint32_t u32;
-    int64_t i64;
-    uint64_t u64;
-    float f;
-} ApexCodec_Value;
+void ApexCodec_Buffer_setBufferInfo(
+        ApexCodec_Buffer *_Nonnull buffer,
+        ApexCodec_BufferFlags flags,
+        uint64_t frameIndex,
+        uint64_t timestampUs) __INTRODUCED_IN(36);
+
+
+/**
+ * Set the linear buffer for the empty buffer object. It is an error to call this function if the
+ * buffer is not empty. For example, calling this function twice or after calling
+ * ApexCodec_Buffer_setGraphicBuffer will result in APEXCODEC_STATUS_BAD_STATE, unless the buffer
+ * is cleared first.
+ *
+ * If successful ApexCodec_Buffer_getType will return APEXCODEC_BUFFER_TYPE_LINEAR.
+ *
+ * \param buffer the buffer object
+ * \param linearBuffer  the linear buffer to be set; may be null to indicate an empty linear buffer.
+ *                      an empty linear buffer is used to communicate flags and/or config updates
+ *                      only to the component.
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if |buffer| is not empty
+ */
+ApexCodec_Status ApexCodec_Buffer_setLinearBuffer(
+        ApexCodec_Buffer *_Nonnull buffer,
+        const ApexCodec_LinearBuffer *_Nullable linearBuffer) __INTRODUCED_IN(36);
+
+/**
+ * Set the graphic buffer for the empty buffer object. It is an error to call this function if the
+ * buffer is not empty. For example, calling this function twice or after calling
+ * ApexCodec_Buffer_setLinearBuffer will result in APEXCODEC_STATUS_BAD_STATE, unless the buffer
+ * is cleared first.
+ *
+ * If successful ApexCodec_Buffer_getType will return APEXCODEC_BUFFER_TYPE_GRAPHIC.
+ *
+ * \param buffer        the buffer object
+ * \param graphicBuffer the graphic buffer to be set; may be null to indicate
+ *                      an empty graphic buffer.
+ *                      an empty graphic buffer is used to communicate flags and/or config updates
+ *                      only to the component.
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if |buffer| is not empty
+ */
+ApexCodec_Status ApexCodec_Buffer_setGraphicBuffer(
+        ApexCodec_Buffer *_Nonnull buffer,
+        AHardwareBuffer *_Nullable graphicBuffer) __INTRODUCED_IN(36);
+
+/**
+ * Set the config updates for the buffer object.
+ *
+ * For input buffers these are sent to the component at the specific input frame.
+ * For output buffers client should not set this; otherwise ApexCodec_Component_process will return
+ * APEXCODEC_STATUS_BAD_VALUE.
+ *
+ * This function cannot be called multiple times on the same buffer object until the buffer object
+ * is cleared. This is to prevent the client from accidentally overwriting the config updates
+ * before the client could free the existing config updates if needed.
+ *
+ * \param buffer            the buffer object
+ * \param configUpdates     the config updates to be set
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if config updates are already set
+ */
+ApexCodec_Status ApexCodec_Buffer_setConfigUpdates(
+        ApexCodec_Buffer *_Nonnull buffer,
+        const ApexCodec_LinearBuffer *_Nonnull configUpdates) __INTRODUCED_IN(36);
+
+/**
+ * Get the type of the buffer object.
+ *
+ * \param buffer the buffer object
+ * \return the type of the buffer object
+ */
+ApexCodec_BufferType ApexCodec_Buffer_getType(
+        ApexCodec_Buffer *_Nonnull buffer) __INTRODUCED_IN(36);
+
+/**
+ * Extract the buffer info from the buffer object.
+ *
+ * \param buffer            the buffer object
+ * \param outFlags          the flags associated with the buffer
+ * \param outFrameIndex     the frame index for the buffer
+ *                          for output buffers it is the same as the associated
+ *                          input buffer's frame index.
+ * \param outTimestampUs    the timestamp for the buffer in microseconds
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if buffer info was never set
+ */
+ApexCodec_Status ApexCodec_Buffer_getBufferInfo(
+        ApexCodec_Buffer *_Nonnull buffer,
+        ApexCodec_BufferFlags *_Nonnull outFlags,
+        uint64_t *_Nonnull outFrameIndex,
+        uint64_t *_Nonnull outTimestampUs) __INTRODUCED_IN(36);
+
+/**
+ * Extract the linear buffer from the buffer object.
+ *
+ * \param buffer            the buffer object
+ * \param outLinearBuffer   the linear buffer to be set
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if |buffer| does not contain a linear buffer
+ */
+ApexCodec_Status ApexCodec_Buffer_getLinearBuffer(
+        ApexCodec_Buffer *_Nonnull buffer,
+        ApexCodec_LinearBuffer *_Nonnull outLinearBuffer) __INTRODUCED_IN(36);
+
+/**
+ * Extract the graphic buffer from the buffer object.
+ *
+ * \param buffer            the buffer object
+ * \param outGraphicBuffer  the graphic buffer to be set
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_STATE  if |buffer| does not contain a graphic buffer
+ */
+ApexCodec_Status ApexCodec_Buffer_getGraphicBuffer(
+        ApexCodec_Buffer *_Nonnull buffer,
+        AHardwareBuffer *_Nullable *_Nonnull outGraphicBuffer) __INTRODUCED_IN(36);
+
+/**
+ * Extract the config updates from the buffer object.
+ * For output buffers these are config updates as a result of processing the buffer.
+ *
+ * \param buffer            the buffer object
+ * \param outConfigUpdates  the config updates to be set.
+ *                          if the config update was set by the client via
+ *                          ApexCodec_Buffer_setConfigUpdates, the config updates are the same as
+ *                          what was set before. |outOwnedByClient| will be set to true.
+ *                          if the config update was set by the component, |outOwnedByClient| will
+ *                          be set to false.
+ * \param outOwnedByClient  if true, the client owns the config updates and is responsible
+ *                          for freeing it.
+ *                          if false, the config updates are owned by the buffer object
+ *                          and the client should not free it; it will be freed when the buffer
+ *                          object is cleared or destroyed.
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_NOT_FOUND  if |buffer| does not contain config updates
+ */
+ApexCodec_Status ApexCodec_Buffer_getConfigUpdates(
+        ApexCodec_Buffer *_Nonnull buffer,
+        ApexCodec_LinearBuffer *_Nonnull outConfigUpdates,
+        bool *_Nonnull outOwnedByClient) __INTRODUCED_IN(36);
 
 /**
  * An opaque struct that represents the supported values of a parameter.
@@ -411,35 +521,38 @@
 /**
  * Extract information from ApexCodec_SupportedValues object.
  *
- * \param [in] supportedValues the supported values object
- * \param [out] type        pointer to be filled with the type of the supported values
- * \param [out] numberType  pointer to be filled with the numeric type of the supported values
- * \param [out] values      pointer to be filled with the array of the actual supported values.
+ * \param supportedValues   the supported values object
+ * \param outType           pointer to be filled with the type of the supported values
+ * \param outNumberType     pointer to be filled with the numeric type of the supported values
+ * \param outValues         pointer to be filled with the array of the actual supported values.
  *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: nullptr
  *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: {min, max, step, num, den}
  *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS:
  *                              the array of supported values/flags
  *                          the array is owned by the |supportedValues| object and the client
  *                          should not free it.
- * \param [out] numValues   pointer to be filled with the number of values.
+ * \param outNumValues      pointer to be filled with the number of values.
  *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: 0
  *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: 5
  *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS: varies
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_BAD_VALUE  if the parameters are bad
+ * \return  APEXCODEC_STATUS_CORRUPTED  if an unexpected error occurs
  */
 ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
-        ApexCodec_SupportedValues *supportedValues,
-        ApexCodec_SupportedValuesType *type,
-        ApexCodec_SupportedValuesNumberType *numberType,
-        ApexCodec_Value **values,
-        uint32_t *numValues) __INTRODUCED_IN(36);
+        ApexCodec_SupportedValues *_Nonnull supportedValues,
+        ApexCodec_SupportedValuesType *_Nonnull outType,
+        ApexCodec_SupportedValuesNumberType *_Nonnull outNumberType,
+        ApexCodec_Value *_Nullable *_Nonnull outValues,
+        uint32_t *_Nonnull outNumValues) __INTRODUCED_IN(36);
 
 /**
- * Release the supported values object.
+ * Destroy the supported values object. No-op if |values| is nullptr.
  *
  * \param values the supported values object
  */
-void ApexCodec_SupportedValues_release(
-        ApexCodec_SupportedValues *values) __INTRODUCED_IN(36);
+void ApexCodec_SupportedValues_destroy(
+        ApexCodec_SupportedValues *_Nullable values) __INTRODUCED_IN(36);
 
 /**
  * Struct that represents the result of ApexCodec_Configurable_config.
@@ -449,116 +562,61 @@
 typedef struct ApexCodec_SettingResults ApexCodec_SettingResults;
 
 /**
- * Enum that represents the failure code of ApexCodec_SettingResults.
- *
- * Introduced in API 36.
- */
-typedef enum ApexCodec_SettingResultFailure : uint32_t {
-    /** parameter type is not supported */
-    APEXCODEC_SETTING_RESULT_BAD_TYPE,
-    /** parameter is not supported on the specific port */
-    APEXCODEC_SETTING_RESULT_BAD_PORT,
-    /** parameter is not supported on the specific stream */
-    APEXCODEC_SETTING_RESULT_BAD_INDEX,
-    /** parameter is read-only */
-    APEXCODEC_SETTING_RESULT_READ_ONLY,
-    /** parameter mismatches input data */
-    APEXCODEC_SETTING_RESULT_MISMATCH,
-    /** strict parameter does not accept value for the field at all */
-    APEXCODEC_SETTING_RESULT_BAD_VALUE,
-    /** strict parameter field value conflicts with another settings */
-    APEXCODEC_SETTING_RESULT_CONFLICT,
-    /** strict parameter field is out of range due to other settings */
-    APEXCODEC_SETTING_RESULT_UNSUPPORTED,
-    /**
-     * field does not accept the requested parameter value at all. It has been corrected to
-     * the closest supported value. This failure mode is provided to give guidance as to what
-     * are the currently supported values for this field (which may be a subset of the at-all-
-     * potential values)
-     */
-    APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
-    /**
-     * requested parameter value is in conflict with an/other setting(s)
-     * and has been corrected to the closest supported value. This failure
-     * mode is given to provide guidance as to what are the currently supported values as well
-     * as to optionally provide suggestion to the client as to how to enable the requested
-     * parameter value.
-     */
-    APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
-} ApexCodec_SettingResultFailure;
-
-/**
- * Struct that represents a field and its supported values of a parameter.
- *
- * The offset and size of the field are where the field is located in the blob representation of
- * the parameter, as used in the ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
- * for example.
- *
- * Introduced in API 36.
- */
-typedef struct ApexCodec_ParamFieldValues {
-    /** index of the param */
-    uint32_t index;
-    /** offset of the param field */
-    uint32_t offset;
-    /** size of the param field */
-    uint32_t size;
-    /** currently supported values of the param field */
-    ApexCodec_SupportedValues *values;
-} ApexCodec_ParamFieldValues;
-
-/**
  * Extract the result of ApexCodec_Configurable_config.
  * The client can iterate through the results with index starting from 0 until this function returns
  * APEXCODEC_STATUS_NOT_FOUND.
  *
- * \param [in]  result  the result object
- * \param [in]  index   the index of the result to extract, starts from 0.
- * \param [out] failure pointer to be filled with the failure code
- * \param [out] field   pointer to be filled with the field that failed.
+ * \param result        the result object
+ * \param index         the index of the result to extract, starts from 0.
+ * \param outFailure    pointer to be filled with the failure code
+ * \param outField      pointer to be filled with the field that failed.
  *                      |field->value| is owned by the |result| object and the client should not
  *                      free it.
- * \param [out] conflicts   pointer to be filled with the array of conflicts.
+ * \param outConflicts      pointer to be filled with the array of conflicts.
  *                          nullptr if |numConflicts| is 0.
  *                          the array and its content is owned by the |result| object and the client
  *                          should not free it.
- * \param [out] numConflicts pointer to be filled with the number of conflicts
+ * \param outNumConflicts   pointer to be filled with the number of conflicts
  *                          may be 0 if there are no conflicts
- * \return APEXCODEC_STATUS_OK         if successful
- * \return APEXCODEC_STATUS_NOT_FOUND  if index is out of range
+ * \return  APEXCODEC_STATUS_OK         if successful
+ * \return  APEXCODEC_STATUS_NOT_FOUND  if index is out of range
+ * \return  APEXCODEC_STATUS_BAD_VALUE  if the parameters are bad
  */
 ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
-        ApexCodec_SettingResults *results,
+        ApexCodec_SettingResults *_Nonnull results,
         size_t index,
-        ApexCodec_SettingResultFailure *failure,
-        ApexCodec_ParamFieldValues *field,
-        ApexCodec_ParamFieldValues **conflicts,
-        size_t *numConflicts) __INTRODUCED_IN(36);
+        ApexCodec_SettingResultFailure *_Nonnull outFailure,
+        ApexCodec_ParamFieldValues *_Nonnull outField,
+        ApexCodec_ParamFieldValues *_Nullable *_Nonnull outConflicts,
+        size_t *_Nonnull outNumConflicts) __INTRODUCED_IN(36);
 
 /**
- * Release the setting result object.
+ * Destroy the setting result object. No-op if |results| is nullptr.
  *
  * \param result the setting result object
  */
-void ApexCodec_SettingResults_release(
-        ApexCodec_SettingResults *results) __INTRODUCED_IN(36);
+void ApexCodec_SettingResults_destroy(
+        ApexCodec_SettingResults *_Nullable results) __INTRODUCED_IN(36);
 
 /**
  * Process one frame from |input|, and produce one frame to |output| if possible.
- * When successfully filled, |output->memory.linear| has the size adjusted to the produced
+ *
+ * When successfully filled, |outProduced| has the size adjusted to the produced
  * output size, in case of linear buffers. |input->configUpdates| is applied with the input
  * buffer; |output->configUpdates| contains config updates as a result of processing the frame.
  *
- * \param comp      the component to process the buffers
- * \param input     the input buffer; when nullptr, the component should fill |output| if there are
- *                  any pending output buffers.
- * \param output    the output buffer, should not be nullptr.
- * \param consumed  the number of consumed bytes from the input buffer
- *                  set to 0 if no input buffer has been consumed, including |input| is nullptr.
- *                  for graphic buffers, any non-zero value means that the input buffer is consumed.
- * \param produced  the number of bytes produced on the output buffer
- *                  set to 0 if no output buffer has been produced.
- *                  for graphic buffers, any non-zero value means that the output buffer is filled.
+ * \param comp          the component to process the buffers
+ * \param input         the input buffer; when nullptr, the component should fill |output|
+ *                      if there are any pending output buffers.
+ * \param output        the output buffer, should not be nullptr.
+ * \param outConsumed   the number of consumed bytes from the input buffer
+ *                      set to 0 if no input buffer has been consumed, including |input| is nullptr.
+ *                      for graphic buffers, any non-zero value means that
+ *                      the input buffer is consumed.
+ * \param outProduced   the number of bytes produced on the output buffer
+ *                      set to 0 if no output buffer has been produced.
+ *                      for graphic buffers, any non-zero value means that
+ *                      the output buffer is filled.
  * \return APEXCODEC_STATUS_OK         if successful
  * \return APEXCODEC_STATUS_NO_MEMORY  if the output buffer is not suitable to hold the output frame
  *                                     the client should retry with a new output buffer;
@@ -570,11 +628,11 @@
  * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
  */
 ApexCodec_Status ApexCodec_Component_process(
-        ApexCodec_Component *comp,
-        const ApexCodec_Buffer *input,
-        ApexCodec_Buffer *output,
-        size_t *consumed,
-        size_t *produced) __INTRODUCED_IN(36);
+        ApexCodec_Component *_Nonnull comp,
+        const ApexCodec_Buffer *_Nullable input,
+        ApexCodec_Buffer *_Nonnull output,
+        size_t *_Nonnull outConsumed,
+        size_t *_Nonnull outProduced) __INTRODUCED_IN(36);
 
 /**
  * Configure the component with the given config.
@@ -590,48 +648,50 @@
  * information about the failures. See ApexCodec_SettingResultFailure and ApexCodec_SettingResults
  * for more details.
  *
- * \param [in]    comp   the handle for the component
- * \param [inout] config the config blob; after the call, the config blob is updated to the actual
- *                       config by the component.
- * \param [out]   result the result of the configuration.
- *                       the client should call ApexCodec_SettingResult_getResultAtIndex()
- *                       to extract the result. The result object is owned by the client and should
- *                       be released with ApexCodec_SettingResult_release().
- *                       |result| may be nullptr if empty.
+ * \param comp          the handle for the component
+ * \param inoutConfig   the config blob; after the call, the config blob is updated to the actual
+ *                      config by the component.
+ * \param outResult     the result of the configuration.
+ *                      the client should call ApexCodec_SettingResult_getResultAtIndex()
+ *                      to extract the result. The result object is owned by the client and should
+ *                      be released with ApexCodec_SettingResult_destroy().
+ *                      |result| may be nullptr if empty.
  * \return APEXCODEC_STATUS_OK         if successful
  * \return APEXCODEC_STATUS_BAD_VALUE  if the config is invalid
  * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state to be configured
  * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
  */
 ApexCodec_Status ApexCodec_Configurable_config(
-        ApexCodec_Configurable *comp,
-        ApexCodec_LinearBuffer *config,
-        ApexCodec_SettingResults **results) __INTRODUCED_IN(36);
+        ApexCodec_Configurable *_Nonnull comp,
+        ApexCodec_LinearBuffer *_Nonnull inoutConfig,
+        ApexCodec_SettingResults *_Nullable *_Nonnull outResults) __INTRODUCED_IN(36);
 
 /**
  * Query the component for the given indices.
  *
  * Parameter indices are defined in frameworks/av/media/codec2/core/include/C2Config.h.
  *
- * \param [in] comp         the handle for the component
- * \param [in] indices      the array of indices to query
- * \param [in] numIndices   the size of the indices array
- * \param [inout] config    the output buffer for the config blob, allocated by the client.
- *                          if the |config->size| was insufficient, it is set to the required size
- *                          and |config->data| remains unchanged.
- * \param [out] written     the number of bytes written to |config|.
+ * \param comp          the handle for the component
+ * \param indices       the array of indices to query
+ * \param numIndices    the size of the indices array
+ * \param inoutConfig   the output buffer for the config blob, allocated by the client.
+ *                      it can be null to query the required size.
+ * \param outWrittenOrRequired      the number of bytes written to |config|.
+ *                                  if the |config->size| was insufficient, it is set to the
+ *                                  required size.
+ *
  * \return APEXCODEC_STATUS_OK          if successful
- * \return APEXCODEC_STATUS_NO_MEMORY   if |config.size| is too small; |config.size| is updated to the
- *                                      requested buffer size.
- * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad. e.g. |indices|, |config|,
- *                                      |config->data| or |written| is nullptr.
+ * \return APEXCODEC_STATUS_NO_MEMORY   if |config.size| is too small; |config.size| is updated
+ *                                      to the requested buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad. e.g. |indices| or
+ *                                      |written| is nullptr.
  */
 ApexCodec_Status ApexCodec_Configurable_query(
-        ApexCodec_Configurable *comp,
-        uint32_t indices[],
+        ApexCodec_Configurable *_Nonnull comp,
+        uint32_t indices[_Nonnull],
         size_t numIndices,
-        ApexCodec_LinearBuffer *config,
-        size_t *written) __INTRODUCED_IN(36);
+        ApexCodec_LinearBuffer *_Nullable inoutConfig,
+        size_t *_Nonnull outWrittenOrRequired) __INTRODUCED_IN(36);
 
 /**
  * Struct that represents a parameter descriptor.
@@ -641,89 +701,68 @@
 typedef struct ApexCodec_ParamDescriptors ApexCodec_ParamDescriptors;
 
 /**
- * Enum that represents the attributes of a parameter.
- *
- * Introduced in API 36.
- */
-typedef enum ApexCodec_ParamAttribute : uint32_t {
-    /** parameter is required to be specified */
-    APEXCODEC_PARAM_IS_REQUIRED   = 1u << 0,
-    /** parameter retains its value */
-    APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
-    /** parameter is strict */
-    APEXCODEC_PARAM_IS_STRICT     = 1u << 2,
-    /** parameter is read-only */
-    APEXCODEC_PARAM_IS_READ_ONLY  = 1u << 3,
-    /** parameter shall not be visible to clients */
-    APEXCODEC_PARAM_IS_HIDDEN     = 1u << 4,
-    /** parameter shall not be used by framework (other than testing) */
-    APEXCODEC_PARAM_IS_INTERNAL   = 1u << 5,
-    /** parameter is publicly const (hence read-only) */
-    APEXCODEC_PARAM_IS_CONST      = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
-} ApexCodec_ParamAttribute;
-
-/**
  * Get the parameter indices of the param descriptors.
  *
- * \param [in] descriptors the param descriptors object
- * \param [out] indices the pointer to be filled with the array of the indices;
+ * \param descriptors   the param descriptors object
+ * \param outIndices    the pointer to be filled with the array of the indices;
  *                      the array is owned by |descriptors| and should not be freed by the client.
- * \param [out] numIndices the size of the indices array
+ * \param outNumIndices the size of the indices array
  * \return APEXCODEC_STATUS_OK          if successful
  * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |indices| or
  *                                  |numIndices| is nullptr.
  */
 ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
-        ApexCodec_ParamDescriptors *descriptors,
-        uint32_t **indices,
-        size_t *numIndices) __INTRODUCED_IN(36);
+        ApexCodec_ParamDescriptors *_Nonnull descriptors,
+        uint32_t *_Nullable *_Nonnull outIndices,
+        size_t *_Nonnull outNumIndices) __INTRODUCED_IN(36);
 
 /**
  * Get the descriptor of the param.
  *
- * \param [in] descriptors the param descriptors object
- * \param [in] index the index of the param
- * \param [out] attr the attribute of the param
- * \param [out] name    the pointer to be filled with the name of the param
+ * \param descriptors   the param descriptors object
+ * \param index         the index of the param
+ * \param outAttr       the attribute of the param
+ * \param outName       the pointer to be filled with the name of the param
  *                      the string is owned by |descriptors| and should not be freed by the client.
- * \param [out] dependencies the pointer to be filled with an array of the parameter indices
+ *                      the encoding is ASCII.
+ * \param outDependencies the pointer to be filled with an array of the parameter indices
  *                        that the parameter with |index| depends on.
  *                        may be null if empty.
  *                        the array is owned by |descriptors| and should not be freed by the client.
- * \param [out] numDependencies the number of dependencies
+ * \param outNumDependencies the number of dependencies
  * \return APEXCODEC_STATUS_OK          if successful
  * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |attr|, |name|,
  *                                  |dependencies| or |numDependencies| is nullptr.
  * \return APEXCODEC_STATUS_BAD_INDEX   if the index is not included in the param descriptors.
  */
 ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
-        ApexCodec_ParamDescriptors *descriptors,
+        ApexCodec_ParamDescriptors *_Nonnull descriptors,
         uint32_t index,
-        ApexCodec_ParamAttribute *attr,
-        const char **name,
-        uint32_t **dependencies,
-        size_t *numDependencies) __INTRODUCED_IN(36);
+        ApexCodec_ParamAttribute *_Nonnull outAttr,
+        const char *_Nullable *_Nonnull outName,
+        uint32_t *_Nullable *_Nonnull outDependencies,
+        size_t *_Nonnull outNumDependencies) __INTRODUCED_IN(36);
 
 /**
- * Release the param descriptors object.
+ * Destroy the param descriptors object. No-op if |descriptors| is nullptr.
  *
  * \param descriptors the param descriptors object
  */
-ApexCodec_Status ApexCodec_ParamDescriptors_release(
-        ApexCodec_ParamDescriptors *descriptors) __INTRODUCED_IN(36);
+void ApexCodec_ParamDescriptors_destroy(
+        ApexCodec_ParamDescriptors *_Nullable descriptors) __INTRODUCED_IN(36);
 
 /**
  * Query the component for the supported parameters.
  *
- * \param comp the handle for the component
- * \param descriptors   the pointer to be filled with the param descriptors object
- *                      the object should be released with ApexCodec_ParamDescriptors_release().
+ * \param comp              the handle for the component
+ * \param outDescriptors    the pointer to be filled with the param descriptors object
+ *                          the object should be released with ApexCodec_ParamDescriptors_destroy().
  * \return APEXCODEC_STATUS_OK          if successful
  * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors| is nullptr.
  */
 ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
-        ApexCodec_Configurable *comp,
-        ApexCodec_ParamDescriptors **descriptors) __INTRODUCED_IN(36);
+        ApexCodec_Configurable *_Nonnull comp,
+        ApexCodec_ParamDescriptors *_Nullable *_Nonnull outDescriptors) __INTRODUCED_IN(36);
 
 /**
  * Struct that represents the query for the supported values of a parameter.
@@ -747,22 +786,22 @@
     /** status of the query */
     ApexCodec_Status status;
 
-    /** supported values. must be released with ApexCodec_SupportedValues_release(). */
-    ApexCodec_SupportedValues *values;
+    /** supported values. must be released with ApexCodec_SupportedValues_destroy(). */
+    ApexCodec_SupportedValues *_Nullable values;
 } ApexCodec_SupportedValuesQuery;
 
 /**
  * Query the component for the supported values of the given indices.
  *
  * \param comp the handle for the component
- * \param queries the array of queries
+ * \param inoutQueries the array of queries
  * \param numQueries the size of the queries array
  * \return  APEXCODEC_STATUS_OK         if successful
- *          APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ * \return  APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
  */
 ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
-        ApexCodec_Configurable *comp,
-        ApexCodec_SupportedValuesQuery *queries,
+        ApexCodec_Configurable *_Nonnull comp,
+        ApexCodec_SupportedValuesQuery *_Nonnull inoutQueries,
         size_t numQueries) __INTRODUCED_IN(36);
 
 __END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecsParam.h b/media/module/libapexcodecs/include/apex/ApexCodecsParam.h
new file mode 100644
index 0000000..e3bca82
--- /dev/null
+++ b/media/module/libapexcodecs/include/apex/ApexCodecsParam.h
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+#include <stdint.h>
+
+#include <android/api-level.h>
+#include <android/versioning.h>
+
+__BEGIN_DECLS
+
+/**
+ * Enums and types that represent parameters in ApexCodecs.
+ *
+ * NOTE: Many of the constants and types mirror the ones in the Codec 2.0 API.
+ */
+
+/**
+ * Enum that represents the query type for the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
+    /** Query all possible supported values regardless of current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
+    /** Query supported values at current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
+} ApexCodec_SupportedValuesQueryType;
+
+/**
+ * Enum that represents the type of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesType : uint32_t {
+    /** The supported values are empty. */
+    APEXCODEC_SUPPORTED_VALUES_EMPTY,
+    /**
+     * The supported values are represented by a range defined with {min, max, step, num, denom}.
+     *
+     * If step is 0 and num and denom are both 1, the supported values are any value, for which
+     * min <= value <= max.
+     *
+     * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
+     * successive supported values can be derived from previous values (starting at min), using the
+     * following formula:
+     *  v[0] = min
+     *  v[i] = v[i-1] * num / denom + step for i >= 1, while min < v[i] <= max.
+     */
+    APEXCODEC_SUPPORTED_VALUES_RANGE,
+    /** The supported values are represented by a list of values. */
+    APEXCODEC_SUPPORTED_VALUES_VALUES,
+    /** The supported values are represented by a list of flags. */
+    APEXCODEC_SUPPORTED_VALUES_FLAGS,
+} ApexCodec_SupportedValuesType;
+
+/**
+ * Enum that represents numeric types of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
+    APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   = 0,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  = 1,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
+    // RESERVED                            = 3,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  = 4,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
+    // RESERVED                            = 6,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  = 7,
+} ApexCodec_SupportedValuesNumberType;
+
+/**
+ * Union of primitive types.
+ *
+ * Introduced in API 36.
+ */
+typedef union {
+    int32_t i32;
+    uint32_t u32;
+    int64_t i64;
+    uint64_t u64;
+    float f;
+} ApexCodec_Value;
+
+/**
+ * Enum that represents the failure code of ApexCodec_SettingResults.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SettingResultFailure : uint32_t {
+    /** parameter type is not supported */
+    APEXCODEC_SETTING_RESULT_BAD_TYPE,
+    /** parameter is not supported on the specific port */
+    APEXCODEC_SETTING_RESULT_BAD_PORT,
+    /** parameter is not supported on the specific stream */
+    APEXCODEC_SETTING_RESULT_BAD_INDEX,
+    /** parameter is read-only */
+    APEXCODEC_SETTING_RESULT_READ_ONLY,
+    /** parameter mismatches input data */
+    APEXCODEC_SETTING_RESULT_MISMATCH,
+    /** strict parameter does not accept value for the field at all */
+    APEXCODEC_SETTING_RESULT_BAD_VALUE,
+    /** strict parameter field value conflicts with another settings */
+    APEXCODEC_SETTING_RESULT_CONFLICT,
+    /** strict parameter field is out of range due to other settings */
+    APEXCODEC_SETTING_RESULT_UNSUPPORTED,
+    /**
+     * field does not accept the requested parameter value at all. It has been corrected to
+     * the closest supported value. This failure mode is provided to give guidance as to what
+     * are the currently supported values for this field (which may be a subset of the at-all-
+     * potential values)
+     */
+    APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
+    /**
+     * requested parameter value is in conflict with an/other setting(s)
+     * and has been corrected to the closest supported value. This failure
+     * mode is given to provide guidance as to what are the currently supported values as well
+     * as to optionally provide suggestion to the client as to how to enable the requested
+     * parameter value.
+     */
+    APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
+} ApexCodec_SettingResultFailure;
+
+/* forward-declaration for an opaque struct */
+struct ApexCodec_SupportedValues;
+
+/**
+ * Struct that represents a field and its supported values of a parameter.
+ *
+ * The offset and size of the field are where the field is located in the blob representation of
+ * the parameter, as used in ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
+ * for example.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamFieldValues {
+    /** index of the param */
+    uint32_t index;
+    /** offset of the param field */
+    uint32_t offset;
+    /** size of the param field */
+    uint32_t size;
+    /** currently supported values of the param field */
+    struct ApexCodec_SupportedValues *_Nullable values;
+} ApexCodec_ParamFieldValues;
+
+/**
+ * Enum that represents the attributes of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_ParamAttribute : uint32_t {
+    /** parameter is required to be specified */
+    APEXCODEC_PARAM_IS_REQUIRED   = 1u << 0,
+    /** parameter retains its value */
+    APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
+    /** parameter is strict */
+    APEXCODEC_PARAM_IS_STRICT     = 1u << 2,
+    /**
+     * parameter is read-only; the value may change if other parameters are changed,
+     * but the client cannot modify the value directly.
+     */
+    APEXCODEC_PARAM_IS_READ_ONLY  = 1u << 3,
+    /** parameter shall not be visible to clients */
+    APEXCODEC_PARAM_IS_HIDDEN     = 1u << 4,
+    /** parameter shall not be used by framework (other than testing) */
+    APEXCODEC_PARAM_IS_INTERNAL   = 1u << 5,
+    /**
+     * parameter is publicly const (hence read-only); the parameter never changes.
+     */
+    APEXCODEC_PARAM_IS_CONSTANT   = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
+} ApexCodec_ParamAttribute;
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libapexcodecs.map.txt b/media/module/libapexcodecs/libapexcodecs.map.txt
deleted file mode 100644
index 672cf89..0000000
--- a/media/module/libapexcodecs/libapexcodecs.map.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-LIBAPEXCODECS_36 { # introduced=36
-  global:
-    ApexCodec_Component_create; # apex
-    ApexCodec_Component_destroy; # apex
-    ApexCodec_Component_flush; # apex
-    ApexCodec_Component_getConfigurable; # apex
-    ApexCodec_Component_process; # apex
-    ApexCodec_Component_start; # apex
-    ApexCodec_Component_reset; # apex
-    ApexCodec_Configurable_config; # apex
-    ApexCodec_Configurable_query; # apex
-    ApexCodec_Configurable_querySupportedParams; # apex
-    ApexCodec_Configurable_querySupportedValues; # apex
-    ApexCodec_GetComponentStore; # apex
-    ApexCodec_ParamDescriptors_getDescriptor; # apex
-    ApexCodec_ParamDescriptors_getIndices; # apex
-    ApexCodec_ParamDescriptors_release; # apex
-    ApexCodec_SettingResults_getResultAtIndex; # apex
-    ApexCodec_SettingResults_release; # apex
-    ApexCodec_SupportedValues_getTypeAndValues; # apex
-    ApexCodec_SupportedValues_release; # apex
-    ApexCodec_Traits_get; # apex
-
-  local:
-    *;
-};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libcom.android.media.swcodec.apexcodecs.map.txt b/media/module/libapexcodecs/libcom.android.media.swcodec.apexcodecs.map.txt
new file mode 100644
index 0000000..e121ae3
--- /dev/null
+++ b/media/module/libapexcodecs/libcom.android.media.swcodec.apexcodecs.map.txt
@@ -0,0 +1,38 @@
+LIBAPEXCODECS_36 { # introduced=36
+  global:
+    ApexCodec_Buffer_clear; # apex
+    ApexCodec_Buffer_create; # apex
+    ApexCodec_Buffer_destroy; # apex
+    ApexCodec_Buffer_getBufferInfo; # apex
+    ApexCodec_Buffer_getConfigUpdates; # apex
+    ApexCodec_Buffer_getGraphicBuffer; # apex
+    ApexCodec_Buffer_getLinearBuffer; # apex
+    ApexCodec_Buffer_getType; # apex
+    ApexCodec_Buffer_setBufferInfo; # apex
+    ApexCodec_Buffer_setConfigUpdates; # apex
+    ApexCodec_Buffer_setGraphicBuffer; # apex
+    ApexCodec_Buffer_setLinearBuffer; # apex
+    ApexCodec_Component_create; # apex
+    ApexCodec_Component_destroy; # apex
+    ApexCodec_Component_flush; # apex
+    ApexCodec_Component_getConfigurable; # apex
+    ApexCodec_Component_process; # apex
+    ApexCodec_Component_reset; # apex
+    ApexCodec_Component_start; # apex
+    ApexCodec_Configurable_config; # apex
+    ApexCodec_Configurable_query; # apex
+    ApexCodec_Configurable_querySupportedParams; # apex
+    ApexCodec_Configurable_querySupportedValues; # apex
+    ApexCodec_GetComponentStore; # apex
+    ApexCodec_ParamDescriptors_destroy; # apex
+    ApexCodec_ParamDescriptors_getDescriptor; # apex
+    ApexCodec_ParamDescriptors_getIndices; # apex
+    ApexCodec_SettingResults_destroy; # apex
+    ApexCodec_SettingResults_getResultAtIndex; # apex
+    ApexCodec_SupportedValues_destroy; # apex
+    ApexCodec_SupportedValues_getTypeAndValues; # apex
+    ApexCodec_Traits_get; # apex
+
+  local:
+    *;
+};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/private/apex/ApexCodecsImpl.h b/media/module/libapexcodecs/private/apex/ApexCodecsImpl.h
new file mode 100644
index 0000000..f01af87
--- /dev/null
+++ b/media/module/libapexcodecs/private/apex/ApexCodecsImpl.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <vector>
+
+#include <C2Component.h>
+
+#include <apex/ApexCodecs.h>
+#include <apex/ApexCodecsParam.h>
+
+namespace android::apexcodecs {
+
+class ApexComponentIntf {
+public:
+    virtual ~ApexComponentIntf() = default;
+    virtual ApexCodec_Status start() = 0;
+    virtual ApexCodec_Status flush() = 0;
+    virtual ApexCodec_Status reset() = 0;
+    virtual ApexCodec_Configurable *getConfigurable() = 0;
+    virtual ApexCodec_Status process(
+            const ApexCodec_Buffer *input,
+            ApexCodec_Buffer *output,
+            size_t *consumed,
+            size_t *produced) = 0;
+};
+
+class ApexComponentStoreIntf {
+public:
+    virtual ~ApexComponentStoreIntf() = default;
+    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() const = 0;
+    virtual std::unique_ptr<ApexComponentIntf> createComponent(const char *name) = 0;
+};
+
+}  // namespace android::apexcodecs
+
+__BEGIN_DECLS
+
+void *GetApexComponentStore();
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/Android.bp b/media/module/libapexcodecs/tests/Android.bp
index 162d12c..1d444ad 100644
--- a/media/module/libapexcodecs/tests/Android.bp
+++ b/media/module/libapexcodecs/tests/Android.bp
@@ -20,11 +20,18 @@
 }
 
 cc_test {
-    name: "libapexcodecs_tests",
+    name: "libcom.android.media.swcodec.apexcodecs-tests",
     shared_libs: [
-        "libapexcodecs-testing",
+        "libbinder_ndk",
         "libcodec2",
+        "libnativewindow",
+    ],
+
+    static_libs: [
+        "libcom.android.media.swcodec.apexcodecs-testing",
     ],
 
     srcs: ["ApexCodecsTest.cpp"],
+
+    test_suites: ["general-tests"],
 }
diff --git a/media/module/libapexcodecs/tests/ApexCodecsStoreTestImpl.cpp b/media/module/libapexcodecs/tests/ApexCodecsStoreTestImpl.cpp
new file mode 100644
index 0000000..fb0e98e
--- /dev/null
+++ b/media/module/libapexcodecs/tests/ApexCodecsStoreTestImpl.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecsImpl.h>
+
+namespace android::apexcodecs::test {
+
+// This is a test implementation of ApexComponentStoreIntf.
+// It may contain a different set of components than the APEX for testing purposes.
+class ApexComponentStoreImpl : public ApexComponentStoreIntf {
+public:
+    ApexComponentStoreImpl() = default;
+
+    std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() const override {
+        return {};
+    }
+    virtual std::unique_ptr<ApexComponentIntf> createComponent(const char *name [[maybe_unused]]) {
+        return nullptr;
+    }
+};
+
+}  // namespace android::apexcodecs::test
+
+extern "C" void *GetApexComponentStore() {
+    using ::android::apexcodecs::test::ApexComponentStoreImpl;
+    static ::android::base::NoDestructor<ApexComponentStoreImpl> sStore;
+    return sStore.get();
+}
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
index cd5ebba..3338aff 100644
--- a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
+++ b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
@@ -1,7 +1,10 @@
 #include <C2.h>
 #include <C2Component.h>
 
+#include <android/hardware_buffer_aidl.h>
 #include <apex/ApexCodecs.h>
+#include <apex/ApexCodecsParam.h>
+#include <gtest/gtest.h>
 
 // static_asserts for enum values match
 static_assert((uint32_t)APEXCODEC_STATUS_OK        == (uint32_t)C2_OK);
@@ -37,7 +40,7 @@
 static_assert((uint32_t)APEXCODEC_FLAG_CORRUPT       == (uint32_t)C2FrameData::FLAG_CORRUPT);
 static_assert((uint32_t)APEXCODEC_FLAG_CODEC_CONFIG  == (uint32_t)C2FrameData::FLAG_CODEC_CONFIG);
 
-static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_INVALID        ==
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_EMPTY          ==
               (uint32_t)C2BufferData::INVALID);
 static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR         ==
               (uint32_t)C2BufferData::LINEAR);
@@ -97,4 +100,169 @@
 static_assert((uint32_t)APEXCODEC_PARAM_IS_READ_ONLY  == (uint32_t)C2ParamDescriptor::IS_READ_ONLY);
 static_assert((uint32_t)APEXCODEC_PARAM_IS_HIDDEN     == (uint32_t)C2ParamDescriptor::IS_HIDDEN);
 static_assert((uint32_t)APEXCODEC_PARAM_IS_INTERNAL   == (uint32_t)C2ParamDescriptor::IS_INTERNAL);
-static_assert((uint32_t)APEXCODEC_PARAM_IS_CONST      == (uint32_t)C2ParamDescriptor::IS_CONST);
\ No newline at end of file
+static_assert((uint32_t)APEXCODEC_PARAM_IS_CONSTANT   == (uint32_t)C2ParamDescriptor::IS_CONST);
+
+using ::aidl::android::hardware::HardwareBuffer;
+
+class SpApexCodecBuffer {
+public:
+    SpApexCodecBuffer() {
+        mBuffer = ApexCodec_Buffer_create();
+    }
+
+    ~SpApexCodecBuffer() {
+        ApexCodec_Buffer_destroy(mBuffer);
+    }
+
+    ApexCodec_Buffer* get() const {
+        return mBuffer;
+    }
+
+private:
+    ApexCodec_Buffer* mBuffer;
+};
+
+TEST(ApexCodecsTest, BufferCreateDestroyTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+}
+
+TEST(ApexCodecsTest, BufferInitialStateTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+    ASSERT_EQ(ApexCodec_Buffer_getType(buffer.get()), APEXCODEC_BUFFER_TYPE_EMPTY);
+
+    ApexCodec_BufferFlags flags;
+    uint64_t frameIndex;
+    uint64_t timestampUs;
+    ASSERT_EQ(ApexCodec_Buffer_getBufferInfo(buffer.get(), &flags, &frameIndex, &timestampUs),
+              APEXCODEC_STATUS_BAD_STATE);
+
+    ApexCodec_LinearBuffer linearBuffer;
+    ASSERT_EQ(ApexCodec_Buffer_getLinearBuffer(buffer.get(), &linearBuffer),
+              APEXCODEC_STATUS_BAD_STATE);
+
+    AHardwareBuffer* graphicBuffer;
+    ASSERT_EQ(ApexCodec_Buffer_getGraphicBuffer(buffer.get(), &graphicBuffer),
+              APEXCODEC_STATUS_BAD_STATE);
+
+    ApexCodec_LinearBuffer configUpdates;
+    bool ownedByClient;
+    ASSERT_EQ(ApexCodec_Buffer_getConfigUpdates(buffer.get(), &configUpdates, &ownedByClient),
+              APEXCODEC_STATUS_NOT_FOUND);
+}
+
+TEST(ApexCodecsTest, BufferSetGetInfoTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+
+    ApexCodec_Buffer_setBufferInfo(buffer.get(), APEXCODEC_FLAG_END_OF_STREAM, 123, 456);
+
+    ApexCodec_BufferFlags flags;
+    uint64_t frameIndex;
+    uint64_t timestampUs;
+    ASSERT_EQ(ApexCodec_Buffer_getBufferInfo(buffer.get(), &flags, &frameIndex, &timestampUs),
+              APEXCODEC_STATUS_OK);
+    ASSERT_EQ(flags, APEXCODEC_FLAG_END_OF_STREAM);
+    ASSERT_EQ(frameIndex, 123);
+    ASSERT_EQ(timestampUs, 456);
+}
+
+TEST(ApexCodecsTest, BufferSetGetLinearBufferTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+
+    uint8_t data[10];
+    ApexCodec_LinearBuffer linearBuffer;
+    linearBuffer.data = data;
+    linearBuffer.size = 10;
+    ASSERT_EQ(ApexCodec_Buffer_setLinearBuffer(buffer.get(), &linearBuffer), APEXCODEC_STATUS_OK);
+    ASSERT_EQ(ApexCodec_Buffer_getType(buffer.get()), APEXCODEC_BUFFER_TYPE_LINEAR);
+    // Clear the data to ensure that the buffer owns the data.
+    linearBuffer.data = nullptr;
+    linearBuffer.size = 0;
+    ASSERT_EQ(ApexCodec_Buffer_getLinearBuffer(buffer.get(), &linearBuffer), APEXCODEC_STATUS_OK);
+    ASSERT_EQ(linearBuffer.data, data);
+    ASSERT_EQ(linearBuffer.size, 10);
+
+    ASSERT_EQ(ApexCodec_Buffer_setLinearBuffer(buffer.get(), &linearBuffer),
+              APEXCODEC_STATUS_BAD_STATE);
+}
+
+TEST(ApexCodecsTest, BufferSetGetGraphicBufferTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+
+    HardwareBuffer hardwareBuffer;
+    AHardwareBuffer_Desc desc;
+    desc.width = 100;
+    desc.height = 100;
+    desc.layers = 1;
+    desc.format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
+    desc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
+    AHardwareBuffer* graphicBuffer = nullptr;
+    AHardwareBuffer_allocate(&desc, &graphicBuffer);
+    hardwareBuffer.reset(graphicBuffer);
+    ASSERT_NE(graphicBuffer, nullptr);
+    ASSERT_EQ(ApexCodec_Buffer_setGraphicBuffer(buffer.get(), graphicBuffer), APEXCODEC_STATUS_OK);
+    ASSERT_EQ(ApexCodec_Buffer_getType(buffer.get()), APEXCODEC_BUFFER_TYPE_GRAPHIC);
+    graphicBuffer = nullptr;
+    ASSERT_EQ(ApexCodec_Buffer_getGraphicBuffer(buffer.get(), &graphicBuffer), APEXCODEC_STATUS_OK);
+    ASSERT_NE(graphicBuffer, nullptr);
+
+    ASSERT_EQ(ApexCodec_Buffer_setGraphicBuffer(buffer.get(), graphicBuffer),
+              APEXCODEC_STATUS_BAD_STATE);
+}
+
+TEST(ApexCodecsTest, BufferSetGetConfigUpdatesTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+
+    uint8_t configData[20];
+    ApexCodec_LinearBuffer configUpdates;
+    configUpdates.data = configData;
+    configUpdates.size = 20;
+    ASSERT_EQ(ApexCodec_Buffer_setConfigUpdates(buffer.get(), &configUpdates), APEXCODEC_STATUS_OK);
+
+    bool ownedByClient;
+    ASSERT_EQ(ApexCodec_Buffer_getConfigUpdates(buffer.get(), &configUpdates, &ownedByClient),
+              APEXCODEC_STATUS_OK);
+    ASSERT_EQ(configUpdates.data, configData);
+    ASSERT_EQ(configUpdates.size, 20);
+    ASSERT_EQ(ownedByClient, false);
+
+    ASSERT_EQ(ApexCodec_Buffer_setConfigUpdates(buffer.get(), &configUpdates),
+              APEXCODEC_STATUS_BAD_STATE);
+}
+
+TEST(ApexCodecsTest, BufferClearTest) {
+    SpApexCodecBuffer buffer;
+    ASSERT_NE(buffer.get(), nullptr);
+
+    uint8_t data[10];
+    ApexCodec_LinearBuffer linearBuffer;
+    linearBuffer.data = data;
+    linearBuffer.size = 10;
+    ASSERT_EQ(ApexCodec_Buffer_setLinearBuffer(buffer.get(), &linearBuffer), APEXCODEC_STATUS_OK);
+
+    uint8_t configData[20];
+    ApexCodec_LinearBuffer configUpdates;
+    configUpdates.data = configData;
+    configUpdates.size = 20;
+    ASSERT_EQ(ApexCodec_Buffer_setConfigUpdates(buffer.get(), &configUpdates), APEXCODEC_STATUS_OK);
+
+    ApexCodec_Buffer_clear(buffer.get());
+    ASSERT_EQ(ApexCodec_Buffer_getType(buffer.get()), APEXCODEC_BUFFER_TYPE_EMPTY);
+
+    ApexCodec_BufferFlags flags;
+    uint64_t frameIndex;
+    uint64_t timestampUs;
+    ASSERT_EQ(ApexCodec_Buffer_getBufferInfo(buffer.get(), &flags, &frameIndex, &timestampUs),
+              APEXCODEC_STATUS_BAD_STATE);
+    ASSERT_EQ(ApexCodec_Buffer_getLinearBuffer(buffer.get(), &linearBuffer),
+              APEXCODEC_STATUS_BAD_STATE);
+    bool ownedByClient;
+
+    ASSERT_EQ(ApexCodec_Buffer_getConfigUpdates(buffer.get(), &configUpdates, &ownedByClient),
+              APEXCODEC_STATUS_NOT_FOUND);
+}
diff --git a/media/module/service.mediatranscoding/tests/Android.bp b/media/module/service.mediatranscoding/tests/Android.bp
index 9fb6d0d..97fbd4c 100644
--- a/media/module/service.mediatranscoding/tests/Android.bp
+++ b/media/module/service.mediatranscoding/tests/Android.bp
@@ -14,7 +14,6 @@
 cc_defaults {
     name: "mediatranscodingservice_test_defaults",
 
-    cpp_std: "gnu++17",
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 723bd77..24ad30e 100644
--- a/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -114,6 +114,8 @@
         enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
         int64_t clientId;
         int32_t sessionId;
+
+        friend bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs);
     };
 
 #define DECLARE_EVENT(action)                                  \
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index bdb6cdb..7a2ee74 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,9 +1,8 @@
 set noparent
 
-anothermark@google.com
+vmartensson@google.com
+nkapron@google.com
 febinthattil@google.com
-aprasath@google.com
-jsharkey@android.com
+shubhankarm@google.com
 jameswei@google.com
-rmojumder@google.com
-kumarashishg@google.com
\ No newline at end of file
+rmojumder@google.com
\ No newline at end of file
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 889ddad..7a76753 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -293,30 +293,20 @@
 AImageReader::init() {
     mHalUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
 
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    sp<IGraphicBufferProducer> gbProducer;
-    sp<IGraphicBufferConsumer> gbConsumer;
-    BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
-#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
     String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "m%d-%d-%d",
             mWidth, mHeight, mFormat, mUsage, mMaxImages, getpid(),
             createProcessUniqueId());
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mBufferItemConsumer = new BufferItemConsumer(mHalUsage, mMaxImages, /*controlledByApp*/ true);
-#else
-    mBufferItemConsumer =
-            new BufferItemConsumer(gbConsumer, mHalUsage, mMaxImages, /*controlledByApp*/ true);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    std::tie(mBufferItemConsumer, mSurface) =
+            BufferItemConsumer::create(mHalUsage, mMaxImages, /*controlledByApp*/ true);
     if (mBufferItemConsumer == nullptr) {
         ALOGE("Failed to allocate BufferItemConsumer");
         return AMEDIA_ERROR_UNKNOWN;
     }
 
 #if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mProducer = gbProducer;
-#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mProducer = mSurface->getIGraphicBufferProducer();
+#endif
     mBufferItemConsumer->setName(consumerName);
     mBufferItemConsumer->setFrameAvailableListener(mFrameListener);
     mBufferItemConsumer->setBufferFreedListener(mBufferRemovedListener);
@@ -338,18 +328,9 @@
         return AMEDIA_ERROR_UNKNOWN;
     }
     if (mUsage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT) {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         mBufferItemConsumer->setConsumerIsProtected(true);
-#else
-        gbConsumer->setConsumerIsProtected(true);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mSurface = mBufferItemConsumer->getSurface();
-#else
-    mSurface = new Surface(mProducer, /*controlledByApp*/true);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mSurface == nullptr) {
         ALOGE("Failed to create surface");
         return AMEDIA_ERROR_UNKNOWN;
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index b230df5..9971731 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -168,7 +168,7 @@
         {
              int32_t cbID;
              if (!msg->findInt32("callbackID", &cbID)) {
-                 ALOGE("kWhatAsyncNotify: callbackID is expected.");
+                 ALOGD("kWhatAsyncNotify: callbackID is expected.");
                  break;
              }
 
@@ -179,7 +179,7 @@
                  {
                      int32_t index;
                      if (!msg->findInt32("index", &index)) {
-                         ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                         ALOGD("CB_INPUT_AVAILABLE: index is expected.");
                          break;
                      }
 
@@ -203,23 +203,23 @@
                      int32_t flags;
 
                      if (!msg->findInt32("index", &index)) {
-                         ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
+                         ALOGD("CB_OUTPUT_AVAILABLE: index is expected.");
                          break;
                      }
                      if (!msg->findSize("offset", &offset)) {
-                         ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
+                         ALOGD("CB_OUTPUT_AVAILABLE: offset is expected.");
                          break;
                      }
                      if (!msg->findSize("size", &size)) {
-                         ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
+                         ALOGD("CB_OUTPUT_AVAILABLE: size is expected.");
                          break;
                      }
                      if (!msg->findInt64("timeUs", &timeUs)) {
-                         ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
+                         ALOGD("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                          break;
                      }
                      if (!msg->findInt32("flags", &flags)) {
-                         ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
+                         ALOGD("CB_OUTPUT_AVAILABLE: flags is expected.");
                          break;
                      }
 
@@ -245,7 +245,7 @@
                  {
                      sp<AMessage> format;
                      if (!msg->findMessage("format", &format)) {
-                         ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                         ALOGD("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                          break;
                      }
 
@@ -274,15 +274,15 @@
                      int32_t actionCode;
                      AString detail;
                      if (!msg->findInt32("err", &err)) {
-                         ALOGE("CB_ERROR: err is expected.");
+                         ALOGD("CB_ERROR: err is expected.");
                          break;
                      }
                      if (!msg->findInt32("actionCode", &actionCode)) {
-                         ALOGE("CB_ERROR: actionCode is expected.");
+                         ALOGD("CB_ERROR: actionCode is expected.");
                          break;
                      }
                      msg->findString("detail", &detail);
-                     ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
+                     ALOGI("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
                            err, StrMediaError(err).c_str(), actionCode, detail.c_str());
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
@@ -298,9 +298,52 @@
                      break;
                  }
 
+                 case MediaCodec::CB_CRYPTO_ERROR:
+                 {
+                     status_t err;
+                     int32_t actionCode;
+                     AString detail;
+                     if (!msg->findInt32("err", &err)) {
+                         ALOGD("CB_ERROR: err is expected.");
+                         break;
+                     }
+                     if (!msg->findInt32("actionCode", &actionCode)) {
+                         ALOGD("CB_ERROR: actionCode is expected.");
+                         break;
+                     }
+                     msg->findString("errorDetail", &detail);
+                     ALOGI("Codec reported crypto error(0x%x/%s), actionCode(%d), detail(%s)"
+                           " that cannot be passed to the NDK client",
+                           err, StrMediaError(err).c_str(), actionCode, detail.c_str());
+                     // TODO: handle crypto error. We could in theory pass this via the regular
+                     // onAsyncError callback, but clients are not expecting it, and would make
+                     // it harder to distinguish between crypto and non-crypto errors in the
+                     // future.
+                     break;
+                 }
+
+                 case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
+                 {
+                     ALOGV("kWhatAsyncNotify: ignoring CB_REQUIRED_RESOURCES_CHANGED event.");
+                     break;
+                 }
+
+                 case MediaCodec::CB_METRICS_FLUSHED:
+                 {
+                     ALOGV("kWhatAsyncNotify: ignoring CB_METRICS_FLUSHED event.");
+                     break;
+                 }
+
+                 case MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE:
+                 {
+                     ALOGD("kWhatAsyncNotify: ignoring CB_LARGE_FRAME_OUTPUT_AVAILABLE event.");
+                     // TODO: handle large frame output in NDK API.
+                     break;
+                 }
+
                  default:
                  {
-                     ALOGE("kWhatAsyncNotify: callbackID(%d) is unexpected.", cbID);
+                     ALOGD("kWhatAsyncNotify: callbackID(%d) is unexpected.", cbID);
                      break;
                  }
              }
@@ -390,7 +433,7 @@
         }
 
         default:
-            ALOGE("shouldn't be here");
+            ALOGD("unexpected message received: %d. shouldn't be here", msg->what());
             break;
     }
 
@@ -672,7 +715,13 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->data();
+
+        // When an input buffer is provided to the application, it is essentially
+        // empty. Ignore its offset as we will set it upon queueInputBuffer.
+        // This actually works as expected as we do not provide visibility of
+        // a potential internal offset to the client, so it is equivalent to
+        // setting the offset to 0 prior to returning the buffer to the client.
+        return abuf->base();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -689,7 +738,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->data();
+        return abufs[idx]->base();
     }
     ALOGE("couldn't get input buffers");
     return NULL;
@@ -704,8 +753,12 @@
             return NULL;
         }
 
+        // Note that we do not provide visibility of the internal offset to the
+        // client, but it also does not make sense to provide visibility of the
+        // buffer capacity vs the actual size.
+
         if (out_size != NULL) {
-            *out_size = abuf->capacity();
+            *out_size = abuf->size();
         }
         return abuf->data();
     }
@@ -718,7 +771,7 @@
             return NULL;
         }
         if (out_size != NULL) {
-            *out_size = abufs[idx]->capacity();
+            *out_size = abufs[idx]->size();
         }
         return abufs[idx]->data();
     }
@@ -748,7 +801,8 @@
     requestActivityNotification(mData);
     switch (ret) {
         case OK:
-            info->offset = offset;
+            // the output buffer address is already offset in AMediaCodec_getOutputBuffer()
+            info->offset = 0;
             info->size = size;
             info->flags = flags;
             info->presentationTimeUs = presentationTimeUs;
@@ -824,7 +878,12 @@
         return translate_error(err);
     }
 
-    *surface = new Surface(igbp);
+    // This will increment default strongCount on construction.  It will be decremented
+    // on function exit.
+    auto spSurface = sp<Surface>::make(igbp);
+    *surface = spSurface.get();
+    // This will increment a private strongCount.  It will be decremented in
+    // ANativeWindow_release.
     ANativeWindow_acquire(*surface);
     return AMEDIA_OK;
 }
diff --git a/media/ndk/NdkMediaCodecInfo.cpp b/media/ndk/NdkMediaCodecInfo.cpp
index 82ceb61..baf7401 100644
--- a/media/ndk/NdkMediaCodecInfo.cpp
+++ b/media/ndk/NdkMediaCodecInfo.cpp
@@ -25,18 +25,6 @@
 
 extern "C" {
 
-// Utils
-
-EXPORT
-void AIntRange_delete(AIntRange *range) {
-    free(range);
-}
-
-EXPORT
-void ADoubleRange_delete(ADoubleRange *range) {
-    free(range);
-}
-
 // AMediaCodecInfo
 
 EXPORT
@@ -49,31 +37,39 @@
 }
 
 EXPORT
-bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) {
-    return info->mInfo->isEncoder();
+AMediaCodecKind AMediaCodecInfo_getKind(const AMediaCodecInfo* info) {
+    if (info == nullptr) {
+        return AMediaCodecKind_INVALID;
+    }
+
+    return info->mInfo->isEncoder() ? AMediaCodecKind_ENCODER : AMediaCodecKind_DECODER;
 }
 
 EXPORT
-bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) {
+int32_t AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) {
+    if (info == nullptr) {
+        return -1;
+    }
+
     int32_t attributes = info->mInfo->getAttributes();
-    return (attributes & android::MediaCodecInfo::kFlagIsVendor);
+    return (attributes & android::MediaCodecInfo::kFlagIsVendor) ? 1 : 0;
 }
 
 EXPORT
 AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(const AMediaCodecInfo *info) {
     if (info == nullptr || info->mInfo == nullptr) {
-        return (AMediaCodecType)0;
+        return AMediaCodecType_INVALID_CODEC_INFO;
     }
 
     int32_t attributes = info->mInfo->getAttributes();
 
     if (attributes & android::MediaCodecInfo::kFlagIsSoftwareOnly) {
-        return SOFTWARE_ONLY;
+        return AMediaCodecType_SOFTWARE_ONLY;
     }
     if (attributes & android::MediaCodecInfo::kFlagIsHardwareAccelerated) {
-        return HARDWARE_ACCELERATED;
+        return AMediaCodecType_HARDWARE_ACCELERATED;
     }
-    return SOFTWARE_WITH_DEVICE_ACCESS;
+    return AMediaCodecType_SOFTWARE_WITH_DEVICE_ACCESS;
 }
 
 EXPORT
@@ -96,7 +92,7 @@
 
 EXPORT
 int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info, const char *featureName) {
-    if (featureName == nullptr) {
+    if (info == nullptr || featureName == nullptr) {
         return -1;
     }
     return info->mCodecCaps->isFeatureSupported(std::string(featureName));
@@ -104,7 +100,7 @@
 
 EXPORT
 int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info, const char *featureName) {
-    if (featureName == nullptr) {
+    if (info == nullptr || featureName == nullptr) {
         return -1;
     }
     return info->mCodecCaps->isFeatureRequired(std::string(featureName));
@@ -112,7 +108,7 @@
 
 EXPORT
 int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info, const AMediaFormat *format) {
-    if (format == nullptr) {
+    if (info == nullptr || format == nullptr) {
         return -1;
     }
 
@@ -125,7 +121,7 @@
 EXPORT
 media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
         const ACodecAudioCapabilities **outAudioCaps) {
-    if (info == nullptr || info->mInfo == nullptr) {
+    if (info == nullptr || info->mInfo == nullptr || outAudioCaps == nullptr) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
@@ -141,7 +137,7 @@
 EXPORT
 media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
         const ACodecVideoCapabilities **outVideoCaps) {
-    if (info == nullptr || info->mInfo == nullptr) {
+    if (info == nullptr || info->mInfo == nullptr || outVideoCaps == nullptr) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
@@ -157,7 +153,7 @@
 EXPORT
 media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
         const ACodecEncoderCapabilities **outEncoderCaps) {
-    if (info == nullptr || info->mInfo == nullptr) {
+    if (info == nullptr || info->mInfo == nullptr || outEncoderCaps == nullptr) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
@@ -264,19 +260,17 @@
 }
 
 EXPORT
-media_status_t ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) {
-    if (performancePoint == nullptr) {
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
+void ACodecPerformancePoint_destroy(ACodecPerformancePoint *performancePoint) {
     delete performancePoint;
-
-    return AMEDIA_OK;
 }
 
 EXPORT
-bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+int32_t ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
         const AMediaFormat *format) {
+    if (performancePoint == nullptr || format == nullptr) {
+        return -1;
+    }
+
     sp<AMessage> nativeFormat;
     AMediaFormat_getFormat(format, &nativeFormat);
 
@@ -284,14 +278,22 @@
 }
 
 EXPORT
-bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+int32_t ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
         const ACodecPerformancePoint *another) {
+    if (one == nullptr || another == nullptr) {
+        return -1;
+    }
+
     return one->mPerformancePoint->covers(*(another->mPerformancePoint));
 }
 
 EXPORT
-bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+int32_t ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
         const ACodecPerformancePoint *another) {
+    if (one == nullptr || another == nullptr) {
+        return -1;
+    }
+
     return one->mPerformancePoint->equals(*(another->mPerformancePoint));
 }
 
@@ -447,17 +449,25 @@
 }
 
 EXPORT
-media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
-        const ACodecVideoCapabilities *videoCaps,
-        const ACodecPerformancePoint **outPerformancePointArray, size_t *outCount) {
-    if (videoCaps == nullptr) {
+media_status_t ACodecVideoCapabilities_getNextSupportedPerformancePoint(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        const ACodecPerformancePoint* _Nullable * _Nonnull outPerformancePoint) {
+    if (videoCaps == nullptr || outPerformancePoint == nullptr) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
-    *outPerformancePointArray = videoCaps->mPerformancePoints.data();
-    *outCount = videoCaps->mPerformancePoints.size();
-
-    return AMEDIA_OK;
+    bool found = *outPerformancePoint == nullptr;
+    for (const ACodecPerformancePoint& pp : videoCaps->mPerformancePoints) {
+        if (found) {
+            *outPerformancePoint = &pp;
+            return AMEDIA_OK;
+        }
+        if (*outPerformancePoint == &pp) {
+            found = true;
+        }
+    }
+    *outPerformancePoint = nullptr;
+    return AMEDIA_ERROR_UNSUPPORTED;
 }
 
 EXPORT
@@ -509,12 +519,30 @@
 }
 
 int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
-        const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) {
+        const ACodecEncoderCapabilities *encoderCaps, ABitrateMode mode) {
     if (encoderCaps == nullptr) {
         return -1;
     }
     return encoderCaps->mEncoderCaps->isBitrateModeSupported(mode);
 }
 
+// Feature Names
+
+extern const char* AMediaCodecInfo_FEATURE_AdaptivePlayback     = "adaptive-playback";
+extern const char* AMediaCodecInfo_FEATURE_SecurePlayback       = "secure-playback";
+extern const char* AMediaCodecInfo_FEATURE_TunneledPlayback     = "tunneled-playback";
+extern const char* AMediaCodecInfo_FEATURE_DynamicTimestamp     = "dynamic-timestamp";
+extern const char* AMediaCodecInfo_FEATURE_FrameParsing         = "frame-parsing";
+extern const char* AMediaCodecInfo_FEATURE_MultipleFrames       = "multiple-frames";
+extern const char* AMediaCodecInfo_FEATURE_PartialFrame         = "partial-frame";
+extern const char* AMediaCodecInfo_FEATURE_IntraRefresh         = "intra-refresh";
+extern const char* AMediaCodecInfo_FEATURE_LowLatency           = "low-latency";
+extern const char* AMediaCodecInfo_FEATURE_QpBounds             = "qp-bounds";
+extern const char* AMediaCodecInfo_FEATURE_EncodingStatistics   = "encoding-statistics";
+extern const char* AMediaCodecInfo_FEATURE_HdrEditing           = "hdr-editing";
+extern const char* AMediaCodecInfo_FEATURE_HlgEditing           = "hlg-editing";
+extern const char* AMediaCodecInfo_FEATURE_DynamicColorAspects  = "dynamic-color-aspects";
+extern const char* AMediaCodecInfo_FEATURE_Roi                  = "region-of-interest";
+extern const char* AMediaCodecInfo_FEATURE_DetachedSurface      = "detached-surface";
 
 }
\ No newline at end of file
diff --git a/media/ndk/NdkMediaCodecInfoPriv.h b/media/ndk/NdkMediaCodecInfoPriv.h
index 6d9188b..72e6382 100644
--- a/media/ndk/NdkMediaCodecInfoPriv.h
+++ b/media/ndk/NdkMediaCodecInfoPriv.h
@@ -99,8 +99,8 @@
     std::shared_ptr<const ACodecVideoCapabilities> mAVideoCaps;
     std::shared_ptr<const ACodecEncoderCapabilities> mAEncoderCaps;
 
-    AMediaCodecInfo(std::string name, android::sp<android::MediaCodecInfo> info,
-            std::shared_ptr<android::CodecCapabilities> codecCaps, std::string mediaType)
+    AMediaCodecInfo(const std::string &name, android::sp<android::MediaCodecInfo> info,
+            std::shared_ptr<android::CodecCapabilities> codecCaps, const std::string &mediaType)
             : mName(name), mInfo(info), mMediaType(mediaType), mCodecCaps(codecCaps) {
         if (!mName.empty() && mInfo != nullptr && !mMediaType.empty() && mCodecCaps != nullptr) {
             if (mCodecCaps->getAudioCapabilities() != nullptr) {
diff --git a/media/ndk/NdkMediaCodecStore.cpp b/media/ndk/NdkMediaCodecStore.cpp
index d911593..bf20f77 100644
--- a/media/ndk/NdkMediaCodecStore.cpp
+++ b/media/ndk/NdkMediaCodecStore.cpp
@@ -66,8 +66,10 @@
 
             auto it = typesInfoMap.find(mediaType);
             if (it == typesInfoMap.end()) {
-                AMediaCodecSupportedMediaType supportedType = { mediaType.c_str(), 0 };
-                it = typesInfoMap.emplace(mediaType, supportedType).first;
+                char *mediaTypePtr = new char[mediaType.size()+1];
+                strncpy(mediaTypePtr, mediaType.c_str(), mediaType.size()+1);
+                it = typesInfoMap.emplace(mediaType,
+                        (AMediaCodecSupportedMediaType) { mediaTypePtr, 0 }).first;
                 mediaTypes.push_back(mediaType);
             }
             uint32_t &mode = it->second.mMode;
@@ -152,12 +154,6 @@
     }
 }
 
-static bool codecHandlesFormat(const AMediaCodecInfo codecInfo,
-        sp<AMessage> format, bool isEncoder) {
-    return codecInfo.mCodecCaps->isEncoder() == isEncoder
-            && codecInfo.mCodecCaps->isFormatSupported(format);
-}
-
 static media_status_t findNextCodecForFormat(
         const AMediaFormat *format, bool isEncoder, const AMediaCodecInfo **outCodecInfo) {
     if (outCodecInfo == nullptr) {
@@ -168,10 +164,10 @@
         initCodecInfoMap();
     }
 
-    std::unique_ptr<std::vector<AMediaCodecInfo>> infos;
+    std::vector<AMediaCodecInfo> *infos;
     sp<AMessage> nativeFormat;
     if (format == nullptr) {
-        infos = std::unique_ptr<std::vector<AMediaCodecInfo>>(&sCodecInfos);
+        infos = &sCodecInfos;
     } else {
         AMediaFormat_getFormat(format, &nativeFormat);
         AString mime;
@@ -180,24 +176,24 @@
         }
 
         std::string mediaType = std::string(mime.c_str());
-        auto it = sTypeToInfoList.find(mediaType);
+        std::map<std::string, std::vector<AMediaCodecInfo>>::iterator it
+                = sTypeToInfoList.find(mediaType);
         if (it == sTypeToInfoList.end()) {
             return AMEDIA_ERROR_UNSUPPORTED;
         }
-        infos = std::unique_ptr<std::vector<AMediaCodecInfo>>(&(it->second));
+        infos = &(it->second);
     }
 
     bool found = *outCodecInfo == nullptr;
     for (const AMediaCodecInfo &info : *infos) {
-        if (found && (format == nullptr
-                || codecHandlesFormat(info, nativeFormat, isEncoder))) {
+        if (found && info.mCodecCaps->isEncoder() == isEncoder
+                && (format == nullptr || info.mCodecCaps->isFormatSupported(nativeFormat))) {
             *outCodecInfo = &info;
             return AMEDIA_OK;
         }
         if (*outCodecInfo == &info) {
             found = true;
         }
-
     }
     *outCodecInfo = nullptr;
     return AMEDIA_ERROR_UNSUPPORTED;
@@ -208,7 +204,7 @@
 EXPORT
 media_status_t AMediaCodecStore_getSupportedMediaTypes(
         const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) {
-    if (outMediaTypes == nullptr) {
+    if (outMediaTypes == nullptr || outCount == nullptr) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
@@ -241,6 +237,10 @@
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
+    if (sNameToInfoMap.empty()) {
+        initCodecInfoMap();
+    }
+
     auto it = sNameToInfoMap.find(std::string(name));
     if (it == sNameToInfoMap.end()) {
         *outCodecInfo = nullptr;
diff --git a/media/ndk/include/media-vndk/VndkImageReader.h b/media/ndk/include/media-vndk/VndkImageReader.h
index c67a38c..8659ce7 100644
--- a/media/ndk/include/media-vndk/VndkImageReader.h
+++ b/media/ndk/include/media-vndk/VndkImageReader.h
@@ -17,6 +17,7 @@
 #ifndef _VNDK_IMAGE_READER_H
 #define _VNDK_IMAGE_READER_H
 
+#include <sys/cdefs.h>
 // vndk is a superset of the NDK
 #include <media/NdkImageReader.h>
 
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 598beb7..223d2f8 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -251,6 +251,11 @@
  * dequeueOutputBuffer, and not yet queued.
  *
  * Available since API level 21.
+ * <p>
+ * At or before API level 35, the out_size returned was invalid, and instead the
+ * size returned in the AMediaCodecBufferInfo struct from
+ * AMediaCodec_dequeueOutputBuffer() should be used. After API
+ * level 35, this API returns the correct output buffer size as well.
  */
 uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec*, size_t idx, size_t *out_size) __INTRODUCED_IN(21);
 
@@ -309,9 +314,16 @@
 #undef _off_t_compat
 
 /**
- * Get the index of the next available buffer of processed data.
+ * Get the index of the next available buffer of processed data along with the
+ * metadata associated with it.
  *
  * Available since API level 21.
+ * <p>
+ * At or before API level 35, the offset in the AMediaCodecBufferInfo struct
+ * was invalid and should be ignored; however, at the same time
+ * the buffer size could only be obtained from this struct. After API
+ * level 35, the offset returned in the struct is always set to 0, and the
+ * buffer size can also be obtained from the AMediaCodec_getOutputBuffer() call.
  */
 ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info,
         int64_t timeoutUs) __INTRODUCED_IN(21);
@@ -468,7 +480,6 @@
 /**
  * Set an asynchronous callback for actionable AMediaCodec events.
  * When asynchronous callback is enabled, it is an error for the client to call
- * AMediaCodec_getInputBuffers(), AMediaCodec_getOutputBuffers(),
  * AMediaCodec_dequeueInputBuffer() or AMediaCodec_dequeueOutputBuffer().
  *
  * AMediaCodec_flush() behaves differently in asynchronous mode.
diff --git a/media/ndk/include/media/NdkMediaCodecInfo.h b/media/ndk/include/media/NdkMediaCodecInfo.h
index 558e82c..87d1cfe 100644
--- a/media/ndk/include/media/NdkMediaCodecInfo.h
+++ b/media/ndk/include/media/NdkMediaCodecInfo.h
@@ -73,22 +73,38 @@
 /**
  * Get the canonical name of a codec.
  *
- * \return      The char pointer to the canonical name.
- *              It is owned by the framework. No lifetime management needed for users.
+ * @return      The char pointer to the canonical name.
+ *              Encoded as ASCII and owned by the framework.
  *
- * Return NULL if @param info is invalid.
+ * @return NULL if @param info is invalid.
  */
-const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+const char* _Nullable AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo* _Nonnull info)
+        __INTRODUCED_IN(36);
+
+typedef enum AMediaCodecKind : int32_t {
+    /** invalid media codec info */
+    AMediaCodecKind_INVALID = 0,
+
+    /** decoder */
+    AMediaCodecKind_DECODER = 1,
+
+    /** encoder */
+    AMediaCodecKind_ENCODER = 2,
+} AMediaCodecKind;
 
 /**
- * Query if the codec is an encoder.
+ * Query the kind of the codec.
  */
-bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+AMediaCodecKind AMediaCodecInfo_getKind(const AMediaCodecInfo* _Nonnull info) __INTRODUCED_IN(36);
 
 /**
- * Query if the codec is provided by the Android platform (false) or the device manufacturer (true).
+ * Query if the codec is provided by the Android platform or the device manufacturer.
+ *
+ * @return 1 if the codec is provided by the device manufacturer
+ * @return 0 if the codec is provided by the Android platform
+ * @return -1 if @param info is invalid.
  */
-bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+int32_t AMediaCodecInfo_isVendor(const AMediaCodecInfo* _Nonnull info) __INTRODUCED_IN(36);
 
 /**
  * The type of codecs.
@@ -97,7 +113,7 @@
     /**
      * Not a codec type. Used for indicating an invalid operation occurred.
      */
-    INVALID_CODEC_INFO = 0,
+    AMediaCodecType_INVALID_CODEC_INFO = 0,
 
     /**
      * Software codec.
@@ -105,7 +121,7 @@
      * Software-only codecs are more secure as they run in a tighter security sandbox.
      * On the other hand, software-only codecs do not provide any performance guarantees.
      */
-    SOFTWARE_ONLY = 1,
+    AMediaCodecType_SOFTWARE_ONLY = 1,
 
     /**
      * Hardware accelerated codec.
@@ -114,44 +130,46 @@
      * software codecs, but since they are specific to each device,
      * the actual performance details can vary.
      */
-    HARDWARE_ACCELERATED = 2,
+    AMediaCodecType_HARDWARE_ACCELERATED = 2,
 
     /**
      * Software codec but have device access.
      * Mainly referring to software codecs provided by vendors.
      */
-    SOFTWARE_WITH_DEVICE_ACCESS = 3,
+    AMediaCodecType_SOFTWARE_WITH_DEVICE_ACCESS = 3,
 } AMediaCodecType;
 
 /**
  * Query if the codec is SOFTWARE_ONLY, HARDWARE_ACCELERATED or SOFTWARE_WITH_DEVICE_ACCESS.
  *
- * Return INVALID_CODEC_INFO if @param info is invalid.
+ * @return INVALID_CODEC_INFO if @param info is invalid.
  */
 AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(
-        const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+        const AMediaCodecInfo* _Nonnull info) __INTRODUCED_IN(36);
 
 /**
  * Get the supported media type of the codec.
  *
- * \return  The char pointer to the media type.
- *          It is owned by the framework with infinite lifetime.
+ * @return  The char pointer to the media type (e.g. "video/hevc").
+ *          It is ASCII encoded and owned by the framework with infinite lifetime.
  *
- * Return NULL if @param info is invalid.
+ * @return NULL if @param info is invalid.
  */
-const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+const char* _Nullable AMediaCodecInfo_getMediaType(const AMediaCodecInfo* _Nonnull info)
+        __INTRODUCED_IN(36);
 
 /**
- * Returns the max number of the supported concurrent codec instances.
+ * Get the max number of the supported concurrent codec instances.
  *
  * This is a hint for an upper bound. Applications should not expect to successfully
  * operate more instances than the returned value, but the actual number of
  * concurrently operable instances may be less as it depends on the available
  * resources at time of use.
  *
- * Return -1 if @param info is invalid.
+ * @return -1 if @param info is invalid.
  */
-int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo* _Nonnull info)
+        __INTRODUCED_IN(36);
 
 /**
  * Query codec feature capabilities.
@@ -160,12 +178,15 @@
  * include optional features that can be turned on, as well as
  * features that are always on.
  *
- * Return 1 if the feature is supported;
- * Return 0 if the feature is unsupported;
- * Return -1 if @param featureName is invalid.
+ * @param featureName   Get valid feature names from the defined constants in NdkMediaCodecInfo.h
+ *                      with prefix AMediaCodecInfo_FEATURE_.
+ *
+ * @return 1 if the feature is supported;
+ * @return 0 if the feature is unsupported;
+ * @return -1 if @param info or @param featureName is invalid.
  */
-int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info,
-        const char *featureName) __INTRODUCED_IN(36);
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo* _Nonnull info,
+        const char* _Nonnull featureName) __INTRODUCED_IN(36);
 
 /**
  * Query codec feature requirements.
@@ -173,22 +194,25 @@
  * These features are required to be used by the codec, and as such,
  * they are always turned on.
  *
- * Return 1 if the feature is required;
- * Return 0 if the feature is not required;
- * Return -1 if @param featureName is invalid.
+ * @param featureName   Get valid feature names from the defined constants in NdkMediaCodecInfo.h
+ *                      with prefix AMediaCodecInfo_FEATURE_.
+ *
+ * @return 1 if the feature is required;
+ * @return 0 if the feature is not required;
+ * @return -1 if @param info or @param featureName is invalid.
  */
-int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info,
-        const char *featureName) __INTRODUCED_IN(36);
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo* _Nonnull info,
+        const char* _Nonnull featureName) __INTRODUCED_IN(36);
 
 /**
  * Query whether codec supports a given @param format.
  *
- * Return 1 if the format is supported;
- * Return 0 if the format is unsupported;
- * Return -1 if @param format is invalid.
+ * @return 1 if the format is supported;
+ * @return 0 if the format is unsupported;
+ * @return -1 if @param info or @param format is invalid.
  */
-int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info,
-        const AMediaFormat *format) __INTRODUCED_IN(36);
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo* _Nonnull info,
+        const AMediaFormat* _Nonnull format) __INTRODUCED_IN(36);
 
 /**
  * Get the ACodecAudioCapabilities from the given AMediaCodecInfo.
@@ -196,12 +220,12 @@
  * @param outAudioCaps        The pointer to the output ACodecAudioCapabilities.
  *                            It is owned by the framework and has an infinite lifetime.
  *
- * Return AMEDIA_OK if successfully got the ACodecAudioCapabilities.
- * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an audio codec.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ * @return AMEDIA_OK if successfully got the ACodecAudioCapabilities.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the codec is not an audio codec.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param info or @param outAudioCaps is invalid.
  */
-media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
-        const ACodecAudioCapabilities **outAudioCaps) __INTRODUCED_IN(36);
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo* _Nonnull info,
+        const ACodecAudioCapabilities* _Nullable * _Nonnull outAudioCaps) __INTRODUCED_IN(36);
 
 /**
  * Get the ACodecVideoCapabilities from the given AMediaCodecInfo.
@@ -209,12 +233,12 @@
  * @param outVideoCaps        The pointer to the output ACodecVideoCapabilities.
  *                            It is owned by the framework and has an infinite lifetime.
  *
- * Return AMEDIA_OK if successfully got the ACodecVideoCapabilities.
- * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not a video codec.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ * @return AMEDIA_OK if successfully got the ACodecVideoCapabilities.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the codec is not a video codec.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param info or @param outVideoCaps is invalid.
  */
-media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
-        const ACodecVideoCapabilities **outVideoCaps) __INTRODUCED_IN(36);
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo* _Nonnull info,
+        const ACodecVideoCapabilities* _Nullable * _Nonnull outVideoCaps) __INTRODUCED_IN(36);
 
 /**
  * Get the ACodecEncoderCapabilities from the given AMediaCodecInfo.
@@ -222,12 +246,12 @@
  * @param outEncoderCaps        The pointer to the output ACodecEncoderCapabilities.
  *                              It is owned by the framework and has an infinite lifetime.
  *
- * Return AMEDIA_OK if successfully got the ACodecEncoderCapabilities.
- * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an encoder.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ * @return AMEDIA_OK if successfully got the ACodecEncoderCapabilities.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the codec is not an encoder.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param info or @param outEncoderCaps is invalid.
  */
-media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
-        const ACodecEncoderCapabilities **outEncoderCaps) __INTRODUCED_IN(36);
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo* _Nonnull info,
+        const ACodecEncoderCapabilities* _Nullable * _Nonnull outEncoderCaps) __INTRODUCED_IN(36);
 
 // ACodecAudioCapabilities
 
@@ -238,11 +262,12 @@
  *                  Users are responsible for allocating a valid AIntRange structure and
  *                  managing the lifetime of it.
  *
- * Return AMEDIA_OK if got bitrates successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got bitrates successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps and @param outRange is invalid.
  */
-media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+media_status_t ACodecAudioCapabilities_getBitrateRange(
+        const ACodecAudioCapabilities* _Nonnull audioCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the array of supported sample rates
@@ -253,14 +278,15 @@
  *                      The array is owned by the framework and has an infinite lifetime.
  * @param outCount      The size of the output array.
  *
- * Return AMEDIA_OK if the codec supports only discrete values.
+ * @return AMEDIA_OK if the codec supports only discrete values.
  * Otherwise, it returns AMEDIA_ERROR_UNSUPPORTED.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
  * and @param outCount is invalid.
  */
 media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
-        const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr,
-        size_t *outCount) __INTRODUCED_IN(36);
+        const ACodecAudioCapabilities* _Nonnull audioCaps,
+        const int* _Nullable * _Nonnull outArrayPtr,
+        size_t* _Nonnull outCount) __INTRODUCED_IN(36);
 
 /**
  * Get the array of supported sample rate ranges.
@@ -271,30 +297,31 @@
  *                      The array is owned by the framework and has an infinite lifetime.
  * @param outCount      The size of the out array.
  *
- * Return AMEDIA_OK if got the sample rate ranges successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * @return AMEDIA_OK if got the sample rate ranges successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
  * and @param outCount is invalid.
  */
 media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
-        const ACodecAudioCapabilities *audioCaps,
-        const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+        const ACodecAudioCapabilities* _Nonnull audioCaps,
+        const AIntRange* _Nullable * _Nonnull outArrayPtr, size_t* _Nonnull outCount)
+        __INTRODUCED_IN(36);
 
 /**
  * Return the maximum number of input channels supported.
  *
- * Return -1 if @param audioCaps is invalid.
+ * @return -1 if @param audioCaps is invalid.
  */
 int32_t ACodecAudioCapabilities_getMaxInputChannelCount(
-        const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+        const ACodecAudioCapabilities* _Nonnull audioCaps) __INTRODUCED_IN(36);
 
 /**
  * Returns the minimum number of input channels supported.
  * This is often 1, but does vary for certain mime types.
  *
- * Return -1 if @param audioCaps is invalid.
+ * @return -1 if @param audioCaps is invalid.
  */
 int32_t ACodecAudioCapabilities_getMinInputChannelCount(
-        const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+        const ACodecAudioCapabilities* _Nonnull audioCaps) __INTRODUCED_IN(36);
 
 /**
  * Get an array of ranges representing the number of input channels supported.
@@ -307,21 +334,24 @@
  *                      The array is owned by the framework and has an infinite lifetime.
  * @param outCount      The size of the output array.
  *
- * Return AMEDIA_OK if got the input channel array successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param audioCaps is invalid.
+ * @return AMEDIA_OK if got the input channel array successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
  */
 media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
-        const ACodecAudioCapabilities *audioCaps,
-        const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+        const ACodecAudioCapabilities* _Nonnull audioCaps,
+        const AIntRange* _Nullable * _Nonnull outArrayPtr, size_t* _Nonnull outCount)
+        __INTRODUCED_IN(36);
 
 /**
  * Query whether the sample rate is supported by the codec.
  *
- * Return 1 if the sample rate is supported.
- * Return 0 if the sample rate is unsupported
- * Return -1 if @param audioCaps is invalid.
+ * @return 1 if the sample rate is supported.
+ * @return 0 if the sample rate is unsupported
+ * @return -1 if @param audioCaps is invalid.
  */
-int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+int32_t ACodecAudioCapabilities_isSampleRateSupported(
+        const ACodecAudioCapabilities* _Nonnull audioCaps,
         int32_t sampleRate) __INTRODUCED_IN(36);
 
 // ACodecPerformancePoint
@@ -329,35 +359,38 @@
 /**
  * Create a performance point for a given frame size and frame rate.
  *
- * Performance points are defined by number of pixels, pixel rate and frame rate.
+ * Video performance points are a set of standard performance points defined by number of
+ * pixels, pixel rate and frame rate. Performance point represents an upper bound. This
+ * means that it covers all performance points with fewer pixels, pixel rate and frame
+ * rate.
  *
  * Users are responsible for calling
- * ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) after use.
+ * ACodecPerformancePoint_destroy(ACodecPerformancePoint *performancePoint) after use.
  *
  * @param width width of the frame in pixels
  * @param height height of the frame in pixels
  * @param frameRate frame rate in frames per second
  */
-ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+ACodecPerformancePoint* _Nonnull ACodecPerformancePoint_create(int32_t width, int32_t height,
         int32_t frameRate) __INTRODUCED_IN(36);
 
 /**
  * Delete a created performance point.
- *
- * Return AMEDIA_OK if it is successfully deleted.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param performancePoint is invalid.
  */
-media_status_t ACodecPerformancePoint_delete(
-        ACodecPerformancePoint *performancePoint) __INTRODUCED_IN(36);
+void ACodecPerformancePoint_destroy(
+        ACodecPerformancePoint* _Nullable performancePoint) __INTRODUCED_IN(36);
 
 /**
  * Checks whether the performance point covers a media format.
  *
  * @param format Stream format considered.
- * Return true if the performance point covers the format.
+ *
+ * @return 1 if the performance point covers the format.
+ * @return 0 if the performance point does not cover the format.
+ * @return -1 if @param performancePoint or @param format is invalid.
  */
-bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
-        const AMediaFormat *format) __INTRODUCED_IN(36);
+int32_t ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint* _Nonnull performancePoint,
+        const AMediaFormat* _Nonnull format) __INTRODUCED_IN(36);
 
 /**
  * Checks whether a performance point covers another performance point.
@@ -369,16 +402,22 @@
  * A Performance point represents an upper bound. This means that
  * it covers all performance points with fewer pixels, pixel rate and frame rate.
  *
- * Return true if @param one covers @param another.
+ * @return 1 if @param one covers @param another.
+ * @return 0 if @param one does not cover @param another.
+ * @return -1 if @param one or @param another is invalid.
  */
-bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
-        const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+int32_t ACodecPerformancePoint_covers(const ACodecPerformancePoint* _Nonnull one,
+        const ACodecPerformancePoint* _Nonnull another) __INTRODUCED_IN(36);
 
 /**
  * Checks whether two performance points are equal.
+ *
+ * @return 1 if @param one and @param another are equal.
+ * @return 0 if @param one and @param another are not equal.
+ * @return -1 if @param one or @param another is invalid.
  */
-bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
-        const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+int32_t ACodecPerformancePoint_equals(const ACodecPerformancePoint* _Nonnull one,
+        const ACodecPerformancePoint* _Nonnull another) __INTRODUCED_IN(36);
 
 // ACodecVideoCapabilities
 
@@ -389,11 +428,12 @@
  *                  Users are responsible for allocating a valid AIntRange structure and
  *                  managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported bitrates successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported bitrates successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
-media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+media_status_t ACodecVideoCapabilities_getBitrateRange(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported video widths.
@@ -402,11 +442,12 @@
  *                  Users are responsible for allocating a valid AIntRange structure and
  *                  managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported video widths successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported video widths successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
-media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+media_status_t ACodecVideoCapabilities_getSupportedWidths(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported video heights.
@@ -415,31 +456,32 @@
  *                  Users are responsible for allocating a valid AIntRange structure and
  *                  managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported video heights successfully.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported video heights successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
-media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+media_status_t ACodecVideoCapabilities_getSupportedHeights(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
- * Return the alignment requirement for video width (in pixels).
+ * Get the alignment requirement for video width (in pixels).
  *
  * This is a power-of-2 value that video width must be a multiple of.
  *
- * Return -1 if @param videoCaps is invalid.
+ * @return -1 if @param videoCaps is invalid.
  */
 int32_t ACodecVideoCapabilities_getWidthAlignment(
-        const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps) __INTRODUCED_IN(36);
 
 /**
  * Return the alignment requirement for video height (in pixels).
  *
  * This is a power-of-2 value that video height must be a multiple of.
  *
- * Return -1 if @param videoCaps is invalid.
+ * @return -1 if @param videoCaps is invalid.
  */
 int32_t ACodecVideoCapabilities_getHeightAlignment(
-        const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported frame rates.
@@ -452,11 +494,12 @@
  *                  Users are responsible for allocating a valid AIntRange structure and
  *                  managing the lifetime of it.
  *
- * \return AMEDIA_OK if got the frame rate range successfully.
- * \return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the frame rate range successfully.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
-        const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported video widths for a video height.
@@ -465,13 +508,13 @@
  *                      Users are responsible for allocating a valid AIntRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported video width range successfully.
- * Return AMEDIA_ERROR_UNSUPPORTED if the height query is not supported.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported video width range successfully.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the height query is not supported.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
-        const ACodecVideoCapabilities *videoCaps, int32_t height,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps, int32_t height,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported video heights for a video width.
@@ -480,13 +523,13 @@
  *                      Users are responsible for allocating a valid AIntRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported video height range successfully.
- * Return AMEDIA_ERROR_UNSUPPORTED if the width query is not supported.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported video height range successfully.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the width query is not supported.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
-        const ACodecVideoCapabilities *videoCaps, int32_t width,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps, int32_t width,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of supported video frame rates for a video size.
@@ -499,13 +542,13 @@
  *                      Users are responsible for allocating a valid ADoubleRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the supported video frame rates successfully.
- * Return AMEDIA_ERROR_UNSUPPORTED if the size query is not supported.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the supported video frame rates successfully.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the size query is not supported.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
-        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
-        ADoubleRange *outRange) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps, int32_t width, int32_t height,
+        ADoubleRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the range of achievable video frame rates for a video size.
@@ -517,48 +560,56 @@
   *                     Users are responsible for allocating a valid ADoubleRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if got the achievable video frame rates successfully.
- * Return AMEDIA_ERROR_UNSUPPORTED if the codec did not publish any measurement data.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if got the achievable video frame rates successfully.
+ * @return AMEDIA_ERROR_UNSUPPORTED if the codec did not publish any measurement data.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
-        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
-        ADoubleRange *outRange) __INTRODUCED_IN(36);
+        const ACodecVideoCapabilities* _Nonnull videoCaps, int32_t width, int32_t height,
+        ADoubleRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the supported performance points.
  *
- * @param outPerformancePointArray      The pointer to the output performance points array.
- *                                      The array is owned by the framework and has an infinite
- *                                      lifetime.
- * @param outCount                      The size of the output array.
+ * This API returns the supported performance points in a sequence and stores them in a pointer at
+ * outPerformancePoint. Initially, set the pointer pointed to by outPerformancePoint to NULL.
+ * On successive calls, keep the last pointer value. When the sequence is over
+ * AMEDIA_ERROR_UNSUPPORTED will be returned and the pointer will be set to NULL.
  *
- * Return AMEDIA_OK if successfully got the performance points.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param videoCaps is invalid.
+ * @param outPerformancePoint   a pointer to (the ACodecPerformancePoint pointer) where the next
+ *                              performance point will be stored.
+ *                              The ACodecPerformancePoint object is owned by the framework
+ *                              and has an infinite lifecycle.
+ *
+ * @return AMEDIA_OK if successfully got the performance point.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param videoCaps or @param outPerformancePoint
+ * is invalid.
+ * @return AMEDIA_ERROR_UNSUPPORTED if there are no more supported performance points.
+ * *outPerformancePoint will also be set to NULL in this case.
  */
-media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
-        const ACodecVideoCapabilities *videoCaps,
-        const ACodecPerformancePoint **outPerformancePointArray,
-        size_t *outCount) __INTRODUCED_IN(36);
+media_status_t ACodecVideoCapabilities_getNextSupportedPerformancePoint(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
+        const ACodecPerformancePoint* _Nullable * _Nonnull outPerformancePoint) __INTRODUCED_IN(36);
 
 /**
- * Return whether a given video size and frameRate combination is supported.
+ * Get whether a given video size and frameRate combination is supported.
  *
- * Return 1 if the size and rate are supported.
- * Return 0 if they are not supported.
- * Return -1 if @param videoCaps is invalid.
+ * @return 1 if the size and rate are supported.
+ * @return 0 if they are not supported.
+ * @return -1 if @param videoCaps is invalid.
  */
-int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(
+        const ACodecVideoCapabilities* _Nonnull videoCaps,
         int32_t width, int32_t height, double frameRate) __INTRODUCED_IN(36);
 
 /**
- * Return whether a given video size is supported.
+ * Get whether a given video size is supported.
  *
- * Return 1 if the size is supported.
- * Return 0 if the size is not supported.
- * Return -1 if @param videoCaps is invalid.
+ * @return 1 if the size is supported.
+ * @return 0 if the size is not supported.
+ * @return -1 if @param videoCaps is invalid.
  */
-int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities* _Nonnull videoCaps,
         int32_t width, int32_t height) __INTRODUCED_IN(36);
 
 // ACodecEncoderCapabilities
@@ -573,12 +624,12 @@
  *                      Users are responsible for allocating a valid AIntRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if successfully got the quality range.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if successfully got the quality range.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecEncoderCapabilities_getQualityRange(
-        const ACodecEncoderCapabilities *encoderCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+        const ACodecEncoderCapabilities* _Nonnull encoderCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Get the supported range of encoder complexity values.
@@ -591,32 +642,241 @@
  *                      Users are responsible for allocating a valid AIntRange structure and
  *                      managing the lifetime of it.
  *
- * Return AMEDIA_OK if successfully got the complexity range.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ * @return AMEDIA_OK if successfully got the complexity range.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
  */
 media_status_t ACodecEncoderCapabilities_getComplexityRange(
-        const ACodecEncoderCapabilities *encoderCaps,
-        AIntRange *outRange) __INTRODUCED_IN(36);
+        const ACodecEncoderCapabilities* _Nonnull encoderCaps,
+        AIntRange* _Nonnull outRange) __INTRODUCED_IN(36);
 
 /**
  * Encoder bitrate modes.
  */
-typedef enum ABiterateMode : int32_t {
-    BITRATE_MODE_CQ = 0,
-    BITRATE_MODE_VBR = 1,
-    BITRATE_MODE_CBR = 2,
-    BITRATE_MODE_CBR_FD = 3
-} ABiterateMode;
+typedef enum ABitrateMode : int32_t {
+    ABITRATE_MODE_CQ = 0,
+    ABITRATE_MODE_VBR = 1,
+    ABITRATE_MODE_CBR = 2,
+    ABITRATE_MODE_CBR_FD = 3
+} ABitrateMode;
 
 /**
  * Query whether a bitrate mode is supported.
  *
- * Return 1 if the bitrate mode is supported.
- * Return 0 if the bitrate mode is unsupported.
- * Return -1 if @param encoderCaps is invalid.
+ * @return 1 if the bitrate mode is supported.
+ * @return 0 if the bitrate mode is unsupported.
+ * @return -1 if @param encoderCaps is invalid.
  */
 int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
-        const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) __INTRODUCED_IN(36);
+        const ACodecEncoderCapabilities* _Nonnull encoderCaps, ABitrateMode mode)
+        __INTRODUCED_IN(36);
+
+// Feature Names
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video decoder only: codec supports seamless resolution changes.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_AdaptivePlayback __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video decoder only: codec supports secure decryption.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_SecurePlayback __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video or audio decoder only: codec supports tunneled playback.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_TunneledPlayback __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * The timestamp of each output buffer is derived from the timestamp of the input
+ * buffer that produced the output. If false, the timestamp of each output buffer is
+ * derived from the timestamp of the first input buffer.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_DynamicTimestamp __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * decoder only: codec supports partial (including multiple) access units
+ * per input buffer.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_FrameParsing __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * codec supports multiple access units (for decoding, or to output for
+ * encoders). If false, the codec only supports single access units. Producing multiple
+ * access units for output is an optional feature.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_MultipleFrames __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video decoder only: codec supports queuing partial frames.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_PartialFrame __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports intra refresh.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_IntraRefresh __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * decoder only: codec supports low latency decoding.
+ * If supported, clients can enable the low latency mode for the decoder.
+ * When the mode is enabled, the decoder doesn't hold input and output data more than
+ * required by the codec standards.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_LowLatency __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports quantization parameter bounds.
+ * @see MediaFormat#KEY_VIDEO_QP_MAX
+ * @see MediaFormat#KEY_VIDEO_QP_MIN
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_QpBounds __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports exporting encoding statistics.
+ * Encoders with this feature can provide the App clients with the encoding statistics
+ * information about the frame.
+ * The scope of encoding statistics is controlled by
+ * {@link MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL}.
+ *
+ * @see MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_EncodingStatistics __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports HDR editing.
+ *
+ * HDR editing support means that the codec accepts 10-bit HDR
+ * input surface, and it is capable of generating any HDR
+ * metadata required from both YUV and RGB input when the
+ * metadata is not present. This feature is only meaningful when
+ * using an HDR capable profile (and 10-bit HDR input).
+ *
+ * This feature implies that the codec is capable of encoding at
+ * least one HDR format, and that it supports RGBA_1010102 as
+ * well as P010, and optionally RGBA_FP16 input formats, and
+ * that the encoder can generate HDR metadata for all supported
+ * HDR input formats.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_HdrEditing __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports HLG editing.
+ *
+ * HLG editing support means that the codec accepts 10-bit HDR
+ * input surface in both YUV and RGB pixel format. This feature
+ * is only meaningful when using a 10-bit (HLG) profile and
+ * 10-bit input.
+ *
+ * This feature implies that the codec is capable of encoding
+ * 10-bit format, and that it supports RGBA_1010102 as
+ * well as P010, and optionally RGBA_FP16 input formats.
+ *
+ * The difference between this feature and {@link
+ * FEATURE_HdrEditing} is that HLG does not require the
+ * generation of HDR metadata and does not use an explicit HDR
+ * profile.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_HlgEditing __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video decoder only: codec supports dynamically
+ * changing color aspects.
+ *
+ * If true, the codec can propagate color aspect changes during
+ * decoding. This is only meaningful at session boundaries, e.g.
+ * upon processing Picture Parameter Sets prior to a new IDR.
+ * The color aspects may come from the bitstream, or may be
+ * provided using {@link MediaCodec#setParameters} calls.
+ *
+ * If the codec supports both 8-bit and 10-bit profiles, this
+ * feature means that the codec can dynamically switch between 8
+ * and 10-bit profiles, but this is restricted to Surface mode
+ * only.
+ *
+ * If the device supports HDR transfer functions, switching
+ * between SDR and HDR transfer is also supported. Together with
+ * the previous clause this means that switching between SDR and
+ * HDR sessions are supported in Surface mode, as SDR is
+ * typically encoded at 8-bit and HDR at 10-bit.
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_DynamicColorAspects __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video encoder only: codec supports region of interest encoding.
+ *
+ * RoI encoding support means the codec accepts information that specifies the relative
+ * importance of different portions of each video frame. This allows the encoder to
+ * separate a video frame into critical and non-critical regions, and use more bits
+ * (better quality) to represent the critical regions and de-prioritize non-critical
+ * regions. In other words, the encoder chooses a negative qp bias for the critical
+ * portions and a zero or positive qp bias for the non-critical portions.
+ *
+ * At a basic level, if the encoder decides to encode each frame with a uniform
+ * quantization value 'qpFrame' and a 'qpBias' is chosen/suggested for an LCU of the
+ * frame, then the actual qp of the LCU will be 'qpFrame + qpBias', although this value
+ * can be clamped basing on the min-max configured qp bounds for the current encoding
+ * session.
+ *
+ * In a shot, if a group of LCUs pan out quickly they can be marked as non-critical
+ * thereby enabling the encoder to reserve fewer bits during their encoding. Contrarily,
+ * LCUs that remain in shot for a prolonged duration can be encoded at better quality in
+ * one frame thereby setting-up an excellent long-term reference for all future frames.
+ *
+ * Note that by offsetting the quantization of each LCU, the overall bit allocation will
+ * differ from the originally estimated bit allocation, and the encoder will adjust the
+ * frame quantization for subsequent frames to meet the bitrate target. An effective
+ * selection of critical regions can set-up a golden reference and this can compensate
+ * for the bit burden that was introduced due to encoding RoI's at better quality.
+ * On the other hand, an ineffective choice of critical regions might increase the
+ * quality of certain parts of the image but this can hamper quality in subsequent frames.
+ *
+ * @see MediaCodec#PARAMETER_KEY_QP_OFFSET_MAP
+ * @see MediaCodec#PARAMETER_KEY_QP_OFFSET_RECTS
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_Roi __INTRODUCED_IN(36);
+
+/**
+ * A feature constant for use with AMediaCodecInfo_isFeature* methods
+ *
+ * video decoder only: codec supports detaching the
+ * output surface when in Surface mode.
+ * If true, the codec can be configured in Surface mode
+ * without an actual surface (in detached surface mode).
+ * @see MediaCodec#CONFIGURE_FLAG_DETACHED_SURFACE
+ */
+extern const char* _Nonnull AMediaCodecInfo_FEATURE_DetachedSurface __INTRODUCED_IN(36);
+
 
 __END_DECLS
 
diff --git a/media/ndk/include/media/NdkMediaCodecStore.h b/media/ndk/include/media/NdkMediaCodecStore.h
index aab8689..b0fb065 100644
--- a/media/ndk/include/media/NdkMediaCodecStore.h
+++ b/media/ndk/include/media/NdkMediaCodecStore.h
@@ -54,8 +54,8 @@
         FLAG_ENCODER = 1 << 1,
     };
 
-    // The media type.
-    const char *mMediaType;
+    // Encoded as ASCII.
+    const char* _Nonnull mMediaType;
     // bitfields for modes.
     uint32_t mMode;
 } AMediaCodecSupportedMediaType;
@@ -68,47 +68,59 @@
  *
  * @param outCount size of the out array.
  *
- * Return AMEDIA_OK if successfully made the copy.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if the @param outMediaTypes is invalid.
+ * @return AMEDIA_OK if successfully made the copy.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if the @param outMediaTypes is invalid.
  */
 media_status_t AMediaCodecStore_getSupportedMediaTypes(
-        const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) __INTRODUCED_IN(36);
+        const AMediaCodecSupportedMediaType* _Nullable * _Nonnull outMediaTypes,
+        size_t* _Nonnull outCount) __INTRODUCED_IN(36);
 
 /**
  * Get the next decoder info that supports the format.
  *
- * @param outCodecInfo  should be set as NULL to start the iteration.
- *                      Keep the last codecInfo you got from a previous call to get the next one.
- *                      *outCodecInfo will be set to NULL if reached the end.
- *                      It is owned by the framework and has an infinite lifetime.
+ * This API returns the decoder infos supporting the given format in a sequence and stores them
+ * in a pointer at outCodecInfo. Initially, set the pointer pointed to by outCodecInfo to NULL.
+ * On successive calls, keep the last pointer value. When the sequence is over
+ * AMEDIA_ERROR_UNSUPPORTED will be returned and the pointer will be set to NULL.
+ *
+ * @param outCodecInfo  a pointer to (the AMediaCodecInfo pointer) where the next codec info
+ *                      will be stored. The AMediaCodecInfo object is owned by the framework
+ *                      and has an infinite lifecycle.
  *
  * @param format        If set as NULL, this API will iterate through all available decoders.
  *                      If NOT NULL, it MUST contain key "mime" implying the media type.
  *
- * Return AMEDIA_OK if successfully got the info.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
- * Return AMEDIA_ERROR_UNSUPPORTED if no more decoder supporting the format.
+ * @return AMEDIA_OK if successfully got the info.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
+ * @return AMEDIA_ERROR_UNSUPPORTED if there are no more decoders supporting the format.
+ * *outCodecInfo will also be set to NULL in this case.
  *
  * It is undefined behavior to call this API with a NON NULL @param outCodecInfo
  * and a different @param format during an iteration.
  */
 media_status_t AMediaCodecStore_findNextDecoderForFormat(
-        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+        const AMediaFormat* _Nonnull format,
+        const AMediaCodecInfo* _Nullable * _Nonnull outCodecInfo) __INTRODUCED_IN(36);
 
 /**
  * Get the next encoder info that supports the format.
  *
- * @param outCodecInfo  should be set as NULL to start the iteration.
- *                      Keep the last codecInfo you got from a previous call to get the next one.
- *                      *outCodecInfo will be set to NULL if reached the end.
- *                      It is owned by the framework and has an infinite lifetime.
+ * This API returns the encoder infos supporting the given format in a sequence and stores them
+ * in a pointer at outCodecInfo. Initially, set the pointer pointed to by outCodecInfo to NULL.
+ * On successive calls, keep the last pointer value. When the sequence is over
+ * AMEDIA_ERROR_UNSUPPORTED will be returned and the pointer will be set to NULL.
+ *
+ * @param outCodecInfo  a pointer to (the AMediaCodecInfo pointer) where the next codec info
+ *                      will be stored. The AMediaCodecInfo object is owned by the framework
+ *                      and has an infinite lifecycle.
  *
  * @param format        If set as NULL, this API will iterate through all available encoders.
  *                      If NOT NULL, it MUST contain key "mime" implying the media type.
  *
- * Return AMEDIA_OK if successfully got the info.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo is invalid.
- * Return AMEDIA_ERROR_UNSUPPORTED if no more encoder supporting the format.
+ * @return AMEDIA_OK if successfully got the info.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo is invalid.
+ * @return AMEDIA_ERROR_UNSUPPORTED if there are no more encoders supporting the format.
+ * *outCodecInfo will also be set to NULL in this case.
  *
  * It is undefined behavior to call this API with a NON NULL @param outCodecInfo
  * and a different @param format during an iteration.
@@ -116,12 +128,14 @@
  * No secure encoder will show in the output.
  */
 media_status_t AMediaCodecStore_findNextEncoderForFormat(
-        const AMediaFormat* format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+        const AMediaFormat* _Nonnull format,
+        const AMediaCodecInfo* _Nullable * _Nonnull outCodecInfo) __INTRODUCED_IN(36);
 
 /**
  * Get the codecInfo corresponding to a given codec name.
  *
  * @param name          Media codec name.
+ *                      Encoded as ASCII.
  *                      Users can get valid codec names from the AMediaCodecInfo structures
  *                      returned from findNextDecoder|EncoderForFormat methods.
  *                      Note that this name may not correspond to the name the same codec used
@@ -130,12 +144,13 @@
  * @param outCodecInfo  Output parameter for the corresponding AMeidaCodecInfo structure.
  *                      It is owned by the framework and has an infinite lifetime.
  *
- * Return AMEDIA_OK if got the codecInfo successfully.
- * Return AMEDIA_ERROR_UNSUPPORTED if no corresponding codec found.
- * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param name is invalid.
+ * @return AMEDIA_OK if got the codecInfo successfully.
+ * @return AMEDIA_ERROR_UNSUPPORTED if no corresponding codec found.
+ * @return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param name is invalid.
  */
 media_status_t AMediaCodecStore_getCodecInfo(
-        const char *name, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+        const char*  _Nonnull name,
+        const AMediaCodecInfo* _Nullable * _Nonnull outCodecInfo) __INTRODUCED_IN(36);
 
 __END_DECLS
 
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 939f151..a141b64 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,17 +13,17 @@
     ACodecPerformancePoint_create; # introduced=36
     ACodecPerformancePoint_covers; # introduced=36
     ACodecPerformancePoint_coversFormat; # introduced=36
-    ACodecPerformancePoint_delete; # introduced=36
+    ACodecPerformancePoint_destroy; # introduced=36
     ACodecPerformancePoint_equals; # introduced=36
     ACodecVideoCapabilities_areSizeAndRateSupported; # introduced=36
     ACodecVideoCapabilities_getAchievableFrameRatesFor; # introduced=36
     ACodecVideoCapabilities_getBitrateRange; # introduced=36
     ACodecVideoCapabilities_getHeightAlignment; # introduced=36
+    ACodecVideoCapabilities_getNextSupportedPerformancePoint; # introduced=36
     ACodecVideoCapabilities_getSupportedFrameRates; # introduced=36
     ACodecVideoCapabilities_getSupportedFrameRatesFor; # introduced=36
     ACodecVideoCapabilities_getSupportedHeights; # introduced=36
     ACodecVideoCapabilities_getSupportedHeightsFor; # introduced=36
-    ACodecVideoCapabilities_getSupportedPerformancePoints; # introduced=36
     ACodecVideoCapabilities_getSupportedWidths; # introduced=36
     ACodecVideoCapabilities_getSupportedWidthsFor; # introduced=36
     ACodecVideoCapabilities_getWidthAlignment; # introduced=36
@@ -44,7 +44,7 @@
     AImageReader_newWithDataSpace; # introduced=UpsideDownCake
     AImageReader_setBufferRemovedListener; # introduced=26
     AImageReader_setImageListener; # introduced=24
-    AImageReader_setUsage; # introduced=36 llndk
+    AImageReader_setUsage; # introduced=36 llndk systemapi
     AImage_delete; # introduced=24
     AImage_deleteAsync; # introduced=26
     AImage_getCropRect; # introduced=24
@@ -245,14 +245,30 @@
     AMediaCodec_createPersistentInputSurface; # introduced=26
     AMediaCodec_start;
     AMediaCodec_stop;
+    AMediaCodecInfo_FEATURE_AdaptivePlayback; # var introduced=36
+    AMediaCodecInfo_FEATURE_SecurePlayback; # var introduced=36
+    AMediaCodecInfo_FEATURE_TunneledPlayback; # var introduced=36
+    AMediaCodecInfo_FEATURE_DynamicTimestamp; # var introduced=36
+    AMediaCodecInfo_FEATURE_FrameParsing; # var introduced=36
+    AMediaCodecInfo_FEATURE_MultipleFrames; # var introduced=36
+    AMediaCodecInfo_FEATURE_PartialFrame; # var introduced=36
+    AMediaCodecInfo_FEATURE_IntraRefresh; # var introduced=36
+    AMediaCodecInfo_FEATURE_LowLatency; # var introduced=36
+    AMediaCodecInfo_FEATURE_QpBounds; # var introduced=36
+    AMediaCodecInfo_FEATURE_EncodingStatistics; # var introduced=36
+    AMediaCodecInfo_FEATURE_HdrEditing; # var introduced=36
+    AMediaCodecInfo_FEATURE_HlgEditing; # var introduced=36
+    AMediaCodecInfo_FEATURE_DynamicColorAspects; # var introduced=36
+    AMediaCodecInfo_FEATURE_Roi; # var introduced=36
+    AMediaCodecInfo_FEATURE_DetachedSurface; # var introduced=36
     AMediaCodecInfo_getAudioCapabilities; # introduced=36
+    AMediaCodecInfo_getCanonicalName; # introduced=36
     AMediaCodecInfo_getEncoderCapabilities; # introduced=36
     AMediaCodecInfo_getVideoCapabilities; # introduced=36
-    AMediaCodecInfo_getCanonicalName; # introduced=36
+    AMediaCodecInfo_getKind; # introduced=36
     AMediaCodecInfo_getMaxSupportedInstances; # introduced=36
     AMediaCodecInfo_getMediaCodecInfoType; # introduced=36
     AMediaCodecInfo_getMediaType; # introduced=36
-    AMediaCodecInfo_isEncoder; # introduced=36
     AMediaCodecInfo_isFeatureRequired; # introduced=36
     AMediaCodecInfo_isFeatureSupported; # introduced=36
     AMediaCodecInfo_isFormatSupported; # introduced=36
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
index 80f0fc4..f73cb2f 100644
--- a/media/utils/MethodStatistics.cpp
+++ b/media/utils/MethodStatistics.cpp
@@ -42,7 +42,8 @@
                 new std::vector<std::string>{
                 "DeviceHalAidl",
                 "EffectHalAidl",
-                "StreamHalAidl",
+                "StreamInHalAidl",
+                "StreamOutHalAidl",
               })
         },
     };
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 7fd5be5..81662f1 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -172,9 +172,28 @@
         permission::PermissionChecker permissionChecker;
         int permitted;
         if (start) {
-            permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+            // Do a double-check, where we first check without actually starting in order to handle
+            // the behavior of AppOps where ops are sometimes started but paused for SOFT_DENIED.
+            // Since there is no way to maintain reference consensus due to this behavior, avoid
+            // starting an op when a restriction is in place by first checking. In the case where we
+            // startOp would fail, call a noteOp (which will also fail) instead. This preserves
+            // behavior that is reliant on listening to op rejected events (such as the hint
+            // dialogue to unmute the microphone). Technically racy, but very unlikely.
+            //
+            // TODO(b/294609684) To be removed when the pause state for an OP is removed.
+            permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
                     sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
                     attributedOpCode);
+            if (permitted == PERMISSION_GRANTED) {
+                permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+                        sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+                        attributedOpCode);
+            } else {
+                // intentionally don't set permitted
+                permissionChecker.checkPermissionForDataDeliveryFromDatasource(
+                            sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+                            attributedOpCode);
+            }
         } else {
             permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
                     sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 6a5bbbe..aece6c6 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -368,24 +368,56 @@
     }
 }
 
+template <typename T>
+concept is_ptr = requires(T t) { *t; t.operator->(); };
+
 // Automatically create a TimeCheck class for a class and method.
 // This is used for Audio HAL support.
-mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
+template <typename T>
+T makeTimeCheckStatsForClassMethodGeneric(
         std::string_view className, std::string_view methodName) {
     std::shared_ptr<MethodStatistics<std::string>> statistics =
             mediautils::getStatisticsForClass(className);
-    if (!statistics) return {}; // empty TimeCheck.
-    return mediautils::TimeCheck(
-            FixedString62(className).append("::").append(methodName),
-            [ safeMethodName = FixedString30(methodName),
-              stats = std::move(statistics) ]
-            (bool timeout, float elapsedMs) {
-                    if (timeout) {
-                        ; // ignored, there is no timeout value.
+
+    if constexpr (is_ptr<T>) {
+        if (!statistics) return T(new TimeCheck{}); // empty TimeCheck
+        return T(new TimeCheck{
+                FixedString62(className).append("::").append(methodName),
+                [safeMethodName = FixedString30(methodName),
+                        stats = std::move(statistics)]
+                        (bool timeout, float elapsedMs) {
+                    if (timeout) { ; // ignored, there is no timeout value.
                     } else {
                         stats->event(safeMethodName.asStringView(), elapsedMs);
                     }
-            }, {} /* timeoutDuration */, {} /* secondChanceDuration */, false /* crashOnTimeout */);
+                }, {} /* timeoutDuration */, {} /* secondChanceDuration */,
+                false /* crashOnTimeout */});
+    } else /* constexpr */ {
+        if (!statistics) return TimeCheck{}; // empty TimeCheck
+        return TimeCheck{
+                FixedString62(className).append("::").append(methodName),
+                [safeMethodName = FixedString30(methodName),
+                        stats = std::move(statistics)]
+                        (bool timeout, float elapsedMs) {
+                    if (timeout) { ; // ignored, there is no timeout value.
+                    } else {
+                        stats->event(safeMethodName.asStringView(), elapsedMs);
+                    }
+                }, {} /* timeoutDuration */, {} /* secondChanceDuration */,
+                false /* crashOnTimeout */};
+
+    }
+}
+
+mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
+        std::string_view className, std::string_view methodName) {
+    return makeTimeCheckStatsForClassMethodGeneric<mediautils::TimeCheck>(className, methodName);
+}
+
+std::unique_ptr<mediautils::TimeCheck> makeTimeCheckStatsForClassMethodUniquePtr(
+        std::string_view className, std::string_view methodName) {
+    return makeTimeCheckStatsForClassMethodGeneric<std::unique_ptr<mediautils::TimeCheck>>(
+            className, methodName);
 }
 
 }  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/BinderGenericUtils.h b/media/utils/include/mediautils/BinderGenericUtils.h
index 5f3b9f3..52ebd3b 100644
--- a/media/utils/include/mediautils/BinderGenericUtils.h
+++ b/media/utils/include/mediautils/BinderGenericUtils.h
@@ -59,6 +59,24 @@
 }
 
 /**
+ * Returns true if two interfaces pointer-match, or represent identical binder objects.
+ *
+ * C++ with C++ and NDK with NDK interfaces may be compared.
+ *
+ * It currently isn't possible through the NDK public interface to extract
+ * the underlying C++ binder object, so we don't allow NDK and C++ interfaces to
+ * be cross-checked even though they might be backed by the same binder object.
+ */
+static inline bool isSameInterface(const sp<IInterface>& a, const sp<IInterface>& b) {
+    return a == b || (a && b && IInterface::asBinder(a) == IInterface::asBinder(b));
+}
+
+static inline bool isSameInterface(const std::shared_ptr<::ndk::ICInterface>& a,
+        const std::shared_ptr<::ndk::ICInterface>& b) {
+    return a == b || (a && b && a->asBinder() == b->asBinder());
+}
+
+/**
  * Returns either a sp<Interface> or a std::shared_ptr<Interface> from a Binder object.
  *
  * A -cpp interface will return sp<Interface>.
diff --git a/media/utils/include/mediautils/Library.h b/media/utils/include/mediautils/Library.h
index 19cfc11..633d77d 100644
--- a/media/utils/include/mediautils/Library.h
+++ b/media/utils/include/mediautils/Library.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <dlfcn.h>
+#include <memory>
 #include <string>
 #include <unistd.h>
 
diff --git a/media/utils/include/mediautils/Runnable.h b/media/utils/include/mediautils/Runnable.h
new file mode 100644
index 0000000..ecb1b2b
--- /dev/null
+++ b/media/utils/include/mediautils/Runnable.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstddef>
+#include <future>
+#include <memory>
+
+namespace android::mediautils {
+
+// Essentially std::function <void()>, but supports moveable types (and binds to any return type).
+// The lack of move-only support is fixed in C++23 (std::move_only_function), but we don't yet have it.
+// Also, SBO for std::packaged_task size, which is what we are using this for
+class Runnable {
+  private:
+    // src == nullptr => destroy the dest, otherwise move from src storage to dst, destroying src
+    using move_destroy_fptr_t = void (*)(std::byte* dest, std::byte* src) noexcept;
+    using call_fptr_t = void (*)(std::byte* storage);
+
+    struct VTable {
+        move_destroy_fptr_t move_destroy;
+        call_fptr_t invoke;
+    };
+
+    static void empty_move_destroy(std::byte*, std::byte*) noexcept {}
+    static constexpr VTable empty_vtable{.move_destroy = empty_move_destroy, .invoke = nullptr};
+
+    template <typename T>
+    static T& transmogrify(std::byte* addr) {
+        return *std::launder(reinterpret_cast<T*>(addr));
+    }
+
+    template <typename T>
+    static void move_destroy_impl(std::byte* dest, std::byte* src) noexcept {
+        if (src) {
+            std::construct_at(&transmogrify<T>(dest), std::move(transmogrify<T>(src)));
+            transmogrify<T>(src).~T();
+        } else {
+            transmogrify<T>(dest).~T();
+        }
+    }
+
+    template <typename T>
+    static void call_impl(std::byte* addr) {
+        std::invoke(transmogrify<T>(addr));
+    }
+
+  public:
+    static constexpr size_t STORAGE_SIZE = sizeof(std::packaged_task<int()>);
+
+    Runnable() = default;
+
+    Runnable(std::nullptr_t) {}
+
+    Runnable(const Runnable& o) = delete;
+
+    Runnable(Runnable&& o) noexcept {
+        // ask other vtable to move their storage into ours
+        o.v.move_destroy(storage_, o.storage_);
+        std::swap(v, o.v);
+    }
+
+    template <typename F>
+        requires(std::is_invocable_v<std::decay_t<F>> &&
+                 !std::is_same_v<std::decay_t<F>, Runnable> &&
+                 std::is_move_constructible_v<std::decay_t<F>> &&
+                 sizeof(std::decay_t<F>) <= STORAGE_SIZE)
+    explicit Runnable(F&& task)
+        : v{move_destroy_impl<std::decay_t<F>>, call_impl<std::decay_t<F>>} {
+        std::construct_at(&transmogrify<std::decay_t<F>>(storage_), std::forward<F>(task));
+    }
+
+    Runnable& operator=(const Runnable& o) = delete;
+
+    Runnable& operator=(Runnable&& o) {
+        // destroy ourselves
+        v.move_destroy(storage_, nullptr);
+        v = empty_vtable;
+        // ask other vtable to move their storage into ours
+        o.v.move_destroy(storage_, o.storage_);
+        std::swap(v, o.v);
+        return *this;
+    }
+
+    ~Runnable() { v.move_destroy(storage_, nullptr); }
+
+    operator bool() const { return v.invoke != nullptr; }
+
+    void operator()() {
+        if (*this) v.invoke(storage_);
+    }
+
+  private:
+    VTable v = empty_vtable;
+    alignas(alignof(std::max_align_t)) std::byte storage_[STORAGE_SIZE];
+};
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceSingleton.h b/media/utils/include/mediautils/ServiceSingleton.h
index 177c3ba..bbd44d3 100644
--- a/media/utils/include/mediautils/ServiceSingleton.h
+++ b/media/utils/include/mediautils/ServiceSingleton.h
@@ -318,7 +318,9 @@
                 [traits, this](const InterfaceType<Service>& service) {
                     audio_utils::unique_lock ul(mMutex);
                     auto originalService = std::get<BaseInterfaceType<Service>>(mService);
-                    if (originalService != service) {
+                    // we suppress equivalent services from being set
+                    // where either the pointers match or the binder objects match.
+                    if (!mediautils::isSameInterface(originalService, service)) {
                         if (originalService != nullptr) {
                             invalidateService_l<Service>();
                         }
@@ -331,6 +333,9 @@
                         traits->onNewService(service);
                         ul.lock();
                         setDeathNotifier_l<Service>(service);
+                    } else {
+                        ALOGW("%s: ignoring duplicated service: %p",
+                                __func__, originalService.get());
                     }
                     ul.unlock();
                     mCv.notify_all();
@@ -343,6 +348,8 @@
     // sets the death notifier for mService (mService must be non-null).
     template <typename Service>
     void setDeathNotifier_l(const BaseInterfaceType<Service>& base) REQUIRES(mMutex) {
+        // here the pointer match should be identical to binder object match
+        // since we use a cached service.
         if (base != std::get<BaseInterfaceType<Service>>(mService)) {
             ALOGW("%s: service has changed for %s, skipping death notification registration",
                     __func__, toString(Service::descriptor).c_str());
@@ -358,6 +365,14 @@
                         // we do not need to generation count.
                         {
                             std::lock_guard l(mMutex);
+                            const auto currentService =
+                                    std::get<BaseInterfaceType<Service>>(mService);
+                            if (currentService != service) {
+                                ALOGW("%s: ignoring death as current service "
+                                        "%p != registered death service %p", __func__,
+                                        currentService.get(), service.get());
+                                return;
+                            }
                             invalidateService_l<Service>();
                         }
                         traits->onServiceDied(service);
diff --git a/media/utils/include/mediautils/SingleThreadExecutor.h b/media/utils/include/mediautils/SingleThreadExecutor.h
new file mode 100644
index 0000000..9700bc9
--- /dev/null
+++ b/media/utils/include/mediautils/SingleThreadExecutor.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <deque>
+#include <mutex>
+
+#include "Runnable.h"
+#include "jthread.h"
+
+namespace android::mediautils {
+
+/**
+ * A C++ implementation similar to a Java executor, which manages a thread which runs enqueued
+ * runnable tasks in queue order. Spawns thread on construction and joins destruction
+ */
+class SingleThreadExecutor {
+  public:
+    SingleThreadExecutor() : thread_([this](stop_token stok) { run(stok); }) {}
+
+    ~SingleThreadExecutor() { shutdown(/* dropTasks= */ true); }
+
+    void enqueue(Runnable r) {
+        if (!r) {
+            return;
+        } else {
+            std::lock_guard l{mutex_};
+            if (thread_.stop_requested()) return;
+            task_list_.push_back(std::move(r));
+        }
+        cv_.notify_one();
+    }
+
+    /**
+     * Request thread termination, optionally dropping any enqueued tasks.
+     * Note: this method does not join the thread, and already-running tasks are not cancelled.
+     */
+    void shutdown(bool dropTasks = false) {
+        {
+            std::lock_guard l{mutex_};
+            if (thread_.stop_requested()) return;
+            if (dropTasks) {
+                task_list_.clear();
+            }
+            thread_.request_stop();  // fancy atomic bool, so no deadlock risk
+        }
+        // This condition variable notification is necessary since the stop_callback functionality
+        // of stop_token is not fully implemented
+        cv_.notify_one();
+    }
+
+
+  private:
+    void run(stop_token stok) {
+        std::unique_lock l{mutex_};
+        while (true) {
+            cv_.wait_for(l, std::chrono::seconds(3), [this, stok]() {
+                return !task_list_.empty() || stok.stop_requested();
+            });
+            if (!task_list_.empty()) {
+                Runnable r {std::move(task_list_.front())};
+                task_list_.pop_front();
+                l.unlock();
+                r();
+                l.lock();
+            } else if (stok.stop_requested()) {
+                break;
+            } // else cv timeout
+        }
+    }
+
+    std::condition_variable cv_;
+    std::mutex mutex_;
+    std::deque<Runnable> task_list_;
+    jthread thread_;
+};
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index c112863..b275d24 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -159,6 +159,9 @@
 TimeCheck makeTimeCheckStatsForClassMethod(
         std::string_view className, std::string_view methodName);
 
+std::unique_ptr<TimeCheck> makeTimeCheckStatsForClassMethodUniquePtr(
+            std::string_view className, std::string_view methodName);
+
 // A handy statement-like macro to put at the beginning of almost every method
 // which calls into HAL. Note that it requires the class to implement 'getClassName'.
 #define TIME_CHECK() auto timeCheck = \
diff --git a/media/utils/include/mediautils/jthread.h b/media/utils/include/mediautils/jthread.h
index 17532a4..0acae18 100644
--- a/media/utils/include/mediautils/jthread.h
+++ b/media/utils/include/mediautils/jthread.h
@@ -42,7 +42,7 @@
     stop_token get_token() { return stop_token{*this}; }
     bool stop_requested() const { return cancellation_signal_.load(); }
     bool request_stop() {
-        auto f = false;
+        bool f = false;
         return cancellation_signal_.compare_exchange_strong(f, true);
     }
 
@@ -84,6 +84,8 @@
 
     bool request_stop() { return stop_source_.request_stop(); }
 
+    bool stop_requested() const { return stop_source_.stop_requested(); }
+
   private:
     // order matters
     impl::stop_source stop_source_;
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 4456df2..b1285d9 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -284,6 +284,8 @@
     name: "jthread_tests",
     defaults: ["libmediautils_tests_defaults"],
     srcs: [
+        "executor_tests.cpp",
         "jthread_tests.cpp",
+        "runnable_tests.cpp",
     ],
 }
diff --git a/media/utils/tests/executor_tests.cpp b/media/utils/tests/executor_tests.cpp
new file mode 100644
index 0000000..78ab3f6
--- /dev/null
+++ b/media/utils/tests/executor_tests.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "executor_tests"
+
+#include <mediautils/SingleThreadExecutor.h>
+#include <mediautils/TidWrapper.h>
+
+#include <future>
+
+#include <gtest/gtest.h>
+
+using namespace android::mediautils;
+
+class ExecutorTests : public ::testing::Test {
+  protected:
+    void TearDown() override { executor_.shutdown(); }
+    SingleThreadExecutor executor_;
+};
+
+TEST_F(ExecutorTests, TaskEnqueue) {
+    std::atomic<int> counter = 0;
+    std::packaged_task<int()> task1([&]() {
+        counter++;
+        return 7;
+    });
+
+    auto future1 = task1.get_future();
+    executor_.enqueue(Runnable{std::move(task1)});
+    EXPECT_EQ(future1.get(), 7);
+    EXPECT_EQ(counter, 1);
+}
+
+TEST_F(ExecutorTests, TaskThread) {
+    std::packaged_task<int()> task1([&]() { return getThreadIdWrapper(); });
+
+    auto future1 = task1.get_future();
+    executor_.enqueue(Runnable{std::move(task1)});
+    EXPECT_NE(future1.get(), getThreadIdWrapper());
+}
+
+TEST_F(ExecutorTests, TaskOrder) {
+    std::atomic<int> counter = 0;
+    std::packaged_task<int()> task1([&]() { return counter++; });
+    std::packaged_task<int()> task2([&]() { return counter++; });
+    auto future1 = task1.get_future();
+    auto future2 = task2.get_future();
+
+    executor_.enqueue(Runnable{std::move(task1)});
+    executor_.enqueue(Runnable{std::move(task2)});
+
+    EXPECT_EQ(future1.get(), 0);
+    EXPECT_EQ(future2.get(), 1);
+    EXPECT_EQ(counter, 2);
+}
+
+TEST_F(ExecutorTests, EmptyTask) {
+    // does not crash
+    executor_.enqueue(Runnable{});
+}
+
+TEST_F(ExecutorTests, ShutdownTwice) {
+    executor_.shutdown();
+    executor_.shutdown();
+}
diff --git a/media/utils/tests/runnable_tests.cpp b/media/utils/tests/runnable_tests.cpp
new file mode 100644
index 0000000..8160767
--- /dev/null
+++ b/media/utils/tests/runnable_tests.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "runnable_tests"
+
+#include <mediautils/Runnable.h>
+
+#include <gtest/gtest.h>
+
+using namespace android::mediautils;
+
+struct Func {
+    inline static int sMoveCtor = 0;
+    inline static int sDtor = 0;
+    // accumulator for call operator of this object
+    inline static int sSum = 0;
+    static constexpr int VAL1 = 7;
+    static constexpr int VAL2 = 4;
+
+    Func(int v) : value(v) {}
+    Func(const Func&) = delete;
+    Func(Func&& other) : value(other.value) { sMoveCtor++; }
+    Func& operator=(const Func&) = delete;
+    ~Func() { sDtor++; }
+
+    void operator()() { sSum += value; }
+
+  private:
+    const int value;
+};
+
+class RunnableTests : public ::testing::Test {
+  protected:
+    void SetUp() override {
+        Func::sMoveCtor = 0;
+        Func::sDtor = 0;
+        Func::sSum = 0;
+    }
+};
+
+TEST_F(RunnableTests, testEmpty) {
+    Runnable r1{};
+    Runnable r2{nullptr};
+    // invoking an empty Runnable should be a no-op instead of crashing
+    r1();
+    r2();
+    EXPECT_FALSE(r1);
+    EXPECT_FALSE(r2);
+}
+
+static int foo() {
+    return 5;
+}
+
+struct Copy {
+    Copy() {}
+    Copy(const Copy&) {}
+    Copy(Copy&&) {}
+    void operator()(){}
+};
+
+TEST_F(RunnableTests, testCompile) {
+    const Copy b{};
+    Runnable r1{std::move(b)};
+    Runnable r2{b};
+    Runnable r4{foo};
+    std::unique_ptr<int> ptr;
+    auto move_only = [ptr = std::move(ptr)](){};
+    Runnable r5{std::move(move_only)};
+    auto copyable = [](){};
+    Runnable r6{copyable};
+}
+
+TEST_F(RunnableTests, testBool) {
+    Runnable r1{[]() {}};
+    EXPECT_TRUE(r1);
+}
+
+TEST_F(RunnableTests, testCall) {
+    Runnable r1{Func{Func::VAL1}};
+    EXPECT_TRUE(r1);
+    r1();
+    EXPECT_EQ(Func::sSum, Func::VAL1);
+}
+
+TEST_F(RunnableTests, testDtor) {
+    {
+        Runnable r1{Func{Func::VAL1}};
+    }
+    EXPECT_EQ(Func::sDtor, 2);
+}
+
+TEST_F(RunnableTests, testMoveCtor) {
+    {
+        Runnable moved_from{Func{Func::VAL1}};
+        EXPECT_EQ(Func::sMoveCtor, 1);
+        EXPECT_EQ(Func::sDtor, 1);
+        Runnable r1{std::move(moved_from)};
+        EXPECT_EQ(Func::sDtor, 2);  // impl detail that we destroy internal obj after move
+        EXPECT_EQ(Func::sMoveCtor, 2);
+        EXPECT_TRUE(r1);
+        EXPECT_FALSE(moved_from);
+        r1();
+        EXPECT_EQ(Func::sSum, Func::VAL1);
+    }
+    EXPECT_EQ(Func::sDtor, 3);
+}
+
+TEST_F(RunnableTests, testMoveAssign) {
+    {
+        Runnable r1{Func{Func::VAL2}};
+        Runnable moved_from{Func{Func::VAL1}};
+        EXPECT_EQ(Func::sMoveCtor, 2);
+        EXPECT_EQ(Func::sDtor, 2);
+        r1();
+        EXPECT_EQ(Func::sSum, Func::VAL2);
+        r1 = std::move(moved_from);
+        EXPECT_EQ(Func::sDtor, 4);  // impl detail that we destroy internal obj after move
+        EXPECT_EQ(Func::sMoveCtor, 3);
+        EXPECT_TRUE(r1);
+        EXPECT_FALSE(moved_from);
+        r1();  // value should now hold Func::VAL1
+        EXPECT_EQ(Func::sSum, Func::VAL2 + Func::VAL1);
+    }
+    EXPECT_EQ(Func::sDtor, 5);
+}
diff --git a/media/utils/tests/service_singleton_tests.cpp b/media/utils/tests/service_singleton_tests.cpp
index 3e389a4..78a2173 100644
--- a/media/utils/tests/service_singleton_tests.cpp
+++ b/media/utils/tests/service_singleton_tests.cpp
@@ -251,9 +251,35 @@
         auto service = mediautils::getService<IServiceSingletonTest>();
         EXPECT_TRUE(service);
 
+        // mediautils::getService<> is a cached service.
+        // pointer equality is preserved for subsequent requests.
+        auto service_equal = mediautils::getService<IServiceSingletonTest>();
+        EXPECT_EQ(service, service_equal);
+        EXPECT_TRUE(mediautils::isSameInterface(service, service_equal));
+
+        // we can create an alias to the service by requesting it outside of the cache.
+        // this is a different pointer, but same underlying binder object.
+        auto service_equivalent =
+                mediautils::checkServicePassThrough<IServiceSingletonTest>();
+        EXPECT_NE(service, service_equivalent);
+        EXPECT_TRUE(mediautils::isSameInterface(service, service_equivalent));
+
         auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
         EXPECT_TRUE(service2);
 
+        // mediautils::getService<> is a cached service.
+        // pointer equality is preserved for subsequent requests.
+        auto service2_equal = mediautils::getService<aidl::IServiceSingletonTest>();
+        EXPECT_EQ(service2, service2_equal);
+        EXPECT_TRUE(mediautils::isSameInterface(service2, service2_equal));
+
+        // we can create an alias to the service by requesting it outside of the cache.
+        // this is a different pointer, but same underlying binder object.
+        auto service2_equivalent =
+                mediautils::checkServicePassThrough<aidl::IServiceSingletonTest>();
+        EXPECT_NE(service2, service2_equivalent);
+        EXPECT_TRUE(mediautils::isSameInterface(service2, service2_equivalent));
+
         keepAlive = service2;
 
         // we can also request our own death notifications (outside of the service traits).
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index add8a43..cf6e04e 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -149,7 +149,6 @@
         "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
-        "libaudiopermission",
         "av-types-aidl-cpp",
         "com.android.media.audio-aconfig-cc",
         "com.android.media.audioserver-aconfig-cc",
@@ -164,6 +163,7 @@
         "libaudiofoundation",
         "libaudiohal",
         "libaudiomanager",
+        "libaudiopermission",
         "libaudioprocessing",
         "libaudioutils",
         "libbinder",
@@ -211,13 +211,11 @@
 
     include_dirs: [
         "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/medialog",
     ],
 
     static_libs: [
         "libaudiospdif",
         "libcpustats",
-        "libmedialogservice",
     ],
 
     header_libs: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 68c3626..a4b06ee 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -44,7 +44,6 @@
 #include <media/AidlConversion.h>
 #include <media/AudioParameter.h>
 #include <media/AudioValidator.h>
-#include <media/IMediaLogService.h>
 #include <media/IPermissionProvider.h>
 #include <media/MediaMetricsItem.h>
 #include <media/NativePermissionController.h>
@@ -83,6 +82,8 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 namespace android {
 
 using namespace std::string_view_literals;
@@ -95,9 +96,12 @@
 using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
+using com::android::media::audio::audioserver_permissions;
 using com::android::media::permission::INativePermissionController;
 using com::android::media::permission::IPermissionProvider;
 using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_SETTINGS;
 using com::android::media::permission::ValidatedAttributionSourceState;
 
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
@@ -110,20 +114,7 @@
 
 static constexpr char kAudioServiceName[] = "audio";
 
-// Keep a strong reference to media.log service around forever.
-// The service is within our parent process so it can never die in a way that we could observe.
-// These two variables are const after initialization.
-static sp<IMediaLogService> sMediaLogService;
 
-static pthread_once_t sMediaLogOnce = PTHREAD_ONCE_INIT;
-
-static void sMediaLogInit()
-{
-    auto sMediaLogServiceAsBinder = defaultServiceManager()->getService(String16("media.log"));
-    if (sMediaLogServiceAsBinder != 0) {
-        sMediaLogService = interface_cast<IMediaLogService>(sMediaLogServiceAsBinder);
-    }
-}
 
 static int writeStr(int fd, std::string_view s) {
     return write(fd, s.data(), s.size());
@@ -142,7 +133,7 @@
         // Legacy paths may not properly populate package name, so we attempt to handle.
         if (!attr.packageName.has_value() || attr.packageName.value() == "") {
             ALOGW("Trusted client %d provided attr with missing package name" , callingUid);
-            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(attr.uid))[0];
         }
         // Behavior change: In the case of delegation, if pid is invalid,
         // filling it in with the callingPid will cause a mismatch between the
@@ -329,24 +320,11 @@
                         movingBase : 1) * AUDIO_UNIQUE_ID_USE_MAX;
     }
 
-#if 1
-    // FIXME See bug 165702394 and bug 168511485
-    const bool doLog = false;
-#else
-    const bool doLog = property_get_bool("ro.test_harness", false);
-#endif
-    if (doLog) {
-        mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters",
-                MemoryHeapBase::READ_ONLY);
-        (void) pthread_once(&sMediaLogOnce, sMediaLogInit);
-    }
-
     // reset battery stats.
     // if the audio service has crashed, battery stats could be left
     // in bad state, reset the state upon service start.
     BatteryNotifier::getInstance().noteResetAudio();
 
-    mMediaLogNotifier->run("MediaLogNotifier");
 
     // Notify that we have started (also called when audioserver service restarts)
     mediametrics::LogItem(mMetricsId)
@@ -518,16 +496,6 @@
         // no hardwareMutex() needed, as there are no other references to this
         delete mAudioHwDevs.valueAt(i);
     }
-
-    // Tell media.log service about any old writers that still need to be unregistered
-    if (sMediaLogService != 0) {
-        for (size_t count = mUnregisteredWriters.size(); count > 0; count--) {
-            sp<IMemory> iMemory(mUnregisteredWriters.top()->getIMemory());
-            mUnregisteredWriters.pop();
-            sMediaLogService->unregisterWriter(iMemory);
-        }
-    }
-    mMediaLogNotifier->requestExit();
     mPatchCommandThread->exit();
 }
 
@@ -580,7 +548,7 @@
 
     // TODO b/182392553: refactor or make clearer
     AttributionSourceState adjAttributionSource;
-    if (!com::android::media::audio::audioserver_permissions()) {
+    if (!audioserver_permissions()) {
         pid_t clientPid =
             VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
         bool updatePid = (clientPid == (pid_t)-1);
@@ -661,6 +629,11 @@
         }
     }
     if (ret != NO_ERROR) {
+        if (audioserver_flags::enable_gmap_mode()
+                && direction == MmapStreamInterface::DIRECTION_INPUT) {
+            audio_utils::lock_guard _l(mutex());
+            setHasAlreadyCaptured_l(adjAttributionSource.uid);
+        }
         return ret;
     }
 
@@ -753,6 +726,20 @@
     return NULL;
 }
 
+error::BinderResult<std::monostate> AudioFlinger::enforceCallingPermission(PermissionEnum perm) {
+    const uid_t uid = IPCThreadState::self()->getCallingUid();
+    // Due to a complicated start-up sequence, we could get a call from ourselves before APS
+    // populates the permission provider (done immediately following its construction). So,
+    // bail before calling into the permission provider, even though it also does this check.
+    if (uid == getuid()) return {};
+    const bool hasPerm = VALUE_OR_RETURN(getPermissionProvider().checkPermission(perm, uid));
+    if (hasPerm) {
+        return {};
+    } else {
+        return error::unexpectedExceptionCode(EX_SECURITY, "");
+    }
+}
+
 void AudioFlinger::dumpClients_ll(int fd, bool dumpAllocators) {
     String8 result;
 
@@ -1050,61 +1037,6 @@
     return client;
 }
 
-sp<NBLog::Writer> AudioFlinger::newWriter_l(size_t size, const char *name)
-{
-    // If there is no memory allocated for logs, return a no-op writer that does nothing.
-    // Similarly if we can't contact the media.log service, also return a no-op writer.
-    if (mLogMemoryDealer == 0 || sMediaLogService == 0) {
-        return new NBLog::Writer();
-    }
-    sp<IMemory> shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size));
-    // If allocation fails, consult the vector of previously unregistered writers
-    // and garbage-collect one or more them until an allocation succeeds
-    if (shared == 0) {
-        audio_utils::lock_guard _l(unregisteredWritersMutex());
-        for (size_t count = mUnregisteredWriters.size(); count > 0; count--) {
-            {
-                // Pick the oldest stale writer to garbage-collect
-                sp<IMemory> iMemory(mUnregisteredWriters[0]->getIMemory());
-                mUnregisteredWriters.removeAt(0);
-                sMediaLogService->unregisterWriter(iMemory);
-                // Now the media.log remote reference to IMemory is gone.  When our last local
-                // reference to IMemory also drops to zero at end of this block,
-                // the IMemory destructor will deallocate the region from mLogMemoryDealer.
-            }
-            // Re-attempt the allocation
-            shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size));
-            if (shared != 0) {
-                goto success;
-            }
-        }
-        // Even after garbage-collecting all old writers, there is still not enough memory,
-        // so return a no-op writer
-        return new NBLog::Writer();
-    }
-success:
-    NBLog::Shared *sharedRawPtr = (NBLog::Shared *) shared->unsecurePointer();
-    new((void *) sharedRawPtr) NBLog::Shared(); // placement new here, but the corresponding
-                                                // explicit destructor not needed since it is POD
-    sMediaLogService->registerWriter(shared, size, name);
-    return new NBLog::Writer(shared, size);
-}
-
-void AudioFlinger::unregisterWriter(const sp<NBLog::Writer>& writer)
-{
-    if (writer == 0) {
-        return;
-    }
-    sp<IMemory> iMemory(writer->getIMemory());
-    if (iMemory == 0) {
-        return;
-    }
-    // Rather than removing the writer immediately, append it to a queue of old writers to
-    // be garbage-collected later.  This allows us to continue to view old logs for a while.
-    audio_utils::lock_guard _l(unregisteredWritersMutex());
-    mUnregisteredWriters.push(writer);
-}
-
 // IAudioFlinger interface
 
 status_t AudioFlinger::createTrack(const media::CreateTrackRequest& _input,
@@ -1132,7 +1064,7 @@
 
     AttributionSourceState adjAttributionSource;
     pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (!com::android::media::audio::audioserver_permissions()) {
+    if (!audioserver_permissions()) {
         adjAttributionSource = input.clientInfo.attributionSource;
         const uid_t callingUid = IPCThreadState::self()->getCallingUid();
         uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
@@ -1413,8 +1345,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     audio_utils::lock_guard _l(mutex());
@@ -1455,8 +1391,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     // check range
@@ -1489,8 +1429,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
     if (uint32_t(mode) >= AUDIO_MODE_CNT) {
         ALOGW("Illegal value: setMode(%d)", mode);
@@ -1531,8 +1475,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     audio_utils::lock_guard lock(hardwareMutex());
@@ -1601,8 +1549,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     audio_utils::lock_guard _l(mutex());
@@ -1688,8 +1640,12 @@
         bool muted, audio_io_handle_t output)
 {
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     status_t status = checkStreamType(stream);
@@ -1816,8 +1772,12 @@
 status_t AudioFlinger::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     status_t status = checkStreamType(stream);
@@ -1949,8 +1909,12 @@
             IPCThreadState::self()->getCallingPid(), IPCThreadState::self()->getCallingUid());
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     String8 filteredKeyValuePairs = keyValuePairs;
@@ -2191,8 +2155,12 @@
     }
 
     // check calling permissions
-    if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+    if (audioserver_permissions()) {
+        VALUE_OR_RETURN_CONVERTED(enforceCallingPermission(MODIFY_AUDIO_SETTINGS));
+    } else {
+        if (!settingsAllowed()) {
+            return PERMISSION_DENIED;
+        }
     }
 
     audio_utils::lock_guard lock(hardwareMutex());
@@ -2347,6 +2315,12 @@
     return mAudioPolicyServiceLocal.load()->getPermissionProvider();
 }
 
+bool AudioFlinger::isHardeningOverrideEnabled() const {
+    // This is inited as part of service construction, prior to binder registration,
+    // so it should always be non-null.
+    return mAudioPolicyServiceLocal.load()->isHardeningOverrideEnabled();
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2405,44 +2379,6 @@
     mAudioFlinger->removeNotificationClient(mPid);
 }
 
-// ----------------------------------------------------------------------------
-AudioFlinger::MediaLogNotifier::MediaLogNotifier()
-    : mPendingRequests(false) {}
-
-
-void AudioFlinger::MediaLogNotifier::requestMerge() {
-    audio_utils::lock_guard _l(mMutex);
-    mPendingRequests = true;
-    mCondition.notify_one();
-}
-
-bool AudioFlinger::MediaLogNotifier::threadLoop() {
-    // Should already have been checked, but just in case
-    if (sMediaLogService == 0) {
-        return false;
-    }
-    // Wait until there are pending requests
-    {
-        audio_utils::unique_lock _l(mMutex);
-        mPendingRequests = false; // to ignore past requests
-        while (!mPendingRequests) {
-            mCondition.wait(_l);
-            // TODO may also need an exitPending check
-        }
-        mPendingRequests = false;
-    }
-    // Execute the actual MediaLogService binder call and ignore extra requests for a while
-    sMediaLogService->requestMergeWakeup();
-    usleep(kPostTriggerSleepPeriod);
-    return true;
-}
-
-void AudioFlinger::requestLogMerge() {
-    mMediaLogNotifier->requestMerge();
-}
-
-// ----------------------------------------------------------------------------
-
 status_t AudioFlinger::createRecord(const media::CreateRecordRequest& _input,
                                     media::CreateRecordResponse& _output)
 {
@@ -2461,7 +2397,7 @@
 
     AttributionSourceState adjAttributionSource;
     pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (!com::android::media::audio::audioserver_permissions()) {
+    if (!audioserver_permissions()) {
         adjAttributionSource = input.clientInfo.attributionSource;
         bool updatePid = (adjAttributionSource.pid == -1);
         const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -2612,6 +2548,9 @@
             audio_utils::lock_guard _l2(thread->mutex());
             thread->addEffectChain_l(chain);
         }
+        if (audioserver_flags::enable_gmap_mode()) {
+            setHasAlreadyCaptured_l(adjAttributionSource.uid);
+        }
         break;
     }
     // End of retry loop.
@@ -2648,6 +2587,26 @@
 
 // ----------------------------------------------------------------------------
 
+void AudioFlinger::setHasAlreadyCaptured_l(uid_t uid) {
+    {
+        const std::lock_guard _l(mCapturingClientsMutex);
+        if (mCapturingClients.count(uid)) return;
+        mCapturingClients.emplace(uid);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+        IAfPlaybackThread* const playbackThread = mPlaybackThreads.valueAt(i).get();
+        playbackThread->checkUpdateTrackMetadataForUid(uid);
+    }
+    for (size_t i = 0; i < mMmapThreads.size(); i++) {
+        IAfMmapThread* const mmapThread = mMmapThreads.valueAt(i).get();
+        if (mmapThread->isOutput()) {
+            IAfMmapPlaybackThread* const mmapPlaybackThread =
+                    mmapThread->asIAfMmapPlaybackThread().get();
+            mmapPlaybackThread->checkUpdateTrackMetadataForUid(uid);
+        }
+    }
+}
+
 status_t AudioFlinger::getAudioPolicyConfig(media::AudioPolicyConfig *config)
 {
     if (config == nullptr) {
@@ -2694,9 +2653,19 @@
     if (name == NULL) {
         return AUDIO_MODULE_HANDLE_NONE;
     }
-    if (!settingsAllowed()) {
-        return AUDIO_MODULE_HANDLE_NONE;
+    if (audioserver_permissions()) {
+        const auto res = enforceCallingPermission(MODIFY_AUDIO_SETTINGS);
+        if (!res.ok()) {
+            ALOGE("Function: %s perm check result (%s)", __FUNCTION__,
+                  errorToString(res.error()).c_str());
+            return AUDIO_MODULE_HANDLE_NONE;
+        }
+    } else {
+        if (!settingsAllowed()) {
+            return AUDIO_MODULE_HANDLE_NONE;
+        }
     }
+
     audio_utils::lock_guard _l(mutex());
     audio_utils::lock_guard lock(hardwareMutex());
     AudioHwDevice* module = loadHwModule_ll(name);
@@ -3021,19 +2990,27 @@
     return NO_ERROR;
 }
 
-sp<IAudioManager> AudioFlinger::getOrCreateAudioManager()
-{
-    if (mAudioManager.load() == nullptr) {
+sp<IAudioManager> AudioFlinger::getOrCreateAudioManager() {
+    sp<IAudioManager> iface = mAudioManager.load();
+    if (iface == nullptr) {
         // use checkService() to avoid blocking
-        sp<IBinder> binder =
-            defaultServiceManager()->checkService(String16(kAudioServiceName));
+        sp<IBinder> binder = defaultServiceManager()->checkService(String16(kAudioServiceName));
         if (binder != nullptr) {
-            mAudioManager = interface_cast<IAudioManager>(binder);
-        } else {
-            ALOGE("%s(): binding to audio service failed.", __func__);
+            iface = interface_cast<IAudioManager>(binder);
+            if (const auto native_iface = iface->getNativeInterface(); native_iface) {
+                mAudioManagerNative = std::move(native_iface);
+                mAudioManager.store(iface);
+            } else {
+                iface = nullptr;
+            }
         }
     }
-    return mAudioManager.load();
+    ALOGE_IF(iface == nullptr, "%s(): binding to audio service failed.", __func__);
+    return iface;
+}
+
+sp<media::IAudioManagerNative> AudioFlinger::getAudioManagerNative() const {
+    return mAudioManagerNative.load();
 }
 
 status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) const
@@ -3855,20 +3832,11 @@
 {
     constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
     constexpr auto PREFIX = "- ";
-    if (com::android::media::audioserver::fdtostring_timeout_fix()) {
-        using ::android::audio_utils::FdToString;
+    using ::android::audio_utils::FdToString;
 
-        auto writer = OR_RETURN(FdToString::createWriter(PREFIX));
-        thread->dump(writer.borrowFdUnsafe(), {} /* args */);
-        mThreadLog.logs(-1 /* time */, FdToString::closeWriterAndGetString(std::move(writer)));
-    } else {
-        audio_utils::FdToStringOldImpl fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
-        const int fd = fdToString.borrowFdUnsafe();
-        if (fd >= 0) {
-            thread->dump(fd, {} /* args */);
-            mThreadLog.logs(-1 /* time */, fdToString.closeAndGetString());
-        }
-    }
+    auto writer = OR_RETURN(FdToString::createWriter(PREFIX));
+    thread->dump(writer.borrowFdUnsafe(), {} /* args */);
+    mThreadLog.logs(-1 /* time */, FdToString::closeWriterAndGetString(std::move(writer)));
 }
 
 // checkThread_l() must be called with AudioFlinger::mutex() held
@@ -4302,7 +4270,7 @@
     status_t lStatus = NO_ERROR;
     uid_t callingUid = IPCThreadState::self()->getCallingUid();
     pid_t currentPid;
-    if (!com::android::media::audio::audioserver_permissions()) {
+    if (!audioserver_permissions()) {
         adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
         if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
@@ -4336,9 +4304,23 @@
         goto Exit;
     }
 
+    bool isSettingsAllowed;
+    if (audioserver_permissions()) {
+        const auto res = getPermissionProvider().checkPermission(
+                MODIFY_AUDIO_SETTINGS,
+                IPCThreadState::self()->getCallingUid());
+        if (!res.ok()) {
+            lStatus = statusTFromBinderStatus(res.error());
+            goto Exit;
+        }
+        isSettingsAllowed = res.value();
+    } else {
+        isSettingsAllowed = settingsAllowed();
+    }
+
     // check audio settings permission for global effects
     if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
-        if (!settingsAllowed()) {
+        if (!isSettingsAllowed) {
             ALOGE("%s: no permission for AUDIO_SESSION_OUTPUT_MIX", __func__);
             lStatus = PERMISSION_DENIED;
             goto Exit;
@@ -5277,26 +5259,6 @@
             break;
     }
 
-    // List of relevant events that trigger log merging.
-    // Log merging should activate during audio activity of any kind. This are considered the
-    // most relevant events.
-    // TODO should select more wisely the items from the list
-    switch (code) {
-        case TransactionCode::CREATE_TRACK:
-        case TransactionCode::CREATE_RECORD:
-        case TransactionCode::SET_MASTER_VOLUME:
-        case TransactionCode::SET_MASTER_MUTE:
-        case TransactionCode::SET_MIC_MUTE:
-        case TransactionCode::SET_PARAMETERS:
-        case TransactionCode::CREATE_EFFECT:
-        case TransactionCode::SYSTEM_READY: {
-            requestLogMerge();
-            break;
-        }
-        default:
-            break;
-    }
-
     const std::string methodName = getIAudioFlingerStatistics().getMethodForCode(code);
     mediautils::TimeCheck check(
             std::string("IAudioFlinger::").append(methodName),
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index e99c3ed..88a06c0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,6 +33,7 @@
 #include <audio_utils/mutex.h>
 #include <audio_utils/FdToString.h>
 #include <audio_utils/SimpleLog.h>
+#include <com/android/media/permission/PermissionEnum.h>
 #include <media/IAudioFlinger.h>
 #include <media/IAudioPolicyServiceLocal.h>
 #include <media/MediaMetricsItem.h>
@@ -49,6 +50,7 @@
 #include <map>
 #include <optional>
 #include <set>
+#include <variant>
 
 namespace android {
 
@@ -380,6 +382,7 @@
         return mEffectsFactoryHal;
     }
     sp<IAudioManager> getOrCreateAudioManager() final;
+    sp<media::IAudioManagerNative> getAudioManagerNative() const final;
 
     // Called when the last effect handle on an effect instance is removed. If this
     // effect belongs to an effect chain in mOrphanEffectChains, the chain is updated
@@ -393,12 +396,6 @@
             IAfEffectChain* srcChain = nullptr) final
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
 
-    // This is a helper that is called during incoming binder calls.
-    // Requests media.log to start merging log buffers
-    void requestLogMerge() final;
-    sp<NBLog::Writer> newWriter_l(size_t size, const char *name) final REQUIRES(mutex());
-    void unregisterWriter(const sp<NBLog::Writer>& writer) final;
-
     sp<audioflinger::SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
             audio_session_t triggerSession,
             audio_session_t listenerSession,
@@ -418,8 +415,17 @@
 
     const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
 
+    bool isHardeningOverrideEnabled() const final;
+
+    bool hasAlreadyCaptured(uid_t uid) const final {
+        const std::lock_guard _l(mCapturingClientsMutex);
+        return mCapturingClients.contains(uid);
+    }
+
     // ---- end of IAfThreadCallback interface
 
+    void setHasAlreadyCaptured_l(uid_t uid) REQUIRES(mutex());
+
     /* List available audio ports and their attributes */
     status_t listAudioPorts(unsigned int* num_ports, struct audio_port* ports) const
             EXCLUDES_AudioFlinger_Mutex;
@@ -475,6 +481,9 @@
     AudioHwDevice*          findSuitableHwDev_l(audio_module_handle_t module,
             audio_devices_t deviceType) REQUIRES(mutex());
 
+    error::BinderResult<std::monostate> enforceCallingPermission(
+                    com::android::media::permission::PermissionEnum perm);
+
     // incremented by 2 when screen state changes, bit 0 == 1 means "off"
     // AudioFlinger::setParameters() updates with mutex().
     std::atomic_uint32_t mScreenState{};
@@ -514,35 +523,6 @@
         const std::unique_ptr<media::psh_utils::Token> mClientToken;
     };
 
-    // --- MediaLogNotifier ---
-    // Thread in charge of notifying MediaLogService to start merging.
-    // Receives requests from AudioFlinger's binder activity. It is used to reduce the amount of
-    // binder calls to MediaLogService in case of bursts of AudioFlinger binder calls.
-    class MediaLogNotifier : public Thread {
-    public:
-        MediaLogNotifier();
-
-        // Requests a MediaLogService notification. It's ignored if there has recently been another
-        void requestMerge();
-    private:
-        // Every iteration blocks waiting for a request, then interacts with MediaLogService to
-        // start merging.
-        // As every MediaLogService binder call is expensive, once it gets a request it ignores the
-        // following ones for a period of time.
-        virtual bool threadLoop() override;
-
-        bool mPendingRequests;
-
-        // Mutex and condition variable around mPendingRequests' value
-        audio_utils::mutex mMutex{audio_utils::MutexOrder::kMediaLogNotifier_Mutex};
-        audio_utils::condition_variable mCondition;
-
-        // Duration of the sleep period after a processed request
-        static const int kPostTriggerSleepPeriod = 1000000;
-    };
-
-    const sp<MediaLogNotifier> mMediaLogNotifier = sp<MediaLogNotifier>::make();
-
     // Find io handle by session id.
     // Preference is given to an io handle with a matching effect chain to session id.
     // If none found, AUDIO_IO_HANDLE_NONE is returned.
@@ -802,8 +782,9 @@
     int32_t mAAudioBurstsPerBuffer GUARDED_BY(mutex()) = 0;
     int32_t mAAudioHwBurstMinMicros GUARDED_BY(mutex()) = 0;
 
-    /** Interface for interacting with the AudioService. */
+    /** Interfaces for interacting with the AudioService. */
     mediautils::atomic_sp<IAudioManager> mAudioManager;
+    mediautils::atomic_sp<media::IAudioManagerNative> mAudioManagerNative;
 
     // Bluetooth Variable latency control logic is enabled or disabled
     std::atomic<bool> mBluetoothLatencyModesEnabled = true;
@@ -814,6 +795,12 @@
     const int64_t mStartTime = audio_utils_get_real_time_ns();
     // Late-inited from main()
     std::atomic<int64_t> mStartupFinishedTime {};
+
+    // List of client UIDs having already captured audio in the past.
+    // This is used to control GMAP bidirectional mode track metadata tag
+    // generation.
+    std::set<uid_t> mCapturingClients GUARDED_BY(mCapturingClientsMutex);
+    mutable std::mutex  mCapturingClientsMutex; // only for mCapturingClients
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index b9d3ebe..6d5f684 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -1457,6 +1457,11 @@
     }
     status_t status = NO_ERROR;
     if ((mDescriptor.flags & EFFECT_FLAG_DEVICE_MASK) == EFFECT_FLAG_DEVICE_IND) {
+        // for AIDL, use setDevices to pass the AudioDeviceTypeAddrVector
+        if (!EffectConfiguration::isHidl()) {
+            return mEffectInterface->setDevices(devices);
+        }
+
         status_t cmdStatus;
         uint32_t size = sizeof(status_t);
         // FIXME: use audio device types and addresses when the hal interface is ready.
@@ -1576,6 +1581,11 @@
     return IAfEffectModule::isSpatializer(&mDescriptor.type);
 }
 
+bool EffectModule::isEffect(const effect_uuid_t &uuid) const {
+    using android::effect::utils::operator==;
+    return mDescriptor.uuid == uuid;
+}
+
 status_t EffectModule::setHapticScale_l(int id, os::HapticScale hapticScale) {
     if (mStatus != NO_ERROR) {
         return mStatus;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 9ecf89e..9d99b65 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -235,6 +235,7 @@
 
     bool isHapticGenerator() const final;
     bool isSpatializer() const final;
+    bool isEffect(const effect_uuid_t &uuid) const;
 
     status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
             REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 3a059b6..69c7321 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -114,6 +114,7 @@
     virtual status_t updatePolicyState() EXCLUDES_EffectBase_Mutex = 0;
     virtual bool purgeHandles() EXCLUDES_EffectBase_Mutex = 0;
     virtual void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) = 0;
+    virtual bool suspended() const EXCLUDES_EffectBase_Mutex = 0;
 
     // mCallback is atomic so this can be lock-free.
     virtual void setCallback(const sp<EffectCallbackInterface>& callback) = 0;
@@ -135,7 +136,6 @@
     virtual status_t setEnabled(bool enabled, bool fromHandle) EXCLUDES_EffectBase_Mutex = 0;
     virtual status_t setEnabled_l(bool enabled) REQUIRES(audio_utils::EffectBase_Mutex) = 0;
     virtual void setSuspended(bool suspended) EXCLUDES_EffectBase_Mutex = 0;
-    virtual bool suspended() const EXCLUDES_EffectBase_Mutex = 0;
 
     virtual ssize_t disconnectHandle(IAfEffectHandle* handle,
                                      bool unpinIfLast) EXCLUDES_EffectBase_Mutex = 0;
@@ -181,6 +181,7 @@
     virtual bool isHapticGenerator() const = 0;
     static bool isSpatializer(const effect_uuid_t* type);
     virtual bool isSpatializer() const = 0;
+    virtual bool isEffect(const effect_uuid_t &uuid) const = 0;
 
     virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
             REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 8fef263..8b9ab19 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -96,7 +96,10 @@
     virtual const sp<IAfPatchPanel>& getPatchPanel() const = 0;
     virtual const sp<MelReporter>& getMelReporter() const = 0;
     virtual const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() const = 0;
-    virtual sp<IAudioManager> getOrCreateAudioManager() = 0;  // Tracks
+    // AudioService interfaces
+    virtual sp<IAudioManager> getOrCreateAudioManager() = 0;
+    // Populated after getOrCreateAudioManager
+    virtual sp<media::IAudioManagerNative> getAudioManagerNative() const = 0;
 
     virtual bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
             EXCLUDES_AudioFlinger_Mutex = 0;
@@ -105,11 +108,6 @@
             IAfEffectChain* srcChain = nullptr)
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void requestLogMerge() = 0;
-    virtual sp<NBLog::Writer> newWriter_l(size_t size, const char *name)
-            REQUIRES(mutex()) = 0;
-    virtual void unregisterWriter(const sp<NBLog::Writer>& writer) = 0;
-
     virtual sp<audioflinger::SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
             audio_session_t triggerSession,
             audio_session_t listenerSession,
@@ -130,6 +128,10 @@
 
     virtual const ::com::android::media::permission::IPermissionProvider&
             getPermissionProvider() = 0;
+
+    virtual bool isHardeningOverrideEnabled() const = 0;
+
+    virtual bool hasAlreadyCaptured(uid_t uid) const = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -144,6 +146,7 @@
         MMAP_CAPTURE,   // Thread class for MMAP capture stream
         SPATIALIZER,    //
         BIT_PERFECT,    // Thread class for BitPerfectThread
+        DIRECT_RECORD,  // Thread class for DirectRecordThread
         // When adding a value, also update IAfThreadBase::threadTypeToString()
     };
 
@@ -538,6 +541,8 @@
 
     virtual IAfTrack* getTrackById_l(audio_port_handle_t trackId) REQUIRES(mutex()) = 0;
 
+    virtual std::vector<sp<IAfTrack>> getTracks_l() REQUIRES(mutex()) = 0;
+
     virtual bool hasMixer() const = 0;
 
     virtual status_t setRequestedLatencyMode(audio_latency_mode_t mode) = 0;
@@ -562,6 +567,7 @@
 
     virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &portIds, float volume,
                                     bool muted) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void checkUpdateTrackMetadataForUid(uid_t uid) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
@@ -686,6 +692,9 @@
     virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds)
             EXCLUDES_ThreadBase_Mutex = 0;
 
+    virtual void invalidateTracks(audio_stream_type_t streamType)
+            EXCLUDES_ThreadBase_Mutex = 0;
+
     // Sets the UID records silence - TODO(b/291317898)  move to IAfMmapCaptureThread
     virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
             EXCLUDES_ThreadBase_Mutex = 0;
@@ -704,6 +713,7 @@
 
     virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
                                     bool muted) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void checkUpdateTrackMetadataForUid(uid_t uid) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index c9c766f..ad5ccc6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -21,6 +21,8 @@
 #include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
+#include <media/AppOpsSession.h>
+#include <mediautils/SingleThreadExecutor.h>
 #include <datapath/VolumePortInterface.h>
 #include <fastpath/FastMixerDumpState.h>
 #include <media/AudioSystem.h>
@@ -45,6 +47,7 @@
 class IAfPlaybackThread;
 class IAfRecordThread;
 class IAfThreadBase;
+class IAfThreadCallback;
 
 struct TeePatch {
     sp<IAfPatchRecord> patchRecord;
@@ -131,6 +134,7 @@
     virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer) = 0;
     virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer) = 0;
 
+    virtual void signal() = 0;
     // Added for RecordTrack and OutputTrack
     virtual wp<IAfThreadBase> thread() const = 0;
     virtual const sp<ServerProxy>& serverProxy() const = 0;
@@ -258,8 +262,100 @@
     virtual bool isStopping_2() const = 0;
 };
 
-// Common interface for Playback tracks.
-class IAfTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
+// Functionality shared between MMAP and audioflinger datapath playback tracks. Note that MMAP
+// tracks don't implement the IAfTrack, just IAfTrackBase
+// Not a pure interface since no forward declaration necessary.
+class AfPlaybackCommon : public virtual VolumePortInterface {
+    using AppOpsSession = media::permission::AppOpsSession<media::permission::DefaultAppOpsFacade>;
+
+  public:
+    enum class EnforcementLevel {
+        NONE, // no enforcement
+        PARTIAL, // enforcement for CONTROL_PARTIAL
+        FULL, // enforcement for CONTROL
+    };
+
+    AfPlaybackCommon(IAfTrackBase& self, IAfThreadBase& thread, float volume, bool muted,
+                     const audio_attributes_t& attr,
+                     const AttributionSourceState& attributionSource,
+                     bool isOffloadOrMmap,
+                     bool shouldPlaybackHarden = true);
+
+    /**
+     * Updates the mute state and notifies the audio service.
+     */
+    void processMuteEvent(media::IAudioManagerNative& am, mute_state_t muteState);
+
+    void maybeLogPlaybackHardening(media::IAudioManagerNative& am) const;
+
+    // Restricted due to OP_AUDIO_CONTROL_PARTIAL
+    bool hasOpControlPartial() const {
+        return mOpControlPartialSession ? mHasOpControlPartial.load(std::memory_order_acquire)
+                                        : true;
+    }
+
+    // Restricted due to OP_AUDIO_CONTROL
+    bool hasOpControlFull() const {
+        return mOpControlFullSession ? mHasOpControlFull.load(std::memory_order_acquire) : true;
+    }
+
+    bool isPlaybackRestrictedControl() const {
+        using enum EnforcementLevel;
+        switch (mEnforcementLevel) {
+            case NONE:
+                return false;
+            case PARTIAL:
+                return !hasOpControlPartial();
+            case FULL:
+                return !hasOpControlFull();
+        }
+    }
+
+    // VolumePortInterface implementation
+    // for now the secondary patch tracks will always be not muted
+    // TODO(b/388241142): use volume capture rules to forward the vol/mute to patch tracks
+
+    void setPortVolume(float volume) final { mVolume = volume; }
+
+    void setPortMute(bool muted) final {
+        mMutedFromPort = muted;
+    }
+
+    float getPortVolume() const final { return mVolume; }
+
+    bool getPortMute() const final { return mMutedFromPort; }
+
+  protected:
+    // The following methods are for notifying that sonifying playback intends to begin/end
+    // for playback hardening purposes.
+    // TODO(b/385417236) once mute logic is centralized, the delivery request session should be
+    // tied to sonifying playback instead of track start->pause
+    void startPlaybackDelivery();
+    void endPlaybackDelivery();
+
+  private:
+    const IAfTrackBase& mSelf;
+
+    std::optional<mediautils::SingleThreadExecutor> mExecutor;
+    // TODO: atomic necessary if underneath thread lock?
+    std::atomic<mute_state_t> mMuteState;
+    std::atomic<bool> mMutedFromPort;
+    // associated with port
+    std::atomic<float> mVolume = 0.0f;
+
+    const EnforcementLevel mEnforcementLevel;
+
+    std::atomic<bool> mHasOpControlPartial {true};
+    std::atomic<bool> mHasOpControlFull {true};
+    mutable std::atomic<bool> mPlaybackHardeningLogged {false};
+    // the ref behind the optional is const
+    // these members are last in decl order to ensure it is destroyed first
+    std::optional<AppOpsSession> mOpControlPartialSession;
+    std::optional<AppOpsSession> mOpControlFullSession;
+};
+
+// Common interface for audioflinger Playback tracks.
+class IAfTrack : public virtual IAfTrackBase, public virtual AfPlaybackCommon {
 public:
     // FillingStatus is used for suppressing volume ramp at begin of playing
     enum FillingStatus { FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE };
@@ -323,7 +419,6 @@
     virtual float* mainBuffer() const = 0;
     virtual int auxEffectId() const = 0;
     virtual status_t getTimestamp(AudioTimestamp& timestamp) = 0;
-    virtual void signal() = 0;
     virtual status_t getDualMonoMode(audio_dual_mono_mode_t* mode) const = 0;
     virtual status_t setDualMonoMode(audio_dual_mono_mode_t mode) = 0;
     virtual status_t getAudioDescriptionMixLevel(float* leveldB) const = 0;
@@ -382,19 +477,16 @@
     virtual audio_output_flags_t getOutputFlags() const = 0;
     virtual float getSpeed() const = 0;
 
-    /**
-     * Updates the mute state and notifies the audio service. Call this only when holding player
-     * thread lock.
-     */
-    virtual void processMuteEvent_l(
-            const sp<IAudioManager>& audioManager, mute_state_t muteState) = 0;
-
     virtual void triggerEvents(AudioSystem::sync_event_t type) = 0;
 
     virtual void disable() = 0;
     virtual bool isDisabled() const = 0;
 
     virtual int& fastIndex() = 0;
+
+    // Restricted due to OP_PLAY_AUDIO
+    virtual bool isPlaybackRestrictedOp() const = 0;
+
     virtual bool isPlaybackRestricted() const = 0;
 
     // Used by thread only
@@ -469,7 +561,7 @@
     virtual ExtendedTimestamp getClientProxyTimestamp() const = 0;
 };
 
-class IAfMmapTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
+class IAfMmapTrack : public virtual IAfTrackBase, public virtual AfPlaybackCommon {
 public:
     static sp<IAfMmapTrack> create(IAfThreadBase* thread,
             const audio_attributes_t& attr,
@@ -496,13 +588,6 @@
     virtual bool isSilenced_l() const = 0;
     // protected by MMapThread::mLock
     virtual bool getAndSetSilencedNotified_l() = 0;
-
-    /**
-     * Updates the mute state and notifies the audio service. Call this only when holding player
-     * thread lock.
-     */
-    virtual void processMuteEvent_l(  // see IAfTrack
-            const sp<IAudioManager>& audioManager, mute_state_t muteState) = 0;
 };
 
 class RecordBufferConverter;
@@ -560,6 +645,7 @@
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t shareAudioHistory(
             const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) = 0;
+    virtual status_t setParameters(const String8& keyValuePairs) = 0;
     virtual int32_t startFrames() const = 0;
 
     static bool checkServerLatencySupported(audio_format_t format, audio_input_flags_t flags) {
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 259136b..4e2dd06 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -20,6 +20,7 @@
 #include "TrackBase.h"
 
 #include <android/content/AttributionSourceState.h>
+#include <media/AppOpsSession.h>
 
 namespace android {
 
@@ -60,24 +61,6 @@
                                                         mSilencedNotified = true;
                                                         return silencedNotified; }
 
-    /**
-     * Updates the mute state and notifies the audio service. Call this only when holding player
-     * thread lock.
-     */
-    void processMuteEvent_l(const sp<IAudioManager>& audioManager,
-                            mute_state_t muteState)
-                            /* REQUIRES(MmapPlaybackThread::mLock) */ final;
-
-    // VolumePortInterface implementation
-    void setPortVolume(float volume) override {
-        mVolume = volume;
-    }
-    void setPortMute(bool muted) override {
-        mMutedFromPort = muted;
-    }
-    float getPortVolume() const override { return mVolume; }
-    bool getPortMute() const override { return mMutedFromPort; }
-
     std::string trackFlagsAsString() const final { return {}; }
 private:
     DISALLOW_COPY_AND_ASSIGN(MmapTrack);
@@ -95,16 +78,6 @@
     const uid_t mUid;
     bool  mSilenced;            // protected by MMapThread::mLock
     bool  mSilencedNotified;    // protected by MMapThread::mLock
-
-    // TODO: replace PersistableBundle with own struct
-    // access these two variables only when holding player thread lock.
-    std::unique_ptr<os::PersistableBundle> mMuteEventExtras
-            /* GUARDED_BY(MmapPlaybackThread::mLock) */;
-    mute_state_t mMuteState
-            /* GUARDED_BY(MmapPlaybackThread::mLock) */;
-    bool mMutedFromPort;
-
-    float mVolume = 0.0f;
 };  // end of Track
 
 } // namespace android
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index c335c70..dac5959 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -45,14 +45,14 @@
                        const AttributionSourceState& attributionSource,
                        audio_usage_t usage, int id, uid_t uid);
     void onFirstRef() override;
-    static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
 
     AppOpsManager mAppOpsManager;
 
-    class PlayAudioOpCallback : public BnAppOpsCallback {
+    class PlayAudioOpCallback : public com::android::internal::app::BnAppOpsCallback {
     public:
         explicit PlayAudioOpCallback(const wp<OpPlayAudioMonitor>& monitor);
-        void opChanged(int32_t op, const String16& packageName) override;
+        binder::Status opChanged(int32_t op, int32_t uid, const String16& packageName,
+                                 const String16& persistentDeviceId) override;
 
     private:
         const wp<OpPlayAudioMonitor> mMonitor;
@@ -131,7 +131,6 @@
     float* mainBuffer() const final { return mMainBuffer; }
     int auxEffectId() const final { return mAuxEffectId; }
     status_t getTimestamp(AudioTimestamp& timestamp) final;
-    void signal() final;
     status_t getDualMonoMode(audio_dual_mono_mode_t* mode) const final;
     status_t setDualMonoMode(audio_dual_mono_mode_t mode) final;
     status_t getAudioDescriptionMixLevel(float* leveldB) const final;
@@ -216,21 +215,9 @@
     bool isSpatialized() const final { return mIsSpatialized; }
     bool isBitPerfect() const final { return mIsBitPerfect; }
 
-    /**
-     * Updates the mute state and notifies the audio service. Call this only when holding player
-     * thread lock.
-     */
-    void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
-
     bool getInternalMute() const final { return mInternalMute; }
     void setInternalMute(bool muted) final { mInternalMute = muted; }
 
-    // VolumePortInterface implementation
-    void setPortVolume(float volume) override;
-    void setPortMute(bool muted) override;
-    float getPortVolume() const override { return mVolume; }
-    bool getPortMute() const override { return mMutedFromPort; }
-
     std::string trackFlagsAsString() const final { return toString(mFlags); }
 
 protected:
@@ -291,9 +278,17 @@
     bool isDisabled() const final;
 
     int& fastIndex() final { return mFastIndex; }
-    bool isPlaybackRestricted() const final {
+
+    bool isPlaybackRestrictedOp() const final {
         // The monitor is only created for tracks that can be silenced.
-        return mOpPlayAudioMonitor ? !mOpPlayAudioMonitor->hasOpPlayAudio() : false; }
+        return mOpPlayAudioMonitor
+                       ? !mOpPlayAudioMonitor->hasOpPlayAudio()
+                       : false;
+    }
+
+    bool isPlaybackRestricted() const final {
+        return isPlaybackRestrictedOp() || isPlaybackRestrictedControl();
+    }
 
     const sp<AudioTrackServerProxy>& audioTrackServerProxy() const final {
         return mAudioTrackServerProxy;
@@ -412,13 +407,7 @@
     const bool          mIsSpatialized;
     const bool          mIsBitPerfect;
 
-    // TODO: replace PersistableBundle with own struct
-    // access these two variables only when holding player thread lock.
-    std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
-    std::atomic<mute_state_t> mMuteState;
-    std::atomic<bool>         mMutedFromPort;
     bool                      mInternalMute = false;
-    std::atomic<float>        mVolume = 0.0f;
 };  // end of Track
 
 
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 000244e..5e1fabd 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -83,6 +83,7 @@
     status_t setPreferredMicrophoneFieldDimension(float zoom) final;
     status_t shareAudioHistory(const std::string& sharedAudioPackageName,
             int64_t sharedAudioStartMs) final;
+    status_t setParameters(const String8& keyValuePairs) final;
     int32_t startFrames() const final { return mStartFrames; }
 
     using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2229655..2f5c872 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -34,7 +34,6 @@
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
 #include <audio_utils/Trace.h>
-#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -124,6 +123,8 @@
     return a < b ? a : b;
 }
 
+using com::android::media::audio::audioserver_permissions;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
 using com::android::media::permission::ValidatedAttributionSourceState;
 namespace audioserver_flags = com::android::media::audioserver;
 
@@ -634,6 +635,8 @@
         return "SPATIALIZER";
     case BIT_PERFECT:
         return "BIT_PERFECT";
+    case DIRECT_RECORD:
+        return "DIRECT_RECORD";
     default:
         return "unknown";
     }
@@ -2186,7 +2189,6 @@
 {
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
     mFlagsAsString = toString(output->flags);
-    mNBLogWriter = afThreadCallback->newWriter_l(kLogSize, mThreadName);
 
     // Assumes constructor is called by AudioFlinger with its mutex() held, but
     // it would be safer to explicitly pass initial masterVolume/masterMute as
@@ -2246,7 +2248,6 @@
 
 PlaybackThread::~PlaybackThread()
 {
-    mAfThreadCallback->unregisterWriter(mNBLogWriter);
     free(mSinkBuffer);
     free(mMixerBuffer);
     free(mEffectBuffer);
@@ -2895,6 +2896,15 @@
     mOutput->stream->setVolume(left, right);
 }
 
+void PlaybackThread::checkUpdateTrackMetadataForUid(uid_t uid) {
+    audio_utils::lock_guard _l(mutex());
+    for (const sp<IAfTrack>& track : mActiveTracks) {
+        if (track->uid() == uid) {
+            track->setMetadataHasChanged();
+        }
+    }
+}
+
 // addTrack_l() must be called with ThreadBase::mutex() held
 status_t PlaybackThread::addTrack_l(const sp<IAfTrack>& track)
 {
@@ -3392,49 +3402,37 @@
         return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
-    static const bool stereo_spatialization_property =
-            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
-    const bool stereo_spatialization_enabled =
-            stereo_spatialization_property && com_android_media_audio_stereo_spatialization();
-    if (stereo_spatialization_enabled) {
-        std::map<audio_session_t, std::vector<playback_track_metadata_v7_t> >allSessionsMetadata;
-        for (const sp<IAfTrack>& track : mActiveTracks) {
-            std::vector<playback_track_metadata_v7_t>& sessionMetadata =
-                    allSessionsMetadata[track->sessionId()];
-            auto backInserter = std::back_inserter(sessionMetadata);
-            // No track is invalid as this is called after prepareTrack_l in the same
-            // critical section
-            track->copyMetadataTo(backInserter);
+    std::map<audio_session_t, std::vector<playback_track_metadata_v7_t> >allSessionsMetadata;
+    for (const sp<IAfTrack>& track : mActiveTracks) {
+        std::vector<playback_track_metadata_v7_t>& sessionMetadata =
+                allSessionsMetadata[track->sessionId()];
+        auto backInserter = std::back_inserter(sessionMetadata);
+        // No track is invalid as this is called after prepareTrack_l in the same
+        // critical section
+        track->copyMetadataTo(backInserter);
+    }
+    std::vector<playback_track_metadata_v7_t> spatializedTracksMetaData;
+    for (const auto& [session, sessionTrackMetadata] : allSessionsMetadata) {
+        metadata.tracks.insert(metadata.tracks.end(),
+                sessionTrackMetadata.begin(), sessionTrackMetadata.end());
+        if (auto chain = getEffectChain_l(session) ; chain != nullptr) {
+            chain->sendMetadata_l(sessionTrackMetadata, {});
         }
-        std::vector<playback_track_metadata_v7_t> spatializedTracksMetaData;
-        for (const auto& [session, sessionTrackMetadata] : allSessionsMetadata) {
-            metadata.tracks.insert(metadata.tracks.end(),
+        if ((hasAudioSession_l(session) & IAfThreadBase::SPATIALIZED_SESSION) != 0) {
+            spatializedTracksMetaData.insert(spatializedTracksMetaData.end(),
                     sessionTrackMetadata.begin(), sessionTrackMetadata.end());
-            if (auto chain = getEffectChain_l(session) ; chain != nullptr) {
-                chain->sendMetadata_l(sessionTrackMetadata, {});
-            }
-            if ((hasAudioSession_l(session) & IAfThreadBase::SPATIALIZED_SESSION) != 0) {
-                spatializedTracksMetaData.insert(spatializedTracksMetaData.end(),
-                        sessionTrackMetadata.begin(), sessionTrackMetadata.end());
-            }
-        }
-        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); chain != nullptr) {
-            chain->sendMetadata_l(metadata.tracks, {});
-        }
-        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE); chain != nullptr) {
-            chain->sendMetadata_l(metadata.tracks, spatializedTracksMetaData);
-        }
-        if (auto chain = getEffectChain_l(AUDIO_SESSION_DEVICE); chain != nullptr) {
-            chain->sendMetadata_l(metadata.tracks, {});
-        }
-    } else {
-        auto backInserter = std::back_inserter(metadata.tracks);
-        for (const sp<IAfTrack>& track : mActiveTracks) {
-            // No track is invalid as this is called after prepareTrack_l in the same
-            // critical section
-            track->copyMetadataTo(backInserter);
         }
     }
+    if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); chain != nullptr) {
+        chain->sendMetadata_l(metadata.tracks, {});
+    }
+    if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE); chain != nullptr) {
+        chain->sendMetadata_l(metadata.tracks, spatializedTracksMetaData);
+    }
+    if (auto chain = getEffectChain_l(AUDIO_SESSION_DEVICE); chain != nullptr) {
+        chain->sendMetadata_l(metadata.tracks, {});
+    }
+
     sendMetadataToBackend_l(metadata);
     MetadataUpdate change;
     change.playbackMetadataUpdate = metadata.tracks;
@@ -3786,6 +3784,11 @@
     return nullptr;
 }
 
+// getTracks_l must be called with holding thread lock
+std::vector<sp<IAfTrack>> PlaybackThread::getTracks_l() {
+    return std::vector(mTracks.begin(), mTracks.end());
+}
+
 status_t PlaybackThread::addEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
@@ -4010,8 +4013,6 @@
 bool PlaybackThread::threadLoop()
 NO_THREAD_SAFETY_ANALYSIS  // manual locking of AudioFlinger
 {
-    aflog::setThreadWriter(mNBLogWriter.get());
-
     if (mType == SPATIALIZER) {
         const pid_t tid = getTid();
         if (tid == -1) {  // odd: we are here, we must be a running thread.
@@ -4075,15 +4076,6 @@
 
     acquireWakeLock();
 
-    // mNBLogWriter logging APIs can only be called by a single thread, typically the
-    // thread associated with this PlaybackThread.
-    // If you want to share the mNBLogWriter with other threads (for example, binder threads)
-    // then all such threads must agree to hold a common mutex before logging.
-    // So if you need to log when mutex is unlocked, set logString to a non-NULL string,
-    // and then that string will be logged at the next convenient opportunity.
-    // See reference to logString below.
-    const char *logString = NULL;
-
     // Estimated time for next buffer to be written to hal. This is used only on
     // suspended mode (for now) to help schedule the wait time until next iteration.
     nsecs_t timeLoopNextNs = 0;
@@ -4095,10 +4087,6 @@
     // loopCount is used for statistics and diagnostics.
     for (int64_t loopCount = 0; !exitPending(); ++loopCount)
     {
-        // Log merge requests are performed during AudioFlinger binder transactions, but
-        // that does not cover audio playback. It's requested here for that reason.
-        mAfThreadCallback->requestLogMerge();
-
         cpuStats.sample(myName);
 
         Vector<sp<IAfEffectChain>> effectChains;
@@ -4163,13 +4151,6 @@
                 continue;
             }
 
-            // See comment at declaration of logString for why this is done under mutex()
-            if (logString != NULL) {
-                mNBLogWriter->logTimestamp();
-                mNBLogWriter->log(logString);
-                logString = NULL;
-            }
-
             collectTimestamps_l();
 
             saveOutputTracks();
@@ -5293,19 +5274,12 @@
         state->mColdFutexAddr = &mFastMixerFutex;
         state->mColdGen++;
         state->mDumpState = &mFastMixerDumpState;
-        mFastMixerNBLogWriter = afThreadCallback->newWriter_l(kFastMixerLogSize, "FastMixer");
-        state->mNBLogWriter = mFastMixerNBLogWriter.get();
         sq->end();
         {
             audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
             sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
         }
 
-        NBLog::thread_info_t info;
-        info.id = mId;
-        info.type = NBLog::FASTMIXER;
-        mFastMixerNBLogWriter->log<NBLog::EVENT_THREAD_INFO>(info);
-
         // start the fast mixer
         mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO);
         pid_t tid = mFastMixer->getTid();
@@ -5381,7 +5355,6 @@
         }
 #endif
     }
-    mAfThreadCallback->unregisterWriter(mFastMixerNBLogWriter);
     delete mAudioMixer;
 }
 
@@ -5751,8 +5724,9 @@
             // don't count underruns that occur while stopping or pausing
             // or stopped which can occur when flush() is called while active
             size_t underrunFrames = 0;
-            if (!(track->isStopping() || track->isPausing() || track->isStopped()) &&
-                    recentUnderruns > 0) {
+            if (!(track->isStopping() || track->isPausing()
+                    || track->isStopped() || track->isPaused())
+                && recentUnderruns > 0) {
                 // FIXME fast mixer will pull & mix partial buffers, but we count as a full underrun
                 underrunFrames = recentUnderruns * mFrameCount;
             }
@@ -5875,6 +5849,10 @@
                         volume = masterVolume * track->getPortVolume();
                     }
                 }
+                const auto amn = mAfThreadCallback->getAudioManagerNative();
+                if (amn) {
+                    track->maybeLogPlaybackHardening(*amn);
+                }
                 handleVoipVolume_l(&volume);
 
                 // cache the combined master volume and stream type volume for fast mixer; this
@@ -5886,24 +5864,28 @@
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
                 float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
                 float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
-                if (!audioserver_flags::portid_volume_management()) {
-                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                            /*muteState=*/{masterVolume == 0.f,
-                                           mStreamTypes[track->streamType()].volume == 0.f,
-                                           mStreamTypes[track->streamType()].mute,
-                                           track->isPlaybackRestricted(),
-                                           vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f,
-                                           /*muteFromPortVolume=*/false});
-                } else {
-                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                            /*muteState=*/{masterVolume == 0.f,
-                                           track->getPortVolume() == 0.f,
-                                           /* muteFromStreamMuted= */ false,
-                                           track->isPlaybackRestricted(),
-                                           vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f,
-                                           track->getPortMute()});
+                if (amn) {
+                    if (!audioserver_flags::portid_volume_management()) {
+                        track->processMuteEvent(*amn,
+                                /*muteState=*/{masterVolume == 0.f,
+                                               mStreamTypes[track->streamType()].volume == 0.f,
+                                               mStreamTypes[track->streamType()].mute,
+                                               track->isPlaybackRestrictedOp(),
+                                               vlf == 0.f && vrf == 0.f,
+                                               vh == 0.f,
+                                               /*muteFromPortVolume=*/false,
+                                               track->isPlaybackRestrictedControl()});
+                    } else {
+                        track->processMuteEvent(*amn,
+                                /*muteState=*/{masterVolume == 0.f,
+                                               track->getPortVolume() == 0.f,
+                                               /* muteFromStreamMuted= */ false,
+                                               track->isPlaybackRestrictedOp(),
+                                               vlf == 0.f && vrf == 0.f,
+                                               vh == 0.f,
+                                               track->getPortMute(),
+                                               track->isPlaybackRestrictedControl()});
+                    }
                 }
                 vlf *= volume;
                 vrf *= volume;
@@ -6069,7 +6051,12 @@
                     v = 0;
                 }
             }
+
             handleVoipVolume_l(&v);
+            const auto amn = mAfThreadCallback->getAudioManagerNative();
+            if (amn) {
+                track->maybeLogPlaybackHardening(*amn);
+            }
 
             if (track->isPausing()) {
                 vl = vr = 0;
@@ -6088,24 +6075,28 @@
                     ALOGV("Track right volume out of range: %.3g", vrf);
                     vrf = GAIN_FLOAT_UNITY;
                 }
-                if (!audioserver_flags::portid_volume_management()) {
-                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                            /*muteState=*/{masterVolume == 0.f,
-                                           mStreamTypes[track->streamType()].volume == 0.f,
-                                           mStreamTypes[track->streamType()].mute,
-                                           track->isPlaybackRestricted(),
-                                           vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f,
-                                           /*muteFromPortVolume=*/false});
-                } else {
-                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                            /*muteState=*/{masterVolume == 0.f,
-                                           track->getPortVolume() == 0.f,
-                                           /* muteFromStreamMuted= */ false,
-                                           track->isPlaybackRestricted(),
-                                           vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f,
-                                           track->getPortMute()});
+                if (amn) {
+                    if (!audioserver_flags::portid_volume_management()) {
+                        track->processMuteEvent(*amn,
+                                /*muteState=*/{masterVolume == 0.f,
+                                               mStreamTypes[track->streamType()].volume == 0.f,
+                                               mStreamTypes[track->streamType()].mute,
+                                               track->isPlaybackRestrictedOp(),
+                                               vlf == 0.f && vrf == 0.f,
+                                               vh == 0.f,
+                                               /*muteFromPortVolume=*/false,
+                                               track->isPlaybackRestrictedControl()});
+                    } else {
+                        track->processMuteEvent(*amn,
+                                /*muteState=*/{masterVolume == 0.f,
+                                               track->getPortVolume() == 0.f,
+                                               /* muteFromStreamMuted= */ false,
+                                               track->isPlaybackRestrictedOp(),
+                                               vlf == 0.f && vrf == 0.f,
+                                               vh == 0.f,
+                                               track->getPortMute(),
+                                               track->isPlaybackRestrictedControl()});
+                    }
                 }
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
@@ -6835,6 +6826,7 @@
 
     const bool clientVolumeMute = (left == 0.f && right == 0.f);
 
+    const auto amn = mAfThreadCallback->getAudioManagerNative();
     if (!audioserver_flags::portid_volume_management()) {
         if (mMasterMute || mStreamTypes[track->streamType()].mute ||
             track->isPlaybackRestricted()) {
@@ -6857,14 +6849,17 @@
                 right *= mMasterBalanceRight;
             }
         }
-        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                /*muteState=*/{mMasterMute,
-                               mStreamTypes[track->streamType()].volume == 0.f,
-                               mStreamTypes[track->streamType()].mute,
-                               track->isPlaybackRestricted(),
-                               clientVolumeMute,
-                               shaperVolume == 0.f,
-                               /*muteFromPortVolume=*/false});
+        if (amn) {
+            track->processMuteEvent(*amn,
+                    /*muteState=*/{mMasterMute,
+                                   mStreamTypes[track->streamType()].volume == 0.f,
+                                   mStreamTypes[track->streamType()].mute,
+                                   track->isPlaybackRestrictedOp(),
+                                   clientVolumeMute,
+                                   shaperVolume == 0.f,
+                                   /*muteFromPortVolume=*/false,
+                                   track->isPlaybackRestrictedControl()});
+        }
     } else {
         if (mMasterMute || track->isPlaybackRestricted()) {
             left = right = 0;
@@ -6886,16 +6881,21 @@
                 right *= mMasterBalanceRight;
             }
         }
-        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                /*muteState=*/{mMasterMute,
-                               track->getPortVolume() == 0.f,
-                               /* muteFromStreamMuted= */ false,
-                               track->isPlaybackRestricted(),
-                               clientVolumeMute,
-                               shaperVolume == 0.f,
-                               track->getPortMute()});
+        if (amn) {
+            track->processMuteEvent(*amn,
+                    /*muteState=*/{mMasterMute,
+                                   track->getPortVolume() == 0.f,
+                                   /* muteFromStreamMuted= */ false,
+                                   track->isPlaybackRestrictedOp(),
+                                   clientVolumeMute,
+                                   shaperVolume == 0.f,
+                                   track->getPortMute(),
+                                   track->isPlaybackRestrictedControl()});
+        }
     }
-
+    if (amn) {
+        track->maybeLogPlaybackHardening(*amn);
+    }
     if (lastTrack) {
         track->setFinalVolume(left, right);
         if (left != mLeftVolFloat || right != mRightVolFloat) {
@@ -8229,15 +8229,19 @@
         AudioStreamIn* input,
         audio_io_handle_t id,
         bool systemReady) {
-    return sp<RecordThread>::make(afThreadCallback, input, id, systemReady);
+    if (input->flags & AUDIO_INPUT_FLAG_DIRECT) {
+        return sp<DirectRecordThread>::make(afThreadCallback, input, id, systemReady);
+    }
+    return sp<RecordThread>::make(afThreadCallback, RECORD, input, id, systemReady);
 }
 
 RecordThread::RecordThread(const sp<IAfThreadCallback>& afThreadCallback,
+                                         ThreadBase::type_t type,
                                          AudioStreamIn *input,
                                          audio_io_handle_t id,
                                          bool systemReady
                                          ) :
-    ThreadBase(afThreadCallback, id, RECORD, systemReady, false /* isOut */),
+    ThreadBase(afThreadCallback, id, type, systemReady, false /* isOut */),
     mInput(input),
     mSource(mInput),
     mActiveTracks(&this->mLocalLog),
@@ -8259,7 +8263,6 @@
 {
     snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id);
     mFlagsAsString = toString(input->flags);
-    mNBLogWriter = afThreadCallback->newWriter_l(kLogSize, mThreadName);
 
     if (mInput->audioHwDev != nullptr) {
         mIsMsdDevice = strcmp(
@@ -8368,9 +8371,6 @@
 #ifdef TEE_SINK
         // FIXME
 #endif
-        mFastCaptureNBLogWriter =
-                afThreadCallback->newWriter_l(kFastCaptureLogSize, "FastCapture");
-        state->mNBLogWriter = mFastCaptureNBLogWriter.get();
         sq->end();
         {
             audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastCapture->getTid());
@@ -8416,8 +8416,6 @@
         }
         mFastCapture.clear();
     }
-    mAfThreadCallback->unregisterWriter(mFastCaptureNBLogWriter);
-    mAfThreadCallback->unregisterWriter(mNBLogWriter);
     free(mRsmpInBuffer);
 }
 
@@ -9131,9 +9129,23 @@
     }
 
     if (maxSharedAudioHistoryMs != 0) {
-        if (!captureHotwordAllowed(attributionSource)) {
-            lStatus = PERMISSION_DENIED;
-            goto Exit;
+        if (audioserver_permissions()) {
+            const auto res = mAfThreadCallback->getPermissionProvider().checkPermission(
+                    CAPTURE_AUDIO_HOTWORD,
+                    attributionSource.uid);
+            if (!res.ok()) {
+                lStatus = aidl_utils::statusTFromBinderStatus(res.error());
+                goto Exit;
+            }
+            if (!res.value()) {
+                lStatus = PERMISSION_DENIED;
+                goto Exit;
+            }
+        } else {
+            if (!captureHotwordAllowed(attributionSource)) {
+                lStatus = PERMISSION_DENIED;
+                goto Exit;
+            }
         }
         if (maxSharedAudioHistoryMs < 0
                 || maxSharedAudioHistoryMs > kMaxSharedAudioHistoryMs) {
@@ -9254,7 +9265,11 @@
         if (!mSharedAudioPackageName.empty()
                 && mSharedAudioPackageName == attributionSource.packageName
                 && mSharedAudioSessionId == sessionId
-                && captureHotwordAllowed(attributionSource)) {
+                && (audioserver_permissions() ?
+                      mAfThreadCallback->getPermissionProvider().checkPermission(
+                          CAPTURE_AUDIO_HOTWORD,
+                          attributionSource.uid).value_or(false)
+                    : captureHotwordAllowed(attributionSource))) {
             startFrames = mSharedAudioStartFrames;
         }
 
@@ -9670,6 +9685,18 @@
     }
 }
 
+// --------------------------------------------------------------------------------------
+//              DirectRecordThread
+// --------------------------------------------------------------------------------------
+
+DirectRecordThread::DirectRecordThread(const sp<IAfThreadCallback>& afThreadCallback,
+                                     AudioStreamIn* input, audio_io_handle_t id, bool systemReady)
+    : RecordThread(afThreadCallback, DIRECT_RECORD, input, id, systemReady) {
+    ALOGD("%s:", __func__);
+}
+
+DirectRecordThread::~DirectRecordThread() {}
+
 void ResamplerBufferProvider::reset()
 {
     const auto threadBase = mRecordTrack->thread().promote();
@@ -10417,8 +10444,11 @@
             activeTracks.add(t);
         }
         localPortId = mPortId;
+        ALOGD("%s: localPortId = %d", __func__, localPortId);
+        mPortId = AUDIO_PORT_HANDLE_NONE;
     }
     for (const sp<IAfMmapTrack>& t : activeTracks) {
+        ALOGD("%s: t->portId() = %d", __func__, t->portId());
         stop(t->portId());
     }
     // This will decrement references and may cause the destruction of this thread.
@@ -10621,9 +10651,13 @@
     sp<IAfMmapTrack> track = IAfMmapTrack::create(
             this, attr == nullptr ? mAttr : *attr, mSampleRate, mFormat,
                                         mChannelMask, mSessionId, isOutput(),
-                                        client.attributionSource,
+                                        adjAttributionSource,
                                         IPCThreadState::self()->getCallingPid(), portId,
                                         volume, muted);
+
+    // MMAP tracks are only created when they are started, so mark them as Start for the purposes
+    // of the IAfTrackBase interface
+    track->start();
     if (!isOutput()) {
         track->setSilenced_l(isClientSilenced_l(portId));
     }
@@ -10634,7 +10668,7 @@
     } else if (!track->isSilenced_l()) {
         for (const sp<IAfMmapTrack>& t : mActiveTracks) {
             if (t->isSilenced_l()
-                    && t->uid() != static_cast<uid_t>(client.attributionSource.uid)) {
+                    && t->uid() != static_cast<uid_t>(adjAttributionSource.uid)) {
                 t->invalidate();
             }
         }
@@ -10648,7 +10682,9 @@
         chain->incActiveTrackCnt();
     }
 
-    track->logBeginInterval(patchSinksToString(&mPatch)); // log to MediaMetrics
+    // log to MediaMetrics
+    track->logBeginInterval(
+            isOutput() ? patchSinksToString(&mPatch) : patchSourcesToString(&mPatch));
     *handle = portId;
 
     if (mActiveTracks.size() == 1) {
@@ -10689,6 +10725,7 @@
 
     mActiveTracks.remove(track);
     eraseClientSilencedState_l(track->portId());
+    track->stop();
 
     mutex().unlock();
     if (isOutput()) {
@@ -11000,6 +11037,16 @@
     // Force meteadata update after a route change
     mActiveTracks.setHasChanged();
 
+    const std::string patchSourcesAsString = isOutput() ? "" : patchSourcesToString(patch);
+    const std::string patchSinksAsString = isOutput() ? patchSinksToString(patch) : "";
+    mThreadMetrics.logEndInterval();
+    mThreadMetrics.logCreatePatch(patchSourcesAsString, patchSinksAsString);
+    mThreadMetrics.logBeginInterval();
+    for (const auto &track : mActiveTracks) {
+        track->logEndInterval();
+        track->logBeginInterval(isOutput() ? patchSinksAsString : patchSourcesAsString);
+    }
+
     return status;
 }
 
@@ -11326,6 +11373,15 @@
     return NO_ERROR;
 }
 
+void MmapPlaybackThread::checkUpdateTrackMetadataForUid(uid_t uid) {
+    audio_utils::lock_guard _l(mutex());
+    for (const sp<IAfMmapTrack>& track : mActiveTracks) {
+        if (track->uid() == uid) {
+            track->setMetadataHasChanged();
+        }
+    }
+}
+
 void MmapPlaybackThread::invalidateTracks(audio_stream_type_t streamType)
 {
     audio_utils::lock_guard _l(mutex());
@@ -11357,7 +11413,7 @@
 }
 
 void MmapPlaybackThread::processVolume_l()
-NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
+NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent
 {
     float volume = 0;
     if (!audioserver_flags::portid_volume_management()) {
@@ -11383,6 +11439,13 @@
             }
         }
     }
+
+    bool shouldMutePlaybackHardening = std::all_of(mActiveTracks.begin(), mActiveTracks.end(),
+            [](const auto& x) { return x->isPlaybackRestrictedControl(); });
+    if (shouldMutePlaybackHardening) {
+        volume = 0;
+    }
+
     if (volume != mHalVolFloat) {
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
@@ -11413,29 +11476,35 @@
                 }
             }
         }
+        const auto amn = mAfThreadCallback->getAudioManagerNative();
         for (const sp<IAfMmapTrack>& track : mActiveTracks) {
             track->setMetadataHasChanged();
-            if (!audioserver_flags::portid_volume_management()) {
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+            if (amn) {
+                if (!audioserver_flags::portid_volume_management()) {
+                    track->processMuteEvent(*amn,
+                            /*muteState=*/{mMasterMute,
+                            streamVolume_l() == 0.f,
+                            streamMuted_l(),
+                            // TODO(b/241533526): adjust logic to include mute from AppOps
+                            false /*muteFromPlaybackRestricted*/,
+                            false /*muteFromClientVolume*/,
+                            false /*muteFromVolumeShaper*/,
+                            false /*muteFromPortVolume*/,
+                            shouldMutePlaybackHardening});
+                } else {
+                    track->processMuteEvent(*amn,
                         /*muteState=*/{mMasterMute,
-                        streamVolume_l() == 0.f,
-                        streamMuted_l(),
-                        // TODO(b/241533526): adjust logic to include mute from AppOps
-                        false /*muteFromPlaybackRestricted*/,
-                        false /*muteFromClientVolume*/,
-                        false /*muteFromVolumeShaper*/,
-                        false /*muteFromPortVolume*/});
-            } else {
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                    /*muteState=*/{mMasterMute,
-                                   track->getPortVolume() == 0.f,
-                                   /* muteFromStreamMuted= */ false,
-                                   // TODO(b/241533526): adjust logic to include mute from AppOps
-                                   false /*muteFromPlaybackRestricted*/,
-                                   false /*muteFromClientVolume*/,
-                                   false /*muteFromVolumeShaper*/,
-                                   track->getPortMute()});
+                                       track->getPortVolume() == 0.f,
+                                       /* muteFromStreamMuted= */ false,
+                                       // TODO(b/241533526): adjust logic to include mute from AppOps
+                                       false /*muteFromPlaybackRestricted*/,
+                                       false /*muteFromClientVolume*/,
+                                       false /*muteFromVolumeShaper*/,
+                                       track->getPortMute(),
+                                       shouldMutePlaybackHardening});
                 }
+                track->maybeLogPlaybackHardening(*amn);
+            }
         }
     }
 }
@@ -11454,8 +11523,21 @@
                 .content_type = track->attributes().content_type,
                 .gain = mHalVolFloat, // TODO: propagate from aaudio pre-mix volume
         };
-        trackMetadata.channel_mask = track->channelMask(),
-        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+        trackMetadata.channel_mask = track->channelMask();
+        std::string tagStr(track->attributes().tags);
+        if (audioserver_flags::enable_gmap_mode() && track->attributes().usage == AUDIO_USAGE_GAME
+                && afThreadCallback()->hasAlreadyCaptured(track->uid())
+                && (tagStr.size() + strlen(AUDIO_ATTRIBUTES_TAG_GMAP_BIDIRECTIONAL)
+                    + (tagStr.size() ? 1 : 0))
+                    < AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+
+            if (tagStr.size() != 0) {
+                tagStr.append(1, AUDIO_ATTRIBUTES_TAGS_SEPARATOR);
+            }
+            tagStr.append(AUDIO_ATTRIBUTES_TAG_GMAP_BIDIRECTIONAL);
+        }
+        strncpy(trackMetadata.tags, tagStr.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+        trackMetadata.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
         metadata.tracks.push_back(trackMetadata);
     }
     mOutput->stream->updateSourceMetadata(metadata);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 6784341..ddcde5d 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -741,9 +741,6 @@
                 // Updated by updateSuspendedSessions_l() only.
                 KeyedVector< audio_session_t, KeyedVector< int, sp<SuspendedSessionDesc> > >
                                         mSuspendedSessions;
-                // TODO: add comment and adjust size as needed
-                static const size_t     kLogSize = 4 * 1024;
-                sp<NBLog::Writer>       mNBLogWriter;
                 bool                    mSystemReady;
 
     // NO_THREAD_SAFETY_ANALYSIS - mTimestamp and mTimestampVerifier should be
@@ -1187,6 +1184,8 @@
 
     IAfTrack* getTrackById_l(audio_port_handle_t trackId) final REQUIRES(mutex());
 
+    std::vector<sp<IAfTrack>> getTracks_l() final REQUIRES(mutex());
+
     bool hasMixer() const final {
                     return mType == MIXER || mType == DUPLICATING || mType == SPATIALIZER;
                 }
@@ -1242,6 +1241,8 @@
 
     std::string getLocalLogHeader() const override;
 
+    void checkUpdateTrackMetadataForUid(uid_t uid) final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1550,9 +1551,6 @@
     sp<NBAIO_Sink>          mNormalSink;
 
     uint32_t                mScreenState;   // cached copy of gScreenState
-    // TODO: add comment and adjust size as needed
-    static const size_t     kFastMixerLogSize = 8 * 1024;
-    sp<NBLog::Writer>       mFastMixerNBLogWriter;
 
     // Downstream patch latency, available if mDownstreamLatencyStatMs.getN() > 0.
     audio_utils::Statistics<double> mDownstreamLatencyStatMs{0.999};
@@ -2003,6 +2001,7 @@
     }
 
             RecordThread(const sp<IAfThreadCallback>& afThreadCallback,
+                    ThreadBase::type_t type,
                     AudioStreamIn *input,
                     audio_io_handle_t id,
                     bool systemReady
@@ -2236,6 +2235,13 @@
             audio_session_t                     mSharedAudioSessionId = AUDIO_SESSION_NONE;
 };
 
+class DirectRecordThread final : public RecordThread {
+  public:
+    DirectRecordThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamIn* input,
+                       audio_io_handle_t id, bool systemReady);
+    ~DirectRecordThread() override;
+};
+
 class MmapThread : public ThreadBase, public virtual IAfMmapThread
 {
  public:
@@ -2447,6 +2453,8 @@
     void stopMelComputation_l() final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    void checkUpdateTrackMetadataForUid(uid_t uid) final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     void dumpInternals_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
     float streamVolume_l() const REQUIRES(mutex()) {
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 2b3d772..6dea786 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -85,6 +85,8 @@
 
     wp<IAfThreadBase> thread() const final { return mThread; }
 
+    void signal() final;
+
     const sp<ServerProxy>& serverProxy() const final { return mServerProxy; }
 
 #ifdef TEE_SINK
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index c9b578f..9046859 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -30,10 +30,16 @@
 
 #include <audio_utils/StringUtils.h>
 #include <audio_utils/minifloat.h>
+#include <com_android_media_audio.h>
+#include <com_android_media_audioserver.h>
+#include <media/AppOpsSession.h>
+#include <media/AudioPermissionPolicy.h>
 #include <media/AudioValidator.h>
+#include <media/IPermissionProvider.h>
 #include <media/RecordBufferConverter.h>
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
+#include <mediautils/Runnable.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/SharedMemoryAllocator.h>
 #include <private/media/AudioTrackShared.h>
@@ -67,12 +73,20 @@
        std::move(_tmp.value());             \
      })
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 namespace android {
 
 using ::android::aidl_utils::binderStatusFromStatusT;
+using ::com::android::media::audio::hardening_impl;
+using ::com::android::media::audio::hardening_partial;
+using ::com::android::media::audio::hardening_strict;
 using binder::Status;
+using com::android::media::audio::audioserver_permissions;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
 using content::AttributionSourceState;
 using media::VolumeShaper;
+
 // ----------------------------------------------------------------------------
 //      TrackBase
 // ----------------------------------------------------------------------------
@@ -428,6 +442,14 @@
     }
 }
 
+void TrackBase::signal() {
+    const sp<IAfThreadBase> thread = mThread.promote();
+    if (thread != nullptr) {
+        audio_utils::lock_guard _l(thread->mutex());
+        thread->broadcast_l();
+    }
+}
+
 PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
         IAfThreadBase* thread, const Timeout& timeout)
     : mProxy(proxy)
@@ -681,15 +703,11 @@
             const AttributionSourceState& attributionSource, const audio_attributes_t& attr, int id,
             audio_stream_type_t streamType)
 {
-    Vector<String16> packages;
     const uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    getPackagesForUid(uid, packages);
     if (isServiceUid(uid)) {
-        if (packages.isEmpty()) {
-            ALOGW("OpPlayAudio: not muting track:%d usage:%d for service UID %d", id, attr.usage,
-                  uid);
-            return nullptr;
-        }
+        ALOGW("OpPlayAudio: not muting track:%d usage:%d for service UID %d", id, attr.usage,
+              uid);
+        return nullptr;
     }
     // stream type has been filtered by audio policy to indicate whether it can be muted
     if (streamType == AUDIO_STREAM_ENFORCED_AUDIBLE) {
@@ -769,10 +787,10 @@
         const wp<OpPlayAudioMonitor>& monitor) : mMonitor(monitor)
 { }
 
-void OpPlayAudioMonitor::PlayAudioOpCallback::opChanged(int32_t op,
-            const String16& packageName) {
+binder::Status OpPlayAudioMonitor::PlayAudioOpCallback::opChanged(int32_t op, int32_t,
+            const String16& packageName, const String16&) {
     if (op != AppOpsManager::OP_PLAY_AUDIO) {
-        return;
+        return binder::Status::ok();
     }
 
     ALOGI("%s OP_PLAY_AUDIO callback received for %s", __func__, String8(packageName).c_str());
@@ -780,14 +798,7 @@
     if (monitor != NULL) {
         monitor->checkPlayAudioForUsage(/*doBroadcast=*/true);
     }
-}
-
-// static
-void OpPlayAudioMonitor::getPackagesForUid(
-    uid_t uid, Vector<String16>& packages)
-{
-    PermissionController permissionController;
-    permissionController.getPackagesForUid(uid, packages);
+    return binder::Status::ok();
 }
 
 // ----------------------------------------------------------------------------
@@ -871,7 +882,10 @@
             bool isBitPerfect,
             float volume,
             bool muted)
-    :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
+    :
+    AfPlaybackCommon(*this, *thread, volume, muted,
+                     attr, attributionSource, thread->isOffloadOrMmap(), type != TYPE_PATCH),
+    TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
                   //       Either document why it is safe in this case or address the
@@ -906,8 +920,7 @@
     mFlags(flags),
     mSpeed(speed),
     mIsSpatialized(isSpatialized),
-    mIsBitPerfect(isBitPerfect),
-    mVolume(volume)
+    mIsBitPerfect(isBitPerfect)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -955,13 +968,11 @@
 
     populateUsageAndContentTypeFromStreamType();
 
-    mMutedFromPort = muted;
-
     // Audio patch and call assistant volume are always max
     if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
             || mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
-        mVolume = 1.0f;
-        mMutedFromPort = false;
+        setPortVolume(1.0f);
+        setPortMute(false);
     }
 
     mServerLatencySupported = checkServerLatencySupported(format, flags);
@@ -1204,7 +1215,7 @@
             20.0 * log10(float_from_gain(gain_minifloat_unpack_right(vlr))),
             20.0 * log10(vsVolume.first), // VolumeShaper(s) total volume
             vsVolume.second ? 'A' : ' ',  // if any VolumeShapers active
-            20.0 * log10(mVolume),
+            20.0 * log10(getPortVolume()),
             getPortMute() ? "true" : "false",
 
             mCblk->mServer,
@@ -1246,7 +1257,8 @@
     status_t status = mServerProxy->obtainBuffer(&buf);
     buffer->frameCount = buf.mFrameCount;
     buffer->raw = buf.mRaw;
-    if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused() && !isOffloaded()) {
+    if (buf.mFrameCount == 0 && !isStopping() && !isPausing()
+        && !isStopped() && !isPaused() && !isOffloaded()) {
         ALOGV("%s(%d): underrun,  framesReady(%zu) < framesDesired(%zd), state: %d",
                 __func__, mId, buf.mFrameCount, desiredFrames, (int)mState);
         mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
@@ -1493,6 +1505,7 @@
         status = BAD_VALUE;
     }
     if (status == NO_ERROR) {
+        startPlaybackDelivery();
         // send format to AudioManager for playback activity monitoring
         const sp<IAudioManager> audioManager =
                 thread->afThreadCallback()->getOrCreateAudioManager();
@@ -1554,6 +1567,15 @@
         }
         forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->stop(); });
     }
+    // TODO(b/385417236)
+    // Due to the complexity of state management for offload we do not call endDeliveryRequest().
+    // For offload tracks, sonification may continue significantly after the STOP
+    // phase begins. Leave the session on-going until the track is eventually
+    // destroyed. We continue to allow appop callbacks during STOPPING and STOPPED state.
+    // This is suboptimal but harmless.
+    if (!isOffloaded()) {
+        endPlaybackDelivery();
+    }
 }
 
 void Track::pause()
@@ -1598,6 +1620,9 @@
         // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
         forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->pause(); });
     }
+    // When stopping a paused track, there will be two endDeliveryRequests. This is tolerated by
+    // the implementation.
+    endPlaybackDelivery();
 }
 
 void Track::flush()
@@ -1734,31 +1759,6 @@
     return INVALID_OPERATION;
 }
 
-void Track::setPortVolume(float volume) {
-    mVolume = volume;
-    if (mType != TYPE_PATCH) {
-        // Do not recursively propagate a PatchTrack setPortVolume to
-        // downstream PatchTracks.
-        forEachTeePatchTrack_l([volume](const auto &patchTrack) {
-            patchTrack->setPortVolume(volume);
-        });
-    }
-}
-
-void Track::setPortMute(bool muted) {
-    if (mMutedFromPort == muted) {
-        return;
-    }
-    mMutedFromPort = muted;
-    if (mType != TYPE_PATCH) {
-        // Do not recursively propagate a PatchTrack setPortVolume to
-        // downstream PatchTracks.
-        forEachTeePatchTrack_l([muted](const auto &patchTrack) {
-            patchTrack->setPortMute(muted);
-        });
-    }
-}
-
 VolumeShaper::Status Track::applyVolumeShaper(
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
@@ -1815,7 +1815,21 @@
     };
 
     metadata.channel_mask = mChannelMask;
-    strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+    std::string tagStr(mAttr.tags);
+    const sp<IAfThreadBase> thread = mThread.promote();
+    if (audioserver_flags::enable_gmap_mode() && mAttr.usage == AUDIO_USAGE_GAME
+            && thread != nullptr && thread->afThreadCallback()->hasAlreadyCaptured(uid())
+            && (tagStr.size() + strlen(AUDIO_ATTRIBUTES_TAG_GMAP_BIDIRECTIONAL)
+                + (tagStr.size() ? 1 : 0))
+                < AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+        if (tagStr.size() != 0) {
+            tagStr.append(1, AUDIO_ATTRIBUTES_TAGS_SEPARATOR);
+        }
+        tagStr.append(AUDIO_ATTRIBUTES_TAG_GMAP_BIDIRECTIONAL);
+    }
+    strncpy(metadata.tags, tagStr.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+    metadata.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
     *backInserter++ = metadata;
 }
 
@@ -1840,35 +1854,6 @@
     mTeePatchesToUpdate = std::move(teePatchesToUpdate);
 }
 
-// must be called with player thread lock held
-void Track::processMuteEvent_l(const sp<
-    IAudioManager>& audioManager, mute_state_t muteState)
-{
-    if (mMuteState == muteState) {
-        // mute state did not change, do nothing
-        return;
-    }
-
-    status_t result = UNKNOWN_ERROR;
-    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
-        if (mMuteEventExtras == nullptr) {
-            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
-        }
-        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey), static_cast<int>(muteState));
-
-        result = audioManager->portEvent(mPortId, PLAYER_UPDATE_MUTED, mMuteEventExtras);
-    }
-
-    if (result == OK) {
-        ALOGI("%s(%d): processed mute state for port ID %d from %#x to %#x", __func__, id(),
-              mPortId, static_cast<int>(mMuteState.load()), static_cast<int>(muteState));
-        mMuteState = muteState;
-    } else {
-        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
-              mPortId, result);
-    }
-}
-
 status_t Track::getTimestamp(AudioTimestamp& timestamp)
 {
     if (!isOffloaded() && !isDirect()) {
@@ -2083,16 +2068,6 @@
     (void) syscall(__NR_futex, &cblk->mFutex, FUTEX_WAKE, INT_MAX);
 }
 
-void Track::signal()
-{
-    const sp<IAfThreadBase> thread = mThread.promote();
-    if (thread != 0) {
-        auto* const t = thread->asIAfPlaybackThread().get();
-        audio_utils::lock_guard _l(t->mutex());
-        t->broadcast_l();
-    }
-}
-
 status_t Track::getDualMonoMode(audio_dual_mono_mode_t* mode) const
 {
     status_t status = INVALID_OPERATION;
@@ -2168,7 +2143,9 @@
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
             audio_utils::lock_guard lock(t->mutex());
-            status = t->getOutput_l()->stream->getPlaybackRateParameters(playbackRate);
+            if (auto* const output = t->getOutput_l()) {
+                status = output->stream->getPlaybackRateParameters(playbackRate);
+            }
             ALOGD_IF((status == NO_ERROR) &&
                     !isAudioPlaybackRateEqual(mPlaybackRateParameters, *playbackRate),
                     "%s: playbackRate inconsistent", __func__);
@@ -2186,9 +2163,11 @@
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
             audio_utils::lock_guard lock(t->mutex());
-            status = t->getOutput_l()->stream->setPlaybackRateParameters(playbackRate);
-            if (status == NO_ERROR) {
-                mPlaybackRateParameters = playbackRate;
+            if (auto* const output = t->getOutput_l()) {
+                status = output->stream->setPlaybackRateParameters(playbackRate);
+                if (status == NO_ERROR) {
+                    mPlaybackRateParameters = playbackRate;
+                }
             }
         }
     }
@@ -2364,7 +2343,12 @@
             audio_channel_mask_t channelMask,
             size_t frameCount,
             const AttributionSourceState& attributionSource)
-    :   Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
+    :
+    AfPlaybackCommon(*this, *playbackThread, /* volume= */ 0.0f,
+                     /* muted= */ false,
+                     AUDIO_ATTRIBUTES_INITIALIZER, attributionSource, /* isOffloadOrMmap= */ false,
+                     /* shouldPlaybackHarden= */ false),
+    Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
               AUDIO_ATTRIBUTES_INITIALIZER ,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
@@ -2672,7 +2656,12 @@
                                                      float speed,
                                                      float volume,
                                                      bool muted)
-    :   Track(playbackThread, NULL, streamType,
+    : AfPlaybackCommon(*this, *playbackThread, volume, muted,
+                       AUDIO_ATTRIBUTES_INITIALIZER,
+                       audioServerAttributionSource(getpid()),
+                       /* isOffloadOrMmap= */ false,
+                       /* shouldPlaybackHarden= */ false),
+    Track(playbackThread, NULL, streamType,
               AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
@@ -2831,6 +2820,7 @@
     binder::Status setPreferredMicrophoneFieldDimension(float zoom) final;
     binder::Status shareAudioHistory(
             const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) final;
+    binder::Status setParameters(const ::std::string& keyValuePairs) final;
 
 private:
     const sp<IAfRecordTrack> mRecordTrack;
@@ -2900,6 +2890,11 @@
             mRecordTrack->shareAudioHistory(sharedAudioPackageName, sharedAudioStartMs));
 }
 
+binder::Status RecordHandle::setParameters(const ::std::string& keyValuePairs) {
+    return binderStatusFromStatusT(mRecordTrack->setParameters(
+            String8(keyValuePairs.c_str())));
+}
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::RecordTrack"
@@ -3287,11 +3282,23 @@
     attributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
     attributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingPid));
     attributionSource.token = sp<BBinder>::make();
-    if (!captureHotwordAllowed(attributionSource)) {
-        return PERMISSION_DENIED;
+    const sp<IAfThreadBase> thread = mThread.promote();
+    if (audioserver_permissions()) {
+        const auto res = thread->afThreadCallback()->getPermissionProvider().checkPermission(
+                    CAPTURE_AUDIO_HOTWORD,
+                    attributionSource.uid);
+            if (!res.ok()) {
+                return aidl_utils::statusTFromBinderStatus(res.error());
+            }
+            if (!res.value()) {
+                return PERMISSION_DENIED;
+            }
+    } else {
+        if (!captureHotwordAllowed(attributionSource)) {
+            return PERMISSION_DENIED;
+        }
     }
 
-    const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         auto* const recordThread = thread->asIAfRecordThread().get();
         status_t status = recordThread->shareAudioHistory(
@@ -3305,6 +3312,18 @@
     }
 }
 
+status_t RecordTrack::setParameters(const String8& keyValuePairs) {
+    const sp<IAfThreadBase> thread = mThread.promote();
+    if (thread == nullptr) {
+        ALOGE("%s(%d): thread is dead", __func__, mId);
+        return FAILED_TRANSACTION;
+    } else if (thread->type() == IAfThreadBase::DIRECT_RECORD) {
+        return thread->setParameters(keyValuePairs);
+    } else {
+        return PERMISSION_DENIED;
+    }
+}
+
 void RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
 {
 
@@ -3321,7 +3340,7 @@
     };
     metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
+    metadata.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
     *backInserter++ = metadata;
 }
 
@@ -3666,6 +3685,149 @@
 }
 
 // ----------------------------------------------------------------------------
+// AfPlaybackCommon
+
+static AfPlaybackCommon::EnforcementLevel getOpControlEnforcementLevel(audio_usage_t usage,
+        IAfThreadCallback& cb) {
+    using enum AfPlaybackCommon::EnforcementLevel;
+    if (cb.isHardeningOverrideEnabled()) {
+        return FULL;
+    }
+    if (usage == AUDIO_USAGE_VIRTUAL_SOURCE || media::permission::isSystemUsage(usage)) {
+        return NONE;
+    }
+    if (hardening_strict()) {
+        return FULL;
+    } else if (hardening_partial()) {
+        return PARTIAL;
+    } else {
+        return NONE;
+    }
+}
+
+AfPlaybackCommon::AfPlaybackCommon(IAfTrackBase& self, IAfThreadBase& thread, float volume,
+                                   bool muted, const audio_attributes_t& attr,
+                                   const AttributionSourceState& attributionSource,
+                                   bool isOffloadOrMmap,
+                                   bool shouldPlaybackHarden)
+    : mSelf(self),
+      mMutedFromPort(muted),
+      mVolume(volume),
+      mEnforcementLevel(getOpControlEnforcementLevel(attr.usage, *thread.afThreadCallback())) {
+    ALOGI("creating track with enforcement level %d", mEnforcementLevel);
+    using AppOpsManager::OP_CONTROL_AUDIO_PARTIAL;
+    using AppOpsManager::OP_CONTROL_AUDIO;
+    using media::permission::Ops;
+    using media::permission::skipOpsForUid;
+    using media::permission::ValidatedAttributionSourceState;
+
+    if (hardening_impl()) {
+        // Don't bother for trusted uids
+        if (!skipOpsForUid(attributionSource.uid) && shouldPlaybackHarden) {
+            if (isOffloadOrMmap) {
+                mExecutor.emplace();
+            }
+            auto thread_wp = wp<IAfThreadBase>::fromExisting(&thread);
+            mOpControlPartialSession.emplace(
+                    ValidatedAttributionSourceState::createFromTrustedSource(attributionSource),
+                    Ops{.attributedOp = OP_CONTROL_AUDIO_PARTIAL},
+                    [this, isOffloadOrMmap, thread_wp](bool isPermitted) {
+                        mHasOpControlPartial.store(isPermitted, std::memory_order_release);
+                        if (isOffloadOrMmap) {
+                            mExecutor->enqueue(mediautils::Runnable{[thread_wp]() {
+                                auto thread = thread_wp.promote();
+                                if (thread != nullptr) {
+                                    audio_utils::lock_guard l {thread->mutex()};
+                                    thread->broadcast_l();
+                                }
+                            }});
+                        }
+                    }
+            );
+            // Same as previous but for mHasOpControlFull, OP_CONTROL_AUDIO
+            mOpControlFullSession.emplace(
+                    ValidatedAttributionSourceState::createFromTrustedSource(attributionSource),
+                    Ops{.attributedOp = OP_CONTROL_AUDIO},
+                    [this, isOffloadOrMmap, thread_wp](bool isPermitted) {
+                        mHasOpControlFull.store(isPermitted, std::memory_order_release);
+                        if (isOffloadOrMmap) {
+                            mExecutor->enqueue(mediautils::Runnable{[thread_wp]() {
+                                auto thread = thread_wp.promote();
+                                if (thread != nullptr) {
+                                    audio_utils::lock_guard l {thread->mutex()};
+                                    thread->broadcast_l();
+                                }
+                            }});
+                        }
+                    }
+            );
+        }
+    }
+}
+
+void AfPlaybackCommon::maybeLogPlaybackHardening(media::IAudioManagerNative& am) const {
+    using media::IAudioManagerNative::HardeningType::PARTIAL;
+    using media::IAudioManagerNative::HardeningType::FULL;
+    // The op state deviates from if the track is actually muted if the playback was exempted for
+    // some compat reason.
+    // The state could have technically TOCTOU, but this is for metrics and that is very unlikely
+    if (!hasOpControlPartial()) {
+        if (!mPlaybackHardeningLogged.exchange(true, std::memory_order_acq_rel)) {
+            am.playbackHardeningEvent(mSelf.uid(), PARTIAL,
+                                      /* bypassed= */
+                                      !isPlaybackRestrictedControl());
+        }
+    } else if (!hasOpControlFull()) {
+        if (!mPlaybackHardeningLogged.exchange(true, std::memory_order_acq_rel)) {
+            am.playbackHardeningEvent(mSelf.uid(), FULL,
+                                      /* bypassed= */
+                                      !isPlaybackRestrictedControl());
+        }
+    }
+}
+
+void AfPlaybackCommon::processMuteEvent(media::IAudioManagerNative& am, mute_state_t muteState) {
+    const auto trackId = mSelf.id();
+    const auto portId = mSelf.portId();
+    if (mMuteState == muteState) {
+        // mute state did not change, do nothing
+        return;
+    }
+
+    const auto result = portId != AUDIO_PORT_HANDLE_NONE
+                                ? am.portMuteEvent(portId, static_cast<int>(muteState))
+                                : Status::fromExceptionCode(Status::EX_ILLEGAL_STATE);
+    if (result.isOk()) {
+        ALOGI("%s(%d): processed mute state for port ID %d from %#x to %#x", __func__, trackId,
+              portId, static_cast<int>(mMuteState.load()), static_cast<int>(muteState));
+        mMuteState = muteState;
+    } else {
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %s", __func__,
+              trackId, portId, result.toString8().c_str());
+    }
+}
+
+void AfPlaybackCommon::startPlaybackDelivery() {
+    if (mOpControlPartialSession) {
+        mHasOpControlPartial.store(mOpControlPartialSession->beginDeliveryRequest(),
+                            std::memory_order_release);
+    }
+    if (mOpControlFullSession) {
+        mHasOpControlFull.store(mOpControlFullSession->beginDeliveryRequest(),
+                            std::memory_order_release);
+    }
+}
+
+void AfPlaybackCommon::endPlaybackDelivery() {
+    if (mOpControlPartialSession) {
+        mOpControlPartialSession->endDeliveryRequest();
+    }
+    if (mOpControlFullSession) {
+        mOpControlFullSession->endDeliveryRequest();
+    }
+}
+
+// ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::MmapTrack"
 
@@ -3710,7 +3872,9 @@
         audio_port_handle_t portId,
         float volume,
         bool muted)
-    :   TrackBase(thread, NULL, attr, sampleRate, format,
+    :   AfPlaybackCommon(*this, *thread,
+                         volume, muted, attr, attributionSource, /* isOffloadOrMmap */ true),
+        TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
                   sessionId, creatorPid,
@@ -3721,16 +3885,15 @@
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
         mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
         mUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid))),
-            mSilenced(false), mSilencedNotified(false), mVolume(volume)
+            mSilenced(false), mSilencedNotified(false)
 {
-    mMutedFromPort = muted;
     // Once this item is logged by the server, the client can add properties.
     mTrackMetrics.logConstructor(creatorPid, uid(), id());
     if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
             || attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
         // Audio patch and call assistant volume are always max
-        mVolume = 1.0f;
-        mMutedFromPort = false;
+        setPortVolume(1.0f);
+        setPortMute(false);
     }
 }
 
@@ -3746,6 +3909,7 @@
 status_t MmapTrack::start(AudioSystem::sync_event_t event __unused,
                                                     audio_session_t triggerSession __unused)
 {
+    startPlaybackDelivery();
     if (ATRACE_ENABLED()) [[unlikely]] {
         ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
                 .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_START)
@@ -3756,6 +3920,7 @@
 
 void MmapTrack::stop()
 {
+    endPlaybackDelivery();
     if (ATRACE_ENABLED()) [[unlikely]] {
         ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
                 .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_STOP)
@@ -3785,39 +3950,6 @@
 {
 }
 
-void MmapTrack::processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState)
-{
-    if (mMuteState == muteState) {
-        // mute state did not change, do nothing
-        return;
-    }
-
-    status_t result = UNKNOWN_ERROR;
-    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
-        if (mMuteEventExtras == nullptr) {
-            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
-        }
-        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
-                                 static_cast<int>(muteState));
-
-        result = audioManager->portEvent(mPortId,
-                                         PLAYER_UPDATE_MUTED,
-                                         mMuteEventExtras);
-    }
-
-    if (result == OK) {
-        ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
-                static_cast<int>(mMuteState), static_cast<int>(muteState));
-        mMuteState = muteState;
-    } else {
-        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
-              __func__,
-              id(),
-              mPortId,
-              result);
-    }
-}
-
 void MmapTrack::appendDumpHeader(String8& result) const
 {
     const auto res = IAfMmapTrack::getLogHeader();
@@ -3837,8 +3969,8 @@
             mAttr.flags);
     if (isOut()) {
         result.appendFormat("%4x %2x", mAttr.usage, mAttr.content_type);
-        result.appendFormat("%11.2g", 20.0 * log10(mVolume));
-        result.appendFormat("%12s", mMutedFromPort ? "true" : "false");
+        result.appendFormat("%11.2g", 20.0 * log10(getPortVolume()));
+        result.appendFormat("%12s", getPortMute() ? "true" : "false");
     } else {
         result.appendFormat("%7x", mAttr.source);
     }
diff --git a/services/audioflinger/afutils/Vibrator.cpp b/services/audioflinger/afutils/Vibrator.cpp
index 7c99ca9..c5e3b56 100644
--- a/services/audioflinger/afutils/Vibrator.cpp
+++ b/services/audioflinger/afutils/Vibrator.cpp
@@ -51,12 +51,13 @@
     }
     const sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
-
         os::ExternalVibrationScale ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
             ALOGD("%s, start external vibration with intensity as %d", __func__, ret.scaleLevel);
             return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
+        } else {
+            ALOGE("Start external vibration request failed: %s", status.toString8().c_str());
         }
     }
     ALOGD("%s, start external vibration with intensity as MUTE due to %s",
diff --git a/services/audioflinger/fastpath/FastThread.cpp b/services/audioflinger/fastpath/FastThread.cpp
index d054d71..9c26541 100644
--- a/services/audioflinger/fastpath/FastThread.cpp
+++ b/services/audioflinger/fastpath/FastThread.cpp
@@ -54,6 +54,13 @@
         // either nanosleep, sched_yield, or busy wait
         if (mSleepNs >= 0) {
             if (mSleepNs > 0) {
+                if (mOldTsValid) {
+                    mOldTs.tv_nsec += mSleepNs;
+                    if (mOldTs.tv_nsec >= 1000000000) {
+                        mOldTs.tv_sec++;
+                        mOldTs.tv_nsec -= 1000000000;
+                    }
+                }
                 ALOG_ASSERT(mSleepNs < 1000000000);
                 const struct timespec req = {
                     0, // tv_sec
diff --git a/services/audioflinger/timing/tests/mediasyncevent_tests.cpp b/services/audioflinger/timing/tests/mediasyncevent_tests.cpp
index ab2d88f..745bb35 100644
--- a/services/audioflinger/timing/tests/mediasyncevent_tests.cpp
+++ b/services/audioflinger/timing/tests/mediasyncevent_tests.cpp
@@ -25,8 +25,7 @@
 using namespace android::audioflinger;
 
 namespace {
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wenum-constexpr-conversion"
+
 TEST(MediaSyncEventTests, Basic) {
     struct Cookie : public RefBase {};
 
@@ -67,5 +66,5 @@
     syncEvent->cancel();
     ASSERT_TRUE(syncEvent->isCancelled());
 }
-#pragma clang diagnostic pop
+
 } // namespace
diff --git a/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp b/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp
index 82df059..68f154d 100644
--- a/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp
+++ b/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp
@@ -26,8 +26,6 @@
 
 namespace {
 
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wenum-constexpr-conversion"
 TEST(SynchronizedRecordStateTests, Basic) {
     struct Cookie : public RefBase {};
 
@@ -76,5 +74,5 @@
     ASSERT_FALSE(triggered);
     ASSERT_TRUE(syncEvent->isCancelled());
 }
-#pragma clang diagnostic pop
+
 }
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index 0b2c1ba..eb112c4 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -41,6 +41,9 @@
         "av-audio-types-aidl-ndk",
         "libbase",
         "libbinder_ndk",
+        "libmedia_helper",
+        "libmediautils",
+        "libutils",
     ],
 
     cflags: [
diff --git a/services/medialog/OWNERS b/services/audioparameterparser/OWNERS
similarity index 66%
rename from services/medialog/OWNERS
rename to services/audioparameterparser/OWNERS
index fe3205a..bf01b0b 100644
--- a/services/medialog/OWNERS
+++ b/services/audioparameterparser/OWNERS
@@ -1,4 +1,6 @@
 # Bug component: 48436
-atneya@google.com
+elaurent@google.com
 hunga@google.com
+mnaganov@google.com
+yaoshunkai@google.com
 include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/services/audioparameterparser/ParameterParser.cpp b/services/audioparameterparser/ParameterParser.cpp
index 8d6a64f..096018c 100644
--- a/services/audioparameterparser/ParameterParser.cpp
+++ b/services/audioparameterparser/ParameterParser.cpp
@@ -14,20 +14,29 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "Audio_ParameterParser"
+
 #include "ParameterParser.h"
 
-#define LOG_TAG "Audio_ParameterParser"
 #include <android-base/logging.h>
+#include <media/AudioParameter.h>
 
 namespace vendor::audio::parserservice {
 
 using ::aidl::android::hardware::audio::core::VendorParameter;
 using ParameterScope = ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope;
 
-::ndk::ScopedAStatus ParameterParser::parseVendorParameterIds(ParameterScope in_scope,
-                                                              const std::string& in_rawKeys,
-                                                              std::vector<std::string>*) {
+::ndk::ScopedAStatus ParameterParser::parseVendorParameterIds(
+        ParameterScope in_scope, const std::string& in_rawKeys,
+        std::vector<std::string>* _aidl_return) {
     LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope) << ", keys: " << in_rawKeys;
+    if (in_scope == ParameterScope::MODULE) {
+        ::android::AudioParameter params(::android::String8(in_rawKeys.c_str()));
+        if (params.containsKey(
+                    ::android::String8(::android::AudioParameter::keyClipTransitionSupport))) {
+            _aidl_return->emplace_back(::android::AudioParameter::keyClipTransitionSupport);
+        }
+    }
     return ::ndk::ScopedAStatus::ok();
 }
 
@@ -53,9 +62,21 @@
 }
 
 ::ndk::ScopedAStatus ParameterParser::processVendorParameters(
-        ParameterScope in_scope, const std::vector<VendorParameter>& in_parameters, std::string*) {
+        ParameterScope in_scope, const std::vector<VendorParameter>& in_parameters,
+        std::string* _aidl_return) {
     LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
                << ", parameters: " << ::android::internal::ToString(in_parameters);
+    if (in_scope == ParameterScope::MODULE) {
+        ::android::AudioParameter result;
+        for (const auto& param : in_parameters) {
+            if (param.id == ::android::AudioParameter::keyClipTransitionSupport) {
+                result.addInt(
+                        ::android::String8(::android::AudioParameter::keyClipTransitionSupport),
+                        true);
+            }
+        }
+        *_aidl_return = result.toString().c_str();
+    }
     return ::ndk::ScopedAStatus::ok();
 }
 
diff --git a/services/audioparameterparser/main.cpp b/services/audioparameterparser/main.cpp
index d22eb55..f9c3aea 100644
--- a/services/audioparameterparser/main.cpp
+++ b/services/audioparameterparser/main.cpp
@@ -16,26 +16,67 @@
 
 #define LOG_TAG "Audio_ParameterParser"
 #include <android-base/logging.h>
+#include <android-base/properties.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <mediautils/Library.h>
 
 #include "ParameterParser.h"
 
 using vendor::audio::parserservice::ParameterParser;
 
+binder_status_t tryRegisteringVendorImpl() {
+    /*
+     * The property "ro.audio.ihaladaptervendorextension_libname" allows vendors
+     * or OEMs to dynamically load a specific library
+     * into this process space using dlopen.
+     *
+     * "createIHalAdapterVendorExtension" symbol needs to be defined in
+     * the dynamically loaded library used to register the
+     * "::aidl::android::hardware::audio::core::IHalAdapterVendorExtension/default"
+     * with the ServiceManager.
+     */
+    static std::string kLibPropName =
+            ::android::base::GetProperty("ro.audio.ihaladaptervendorextension_libname", "");
+    if (kLibPropName == "") {
+        LOG(DEBUG) << kLibPropName << "property is not found";
+        return STATUS_BAD_VALUE;
+    }
+    static std::shared_ptr<void> libHandle =
+            android::mediautils::loadLibrary(kLibPropName.c_str());
+    if (libHandle == nullptr) {
+        LOG(ERROR) << "Failed to load library:" << kLibPropName;
+        return STATUS_BAD_VALUE;
+    }
+    const std::string kLibSymbol = "createIHalAdapterVendorExtension";
+    std::shared_ptr<void> untypedObject = android::mediautils::getUntypedObjectFromLibrary(
+            kLibSymbol.c_str(), libHandle);
+    auto createIHalAdapterVendorExtension = reinterpret_cast<int (*)()>(untypedObject.get());
+    if (createIHalAdapterVendorExtension == nullptr) {
+        LOG(ERROR) << "Failed to find symbol \"" << kLibSymbol << "\"";
+        return STATUS_BAD_VALUE;
+    }
+    return createIHalAdapterVendorExtension();
+}
+
 int main() {
     // This is a debug implementation, always enable debug logging.
     android::base::SetMinimumLogSeverity(::android::base::DEBUG);
-
-    auto parser = ndk::SharedRefBase::make<ParameterParser>();
-    const std::string parserFqn =
-            std::string().append(ParameterParser::descriptor).append("/default");
-    binder_status_t status =
-            AServiceManager_addService(parser->asBinder().get(), parserFqn.c_str());
-    if (status != STATUS_OK) {
-        LOG(ERROR) << "failed to register service for \"" << parserFqn << "\"";
+    if (tryRegisteringVendorImpl() != STATUS_OK) {
+        const std::string parserFqn =
+                std::string()
+                        .append(::aidl::android::media::audio::IHalAdapterVendorExtension::
+                                        descriptor)
+                        .append("/default");
+        auto parser = ndk::SharedRefBase::make<ParameterParser>();
+        binder_status_t status =
+                AServiceManager_addService(parser->asBinder().get(), parserFqn.c_str());
+        if (status != STATUS_OK) {
+            LOG(ERROR) << "failed to register service for \"" << parserFqn << "\"";
+        }
+    } else {
+        LOG(INFO) << "IHalAdapterVendorExtension registered with vendor's implementation";
     }
-
     ABinderProcess_joinThreadPool();
-    return EXIT_FAILURE;  // should not reach
+    return EXIT_FAILURE; // should not reach
 }
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index e5bd121..4f26aca 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -114,7 +114,7 @@
     // indicate a change in device connection status
     virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                                               const android::media::audio::common::AudioPort& port,
-                                              audio_format_t encodedFormat) = 0;
+                                              audio_format_t encodedFormat, bool deviceSwitch) = 0;
     // retrieve a device connection status
     virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                               const char *device_address) = 0;
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0c03900..170329a 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -157,7 +157,8 @@
 static inline bool apm_audio_in_device_distinguishes_on_address(audio_devices_t device)
 {
     return device == AUDIO_DEVICE_IN_REMOTE_SUBMIX ||
-           device == AUDIO_DEVICE_IN_BUS;
+           device == AUDIO_DEVICE_IN_BUS ||
+           device == AUDIO_DEVICE_IN_ECHO_REFERENCE;
 }
 
 /**
@@ -288,8 +289,8 @@
         // Multiple device selection is either:
         //  - dock + one other device: give priority to dock in this case.
         //  - speaker + one other device: give priority to speaker in this case.
-        //  - one A2DP device + another device: happens with duplicated output. In this case
-        // retain the device on the A2DP output as the other must not correspond to an active
+        //  - one removable device + another device: happens with duplicated output. In this case
+        // retain the removable device as the other must not correspond to an active
         // selection if not the speaker.
         //  - HDMI-CEC system audio mode only output: give priority to available item in order.
         if (deviceTypes.count(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET) != 0) {
@@ -307,13 +308,13 @@
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
             return AUDIO_DEVICE_OUT_SPDIF;
         } else {
-            std::vector<audio_devices_t> a2dpDevices = android::Intersection(
-                    deviceTypes, android::getAudioDeviceOutAllA2dpSet());
-            if (a2dpDevices.empty() || a2dpDevices.size() > 1) {
+            std::vector<audio_devices_t> volumeDevices = android::Intersection(
+                    deviceTypes, android::getAudioDeviceOutPickForVolumeSet());
+            if (volumeDevices.empty() || volumeDevices.size() > 1) {
                 ALOGW("%s invalid device combination: %s",
                       __func__, android::dumpDeviceTypes(deviceTypes).c_str());
             }
-            return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
+            return volumeDevices.empty() ? AUDIO_DEVICE_NONE : volumeDevices[0];
         }
     }
 }
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 0e1d090..f7ab86f 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -74,8 +74,11 @@
     export_include_dirs: ["include"],
 
     cflags: [
+        "-DENABLE_CAP_AIDL_HYBRID_MODE",
         "-Wall",
         "-Werror",
-    ],
-
+    ] + select(release_flag("RELEASE_HARDWARE_AUDIO_USE_CAP_AIDL"), {
+        true: [],
+        default: ["-DDISABLE_CAP_AIDL"],
+    }),
 }
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index b193cb8..5ac0df2 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -45,6 +45,10 @@
 
     // The source used to indicate the configuration from the AIDL HAL.
     static const constexpr char* const kAidlConfigSource = "AIDL HAL";
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
+    // The source used to indicate the configuration from the AIDL HAL but the engine still uses XML.
+    static const constexpr char* const kHybridAidlConfigSource = "AIDL HAL Hybrid CAP";
+#endif
     // The source used to indicate the default fallback configuration.
     static const constexpr char* const kDefaultConfigSource = "AudioPolicyConfig::setDefault";
     // The suffix of the "engine default" implementation shared library name.
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index c2ee5f6..76762f9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,11 +70,20 @@
         return mMixerBehaviors;
     }
 
+    /**
+     * NO_MATCH: Both config and flags are not compatible.
+     * PARTIAL_MATCH: Both config and flags are partially matched.
+     * PARTIAL_MATCH_WITH_CONFIG: Partial match with flags (e.g. fast flags) and exact match with
+     * config.
+     * PARTIAL_MATCH_WITH_FLAG: Partial match with config and exact match with flags.
+     * EXACT_MATCH: Both config and flags are exactly matched.
+     */
     enum CompatibilityScore{
         NO_MATCH = 0,
         PARTIAL_MATCH = 1,
-        PARTIAL_MATCH_WITH_FLAG = 2,
-        EXACT_MATCH = 3
+        PARTIAL_MATCH_WITH_CONFIG = 2,
+        PARTIAL_MATCH_WITH_FLAG = 3,
+        EXACT_MATCH = 4
     };
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
index 723887d..a7a39d9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -272,6 +272,17 @@
     mSource = kAidlConfigSource;
     if (aidl.engineConfig.capSpecificConfig.has_value()) {
         setEngineLibraryNameSuffix(kCapEngineLibraryNameSuffix);
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
+        // Using AIDL Audio HAL to get policy configuration and relying on vendor xml configuration
+        // file for CAP engine.
+#ifndef DISABLE_CAP_AIDL
+        if (!aidl.engineConfig.capSpecificConfig.value().domains.has_value()) {
+#endif
+            mSource = kHybridAidlConfigSource;
+#ifndef DISABLE_CAP_AIDL
+        }
+#endif
+#endif
     }
     // No need to augmentData() as AIDL HAL must provide correct mic addresses.
     return NO_ERROR;
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index bc9eb20..78e0582 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -77,7 +77,11 @@
             }
             result = EXACT_MATCH;
         } else if (checkExactAudioProfile(&config) == NO_ERROR) {
-            result = EXACT_MATCH;
+            if (flagsCompatibleScore == EXACT_MATCH) {
+                result = EXACT_MATCH;
+            } else {
+                result = PARTIAL_MATCH_WITH_CONFIG;
+            }
         } else if (checkCompatibleAudioProfile(
                 myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
             if (flagsCompatibleScore == EXACT_MATCH) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index cd54626..60c1c6e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -254,7 +254,7 @@
         }
     }
     ALOGV("%s Port[nm:%s] profile rate=%d, format=%d, channels=%d", __FUNCTION__,
-            asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
+            asAudioPort()->getName().c_str(), samplingRate, format, channelMask);
 }
 
 status_t PolicyAudioPort::checkAudioProfile(
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 4445b66..4c2001f 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -178,6 +178,8 @@
 
     void updateDeviceSelectionCache() override;
 
+    engineConfig::ParsingResult parseAndSetDefaultConfiguration();
+
 protected:
     DeviceVector getPreferredAvailableDevicesForProductStrategy(
         const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
@@ -235,6 +237,9 @@
      */
     virtual DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const = 0;
 
+    sp<DeviceDescriptor> getInputDeviceForEchoRef(const audio_attributes_t &attr,
+            const DeviceVector &availableInputDevices) const;
+
     DeviceStrategyMap mDevicesForStrategies;
 };
 
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 0799399..b0279b9 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -125,6 +125,18 @@
     return "";
 }
 
+engineConfig::ParsingResult EngineBase::parseAndSetDefaultConfiguration() {
+    mProductStrategies.clear();
+    mVolumeGroups.clear();
+    engineConfig::Config config = gDefaultEngineConfig;
+    android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
+    if (ret != NO_ERROR) {
+        ALOGD("%s: No legacy volume group found, using default music group", __FUNCTION__);
+        config.volumeGroups = gDefaultVolumeGroups;
+    }
+    return processParsingResult({std::make_unique<engineConfig::Config>(config), 1});
+}
+
 engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
         const media::audio::common::AudioHalEngineConfig& aidlConfig, bool)
 {
@@ -154,10 +166,7 @@
             engineConfig::parse(filePath.c_str(), isConfigurable) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
         ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
-        engineConfig::Config config = gDefaultEngineConfig;
-        android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
-        result = {std::make_unique<engineConfig::Config>(config),
-                  static_cast<size_t>(ret == NO_ERROR ? 0 : 1)};
+        return parseAndSetDefaultConfiguration();
     } else {
         // Append for internal use only volume groups (e.g. rerouting/patch)
         result.parsedConfig->volumeGroups.insert(
@@ -798,6 +807,41 @@
     return disabledDevices;
 }
 
+sp<DeviceDescriptor> EngineBase::getInputDeviceForEchoRef(const audio_attributes_t &attr,
+            const DeviceVector &availableInputDevices) const
+{
+    // get the first input device whose address matches a tag
+
+    std::string tags { attr.tags }; // tags separate by ';'
+    std::size_t posBegin = 0; // first index of current tag, inclusive
+    std::size_t posEnd; // last index of current tag, exclusive
+
+    while (posBegin < tags.size()) {
+        // ';' is used as the delimiter of tags
+        // find the first delimiter after posBegin
+        posEnd = tags.find(';', posBegin);
+
+        std::string tag;
+
+        if (posEnd == std::string::npos) { // no more delimiter found
+            tag = tags.substr(posBegin); // last tag
+        } else {
+            // get next tag
+            tag = tags.substr(posBegin, posEnd - posBegin);
+        }
+        // get the input device whose address matches the tag
+        sp<DeviceDescriptor> device = availableInputDevices.getDevice(
+                AUDIO_DEVICE_IN_ECHO_REFERENCE, String8(tag.c_str()), AUDIO_FORMAT_DEFAULT);
+        if (device != nullptr) {
+            return device;
+        }
+
+        // update posBegin for next tag
+        posBegin = posEnd + 1;
+    }
+    return nullptr;
+}
+
 void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
 {
     dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 229c5e2..4933b34 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -185,11 +185,23 @@
     }
 };
 
+const engineConfig::VolumeGroups gDefaultVolumeGroups = {
+    {"AUDIO_STREAM_MUSIC", 0, 15,
+     {
+        {"DEVICE_CATEGORY_SPEAKER", {{0,0}, {100, 15}}},
+        {"DEVICE_CATEGORY_HEADSET", {{0,0}, {100, 15}}},
+        {"DEVICE_CATEGORY_EARPIECE", {{0,0}, {100, 15}}},
+        {"DEVICE_CATEGORY_EXT_MEDIA", {{0,0}, {100, 15}}},
+        {"DEVICE_CATEGORY_HEARING_AID", {{0,0}, {100, 15}}},
+     }
+    },
+};
+
 const engineConfig::Config gDefaultEngineConfig = {
     1.0,
     gOrderedStrategies,
     {},
     {},
-    {}
+    {},
 };
 } // namespace android
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 8ed7403..bc5db83 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -309,6 +309,7 @@
 
 void ProductStrategyMap::initialize()
 {
+    mDefaultStrategy = PRODUCT_STRATEGY_NONE;
     mDefaultStrategy = getDefault();
     ALOG_ASSERT(mDefaultStrategy != PRODUCT_STRATEGY_NONE, "No default product strategy found");
 }
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index 70461ad..6d5e15b 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -192,6 +192,7 @@
      */
     virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
             const audio_attributes_t &attr,
+            bool ignorePreferredDevice = true,
             uid_t uid = 0,
             audio_session_t session = AUDIO_SESSION_NONE,
             sp<AudioPolicyMix> *mix = nullptr) const = 0;
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index 1c98faf..5da2689 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -24,10 +24,14 @@
         "src/Stream.cpp",
     ],
     cflags: [
+        "-DENABLE_CAP_AIDL_HYBRID_MODE",
         "-Wall",
         "-Werror",
         "-Wextra",
-    ],
+    ] + select(release_flag("RELEASE_HARDWARE_AUDIO_USE_CAP_AIDL"), {
+        true: [],
+        default: ["-DDISABLE_CAP_AIDL"],
+    }),
     local_include_dirs: ["include"],
     header_libs: [
         "libaudiopolicycommon",
@@ -42,6 +46,7 @@
         "libaudiopolicyengineconfigurable_pfwwrapper",
     ],
     shared_libs: [
+        "com.android.media.audioserver-aconfig-cc",
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
         "libaudiopolicy",
@@ -59,10 +64,16 @@
         "latest_android_media_audio_common_types_cpp_shared",
     ],
     required: [
-        "CapClass.xml",
-        "CapProductStrategies.xml",
-        "CapSubsystem-CommonTypes.xml",
-        "CapSubsystem.xml",
-        "ParameterFrameworkConfigurationCap.xml",
-    ],
+    ] + select(release_flag("RELEASE_HARDWARE_AUDIO_USE_CAP_AIDL"), {
+        true: [
+            "CapClass.xml",
+            "CapProductStrategies.xml",
+            "CapSubsystem-CommonTypes.xml",
+            "CapSubsystem.xml",
+            "ParameterFrameworkConfigurationCap.xml",
+        ],
+        default: [
+            // empty, provisioned by the vendor
+        ],
+    }),
 }
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_product_strategies.xml
index f598cf2..6b597bf 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_product_strategies.xml
@@ -30,11 +30,11 @@
     ( type == 1 ) )
     -->
 
-    <ProductStrategy name="oem_traffic_anouncement">
-        <AttributesGroup volumeGroup="oem_traffic_anouncement">
+    <ProductStrategy name="oem_traffic_announcement">
+        <AttributesGroup volumeGroup="oem_traffic_announcement">
             <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
             <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/>
-            <!--  traffic_annoucement = 1 -->
+            <!--  traffic_announcement = 1 -->
             <Bundle key="oem" value="1"/>
         </AttributesGroup>
     </ProductStrategy>
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_volumes.xml b/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_volumes.xml
index 97a25a8..288f926 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_volumes.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/audio_policy_engine_volumes.xml
@@ -23,7 +23,7 @@
 
 <volumeGroups>
     <volumeGroup>
-        <name>oem_traffic_anouncement</name>
+        <name>oem_traffic_announcement</name>
         <indexMin>0</indexMin>
         <indexMax>40</indexMax>
         <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_product_strategies.xml
index f598cf2..c658638 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_product_strategies.xml
@@ -17,7 +17,7 @@
 <ProductStrategies>
     <!-- OEM Usages -->
     <!-- product_strategy will be defined according this order
-    product_strategy is oem_traffic_anouncement if all the conditions are satisfied for
+    product_strategy is oem_traffic_announcement if all the conditions are satisfied for
     AudioAttributes aa
 
     int type = 0;
@@ -30,8 +30,8 @@
     ( type == 1 ) )
     -->
 
-    <ProductStrategy name="oem_traffic_anouncement">
-        <AttributesGroup volumeGroup="oem_traffic_anouncement">
+    <ProductStrategy name="oem_traffic_announcement">
+        <AttributesGroup volumeGroup="oem_traffic_announcement">
             <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
             <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/>
             <!--  traffic_annoucement = 1 -->
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_volumes.xml b/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_volumes.xml
index 97a25a8..288f926 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_volumes.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/audio_policy_engine_volumes.xml
@@ -23,7 +23,7 @@
 
 <volumeGroups>
     <volumeGroup>
-        <name>oem_traffic_anouncement</name>
+        <name>oem_traffic_announcement</name>
         <indexMin>0</indexMin>
         <indexMax>40</indexMax>
         <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
diff --git a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types.xml.in b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types.xml.in
index e134c42..098d678 100644
--- a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types.xml.in
+++ b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types.xml.in
@@ -88,6 +88,7 @@
             <value literal="ForceNone" numerical="0"/>
             <value literal="ForceEncodedSurroundNever" numerical="13"/>
             <value literal="ForceEncodedSurroundAlways" numerical="14"/>
+            <value literal="ForceEncodedSurroundManual" numerical="15"/>
         </values>
     </criterion_type>
     <criterion_type name="ForceUseForVibrateRingingType" type="exclusive">
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
index b89fba0..0f753d3 100644
--- a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -61,6 +61,7 @@
 static constexpr const char *gLegacyStreamPrefix = "AUDIO_STREAM_";
 static constexpr const char *gLegacySourcePrefix = "AUDIO_SOURCE_";
 static constexpr const char *gPolicyParamPrefix = "/Policy/policy/";
+static constexpr const char *gVendorStrategyPrefix = "vx_";
 
 namespace {
 
@@ -316,11 +317,12 @@
     }
     rule += "{";
     if (!aidlRule.nestedRules.empty()) {
-        for (const auto& nestedRule: aidlRule.nestedRules) {
-            rule += VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(nestedRule));
-        }
-        if (!aidlRule.criterionRules.empty()) {
-            rule += ",";
+        for (auto ruleIter = aidlRule.nestedRules.begin(); ruleIter != aidlRule.nestedRules.end();
+                ++ruleIter) {
+            rule += VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(*ruleIter));
+            if (ruleIter != (aidlRule.nestedRules.end()  - 1) || !aidlRule.criterionRules.empty()) {
+                rule += ",";
+            }
         }
     }
     bool isFirstCriterionRule = true;
@@ -366,6 +368,21 @@
     return legacy;
 }
 
+ConversionResult<std::string> aidl2legacy_AudioHalProductStrategyId_StrategyParamName(
+        int id) {
+    std::string strategyName;
+    if (id < media::audio::common::AudioHalProductStrategy::VENDOR_STRATEGY_ID_START) {
+        strategyName = legacy_strategy_to_string(static_cast<legacy_strategy>(id));
+        if (strategyName.empty()) {
+            ALOGE("%s Invalid legacy strategy id %d", __func__, id);
+            return unexpected(BAD_VALUE);
+        }
+    } else {
+        strategyName = gVendorStrategyPrefix + std::to_string(id);
+    }
+    return strategyName;
+}
+
 ConversionResult<ConfigurableElementValue> aidl2legacy_ParameterSetting_ConfigurableElementValue(
         const AudioHalCapParameter& aidl) {
     ConfigurableElementValue legacy;
@@ -387,14 +404,18 @@
                 deviceLiteral = "stub";
             }
             legacy.configurableElement.path = std::string(gPolicyParamPrefix)
-                    + "product_strategies/vx_" + std::to_string(strategyDevice.id)
+                    + "product_strategies/"
+                    + VALUE_OR_RETURN(aidl2legacy_AudioHalProductStrategyId_StrategyParamName(
+                            strategyDevice.id))
                     + "/selected_output_devices/mask/" + deviceLiteral;
             break;
         }
         case AudioHalCapParameter::strategyDeviceAddress: {
             auto strategyAddress = aidl.get<AudioHalCapParameter::strategyDeviceAddress>();
             legacy.configurableElement.path = std::string(gPolicyParamPrefix)
-                    + "product_strategies/vx_" + std::to_string(strategyAddress.id)
+                    + "product_strategies/"
+                    + VALUE_OR_RETURN(aidl2legacy_AudioHalProductStrategyId_StrategyParamName(
+                            strategyAddress.id))
                     + "/device_address";
             literalValue = strategyAddress.deviceAddress.get<AudioDeviceAddress::id>();
             break;
@@ -534,7 +555,7 @@
             }
             std::string deviceLiteral = VALUE_OR_RETURN_STATUS(truncatePrefix(legacyTypeLiteral,
                     isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix));
-            uint64_t pfwCriterionValue = 1 << shift++;
+            uint64_t pfwCriterionValue = 1ULL << (shift++);
             criterionType.valuePairs.push_back(
                     {pfwCriterionValue, static_cast<int32_t>(legacyDeviceType), deviceLiteral});
             ALOGV("%s: adding %" PRIu64 " %d %s %s", __func__, pfwCriterionValue, legacyDeviceType,
diff --git a/services/audiopolicy/engineconfigurable/data/Android.bp b/services/audiopolicy/engineconfigurable/data/Android.bp
index 303cabc..1b5d0bf 100644
--- a/services/audiopolicy/engineconfigurable/data/Android.bp
+++ b/services/audiopolicy/engineconfigurable/data/Android.bp
@@ -67,7 +67,7 @@
 
 genrule {
     name: "buildaidlcommontypesstructure_gen",
-    defaults: ["buildcommontypesstructurerule"],
+    defaults: ["capBuildcommontypesstructurerule"],
     out: ["CapSubsystem-CommonTypes.xml"],
 }
 
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
index 61f056a..79a95b1 100644
--- a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
@@ -19,6 +19,15 @@
   <!-- This structure is expected to be in the system partition and provisionned a maximum
        allowed strategies to be used by vendor. -->
   <ComponentType Name="ProductStrategies" Description="">
+    <Component Name="STRATEGY_PHONE" Type="ProductStrategy" Mapping="Name:STRATEGY_PHONE"/>
+    <Component Name="STRATEGY_SONIFICATION" Type="ProductStrategy" Mapping="Name:STRATEGY_SONIFICATION"/>
+    <Component Name="STRATEGY_ENFORCED_AUDIBLE" Type="ProductStrategy" Mapping="Name:STRATEGY_ENFORCED_AUDIBLE"/>
+    <Component Name="STRATEGY_ACCESSIBILITY" Type="ProductStrategy" Mapping="Name:STRATEGY_ACCESSIBILITY"/>
+    <Component Name="STRATEGY_SONIFICATION_RESPECTFUL" Type="ProductStrategy" Mapping="Name:STRATEGY_SONIFICATION_RESPECTFUL"/>
+    <Component Name="STRATEGY_MEDIA" Type="ProductStrategy" Mapping="Name:STRATEGY_MEDIA"/>
+    <Component Name="STRATEGY_DTMF" Type="ProductStrategy" Mapping="Name:STRATEGY_DTMF"/>
+    <Component Name="STRATEGY_CALL_ASSISTANT" Type="ProductStrategy" Mapping="Name:STRATEGY_CALL_ASSISTANT"/>
+    <Component Name="STRATEGY_TRANSMITTED_THROUGH_SPEAKER" Type="ProductStrategy" Mapping="Name:STRATEGY_TRANSMITTED_THROUGH_SPEAKER"/>
     <Component Name="vx_1000" Type="ProductStrategy" Mapping="Identifier:1000,Name:vx_1000"/>
     <Component Name="vx_1001" Type="ProductStrategy" Mapping="Identifier:1001,Name:vx_1001"/>
     <Component Name="vx_1002" Type="ProductStrategy" Mapping="Identifier:1002,Name:vx_1002"/>
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Settings/device_for_product_strategies.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Settings/device_for_product_strategies.pfw
index ddae356..e75f5ca 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Settings/device_for_product_strategies.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Settings/device_for_product_strategies.pfw
@@ -1,8 +1,8 @@
 supDomain: DeviceForProductStrategies
-	supDomain: OemTrafficAnouncement
+	supDomain: OemTrafficAnnouncement
 		domain: UnreachableDevices
 			conf: calibration
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					earpiece = 0
 					speaker = 0
 					wired_headset = 0
@@ -30,18 +30,18 @@
 					proxy = 0
 					usb_headset = 0
 					stub = 0
-				/Policy/policy/product_strategies/oem_traffic_anouncement/device_address = BUS08_OEM1
+				/Policy/policy/product_strategies/oem_traffic_announcement/device_address = BUS08_OEM1
 
 		domain: SelectedDevice
 			conf: Bus
 				AvailableOutputDevices Includes Bus
 				AvailableOutputDevicesAddresses Includes BUS08_OEM1
 
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					bus = 1
 
 			conf: Default
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					bus = 0
 
 	supDomain: OemStrategy1
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Settings/device_for_product_strategies.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Settings/device_for_product_strategies.pfw
index cc778df..430c944 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Settings/device_for_product_strategies.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Settings/device_for_product_strategies.pfw
@@ -1,8 +1,8 @@
 supDomain: DeviceForProductStrategies
-	supDomain: OemTrafficAnouncement
+	supDomain: OemTrafficAnnouncement
 		domain: UnreachableDevices
 			conf: calibration
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					earpiece = 0
 					speaker = 0
 					wired_headset = 0
@@ -30,18 +30,18 @@
 					proxy = 0
 					usb_headset = 0
 					stub = 0
-				/Policy/policy/product_strategies/oem_traffic_anouncement/device_address = BUS00_MEDIA
+				/Policy/policy/product_strategies/oem_traffic_announcement/device_address = BUS00_MEDIA
 
 		domain: SelectedDevice
 			conf: Bus
 				AvailableOutputDevices Includes Bus
 				AvailableOutputDevicesAddresses Includes BUS00_MEDIA
 
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					bus = 1
 
 			conf: Default
-				component: /Policy/policy/product_strategies/oem_traffic_anouncement/selected_output_devices/mask
+				component: /Policy/policy/product_strategies/oem_traffic_announcement/selected_output_devices/mask
 					bus = 0
 
 	supDomain: OemStrategy1
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
index bf5767d..8bd7f66 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
@@ -72,7 +72,7 @@
         );
     addSubsystemObjectFactory(
         new TSubsystemObjectFactory<ProductStrategy>(
-            mProductStrategyComponentName, (1 << MappingKeyName) | (1 << MappingKeyIdentifier))
+            mProductStrategyComponentName, (1 << MappingKeyName))
         );
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
index 06efbf28..866998e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
@@ -29,9 +29,6 @@
                                 (MappingKeyAmendEnd - MappingKeyAmend1 + 1),
                                 context) {
 
-    size_t id = context.getItemAsInteger(MappingKeyIdentifier);
-    std::string nameFromStructure(context.getItem(MappingKeyName));
-
     ALOG_ASSERT(instanceConfigurableElement != nullptr, "Invalid Configurable Element");
     mPolicySubsystem = static_cast<const PolicySubsystem *>(
             instanceConfigurableElement->getBelongingSubsystem());
@@ -40,14 +37,22 @@
     mPolicyPluginInterface = mPolicySubsystem->getPolicyPluginInterface();
     ALOG_ASSERT(mPolicyPluginInterface != nullptr, "Invalid Policy Plugin Interface");
 
-    mId = static_cast<android::product_strategy_t>(id);
-    std::string name = mPolicyPluginInterface->getProductStrategyName(mId);
-    if (name.empty()) {
+    const std::string nameFromStructure(context.getItem(MappingKeyName));
+    std::string name;
+    if (context.iSet(MappingKeyIdentifier)) {
+        size_t id = context.getItemAsInteger(MappingKeyIdentifier);
+        mId = static_cast<android::product_strategy_t>(id);
+        name = mPolicyPluginInterface->getProductStrategyName(mId);
+        if (name.empty()) {
+            name = nameFromStructure;
+            mId = mPolicyPluginInterface->getProductStrategyByName(name);
+        }
+    } else {
         name = nameFromStructure;
-        mId = mPolicyPluginInterface->getProductStrategyByName(name);
+        mId = mPolicyPluginInterface->getProductStrategyByName(nameFromStructure);
     }
 
-    ALOG_ASSERT(mId != PRODUCT_STRATEGY_INVALID, "Product Strategy %s not found", name.c_str());
+    ALOG_ASSERT(mId != PRODUCT_STRATEGY_NONE, "Product Strategy %s not found", name.c_str());
 
     ALOGE("Product Strategy %s added", name.c_str());
 }
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index ad49b19..4192714 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -31,7 +31,7 @@
 #include <EngineConfig.h>
 #include <policy.h>
 #include <AudioIODescriptorInterface.h>
-#include <ParameterManagerWrapper.h>
+#include <com_android_media_audioserver.h>
 #include <media/AudioContainers.h>
 
 #include <media/TypeConverter.h>
@@ -66,13 +66,20 @@
     return mInputSourceCollection;
 }
 
-Engine::Engine() : mPolicyParameterMgr(new ParameterManagerWrapper())
-{
-}
-
 status_t Engine::loadFromHalConfigWithFallback(
         const media::audio::common::AudioHalEngineConfig& aidlConfig) {
-
+#ifdef DISABLE_CAP_AIDL
+    (void) aidlConfig;
+    ALOGE("%s CapEngine Config disabled, falling back on vendor XML for engine", __func__);
+    return loadFromXmlConfigWithFallback(engineConfig::DEFAULT_PATH);
+#else
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
+    if (!aidlConfig.capSpecificConfig.value().domains.has_value()) {
+        ALOGE("%s CapEngine Config missing, falling back on vendor XML for engine", __func__);
+        return loadFromXmlConfigWithFallback(engineConfig::DEFAULT_PATH);
+    }
+#endif
+    mPolicyParameterMgr = std::make_unique<ParameterManagerWrapper>();
     auto capResult = capEngineConfig::convert(aidlConfig);
     if (capResult.parsedConfig == nullptr) {
         ALOGE("%s CapEngine Config invalid", __func__);
@@ -92,20 +99,24 @@
     };
     loadCriteria(capResult.parsedConfig->capCriteria);
     std::string error;
-    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
-        ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
-        return NO_INIT;
+    if (mPolicyParameterMgr->start(error) != NO_ERROR) {
+        ALOGE("%s: could not start Policy PFW: %s, fallback on default", __func__ , error.c_str());
+        auto result = parseAndSetDefaultConfiguration();
+        return result.nbSkippedElement == 0 ? NO_ERROR : BAD_VALUE;
     }
     return mPolicyParameterMgr->setConfiguration(capResult);
+#endif
 }
 
 status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
 {
+    mPolicyParameterMgr = std::make_unique<ParameterManagerWrapper>(true /*useLegacyVendorFile*/);
     status_t status = loadWithFallback(xmlFilePath);
     std::string error;
-    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
-        ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
-        return NO_INIT;
+    if (mPolicyParameterMgr->start(error) != NO_ERROR) {
+        ALOGE("%s: could not start Policy PFW: %s, fallback on default", __func__ , error.c_str());
+        auto result = parseAndSetDefaultConfiguration();
+        return result.nbSkippedElement == 0 ? NO_ERROR : BAD_VALUE;
     }
     return status;
 }
@@ -191,6 +202,10 @@
 
 status_t Engine::setPhoneState(audio_mode_t mode)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     status_t status = mPolicyParameterMgr->setPhoneState(mode);
     if (status != NO_ERROR) {
         return status;
@@ -200,12 +215,20 @@
 
 audio_mode_t Engine::getPhoneState() const
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return AUDIO_MODE_NORMAL;
+    }
     return mPolicyParameterMgr->getPhoneState();
 }
 
 status_t Engine::setForceUse(audio_policy_force_use_t usage,
                                       audio_policy_forced_cfg_t config)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     status_t status = mPolicyParameterMgr->setForceUse(usage, config);
     if (status != NO_ERROR) {
         return status;
@@ -215,12 +238,20 @@
 
 audio_policy_forced_cfg_t Engine::getForceUse(audio_policy_force_use_t usage) const
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return AUDIO_POLICY_FORCE_NONE;
+    }
     return mPolicyParameterMgr->getForceUse(usage);
 }
 
 status_t Engine::setOutputDevicesConnectionState(const DeviceVector &devices,
                                                  audio_policy_dev_state_t state)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     for (const auto &device : devices) {
         mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
     }
@@ -236,6 +267,10 @@
 status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
                                           audio_policy_dev_state_t state)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
     if (audio_is_output_device(device->type())) {
         return mPolicyParameterMgr->setAvailableOutputDevices(
@@ -405,8 +440,9 @@
         auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
         ALOG_ASSERT(defaultDevice != nullptr, "no valid default device defined");
         selectedDevices = DeviceVector(defaultDevice);
-    } else if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
-            deviceTypes, AUDIO_DEVICE_OUT_BUS)) {
+    } else if (/*device_distinguishes_on_address(*deviceTypes.begin())*/
+            isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_BUS) ||
+            isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_SPEAKER)) {
         // We do expect only one device for these types of devices
         // Criterion device address garantee this one is available
         // If this criterion is not wished, need to ensure this device is available
@@ -469,6 +505,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         bool ignorePreferredDevice,
                                                          uid_t uid,
                                                          audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
@@ -483,10 +520,13 @@
     //
     // Honor explicit routing requests only if all active clients have a preferred route in which
     // case the last active client route is used
-    sp<DeviceDescriptor> device =
-            findPreferredDevice(inputs, attr.source, availableInputDevices);
-    if (device != nullptr) {
-        return device;
+    sp<DeviceDescriptor> device;
+    if (!com::android::media::audioserver::conditionally_ignore_preferred_input_device()
+            || !ignorePreferredDevice) {
+        device = findPreferredDevice(inputs, attr.source, availableInputDevices);
+        if (device != nullptr) {
+            return device;
+        }
     }
 
     device = policyMixes.getDeviceAndMixForInputSource(attr,
@@ -500,6 +540,14 @@
 
     audio_devices_t deviceType = getPropertyForKey<audio_devices_t, audio_source_t>(attr.source);
 
+    if (deviceType == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
+        device = getInputDeviceForEchoRef(attr, availableInputDevices);
+        if (device != nullptr) {
+            return device;
+        }
+    }
+
+
     if (audio_is_remote_submix_device(deviceType)) {
         address = "0";
         std::size_t pos;
@@ -524,6 +572,10 @@
 
 bool Engine::setDeviceTypesForProductStrategy(product_strategy_t strategy, uint64_t devices)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return false;
+    }
     if (getProductStrategies().find(strategy) == getProductStrategies().end()) {
         ALOGE("%s: set device %" PRId64 " on invalid strategy %d", __FUNCTION__, devices, strategy);
         return false;
@@ -537,6 +589,10 @@
 
 bool Engine::setDeviceForInputSource(const audio_source_t &inputSource, uint64_t device)
 {
+    if (mPolicyParameterMgr == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return false;
+    }
     DeviceTypeSet types = mPolicyParameterMgr->convertDeviceCriterionValueToDeviceTypes(
                 device, false /*isOut*/);
     ALOG_ASSERT(types.size() <= 1, "one input device expected at most");
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index d9ebbe7..df3c13d 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -16,24 +16,26 @@
 
 #pragma once
 
+#include <memory>
+
 #include "Collection.h"
 #include "EngineBase.h"
 #include <AudioPolicyPluginInterface.h>
 #include <CapEngineConfig.h>
 #include <EngineInterface.h>
+#include <ParameterManagerWrapper.h>
 
 namespace android {
 class AudioPolicyManagerObserver;
 
 namespace audio_policy {
 
-class ParameterManagerWrapper;
 class VolumeProfile;
 
 class Engine : public EngineBase, AudioPolicyPluginInterface
 {
 public:
-    Engine();
+    Engine() = default;
     virtual ~Engine() = default;
 
     template <class RequestedInterface>
@@ -71,6 +73,7 @@
                                            bool fromCache = false) const override;
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     bool ignorePreferredDevice = true,
                                                      uid_t uid = 0,
                                                      audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
@@ -157,7 +160,7 @@
     /**
      * Policy Parameter Manager hidden through a wrapper.
      */
-    ParameterManagerWrapper *mPolicyParameterMgr;
+    std::unique_ptr<ParameterManagerWrapper> mPolicyParameterMgr;
 };
 
 } // namespace audio_policy
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 7ae124c..f35829b 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -171,6 +171,30 @@
 // Tools for policy parameter-framework common type structure file generation
 //
 python_binary_host {
+    name: "capBuildCommonTypesStructureFile",
+    main: "capBuildCommonTypesStructureFile.py",
+    srcs: [
+        "capBuildCommonTypesStructureFile.py",
+    ],
+}
+
+genrule_defaults {
+    name: "capBuildcommontypesstructurerule",
+    tools: ["capBuildCommonTypesStructureFile"],
+    cmd: "$(location capBuildCommonTypesStructureFile) " +
+        "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        "--commontypesstructure $(location :common_types_structure_template) " +
+        "--outputfile $(out)",
+    srcs: [
+        ":common_types_structure_template",
+        ":libaudio_system_audio_base",
+    ],
+}
+
+//##################################################################################################
+// Legacy tools for policy parameter-framework common type structure file generation
+//
+python_binary_host {
     name: "buildCommonTypesStructureFile",
     main: "buildCommonTypesStructureFile.py",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/tools/capBuildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/capBuildCommonTypesStructureFile.py
new file mode 100755
index 0000000..c883ac1
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/tools/capBuildCommonTypesStructureFile.py
@@ -0,0 +1,214 @@
+#!/usr/bin/python3
+
+#
+# Copyright 2025, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import re
+import sys
+import os
+import logging
+import xml.etree.ElementTree as ET
+from collections import OrderedDict
+import xml.dom.minidom as MINIDOM
+
+def parseArgs():
+    argparser = argparse.ArgumentParser(description="Parameter-Framework XML \
+        structure file generator.\n\
+        Exit with the number of (recoverable or not) errors that occurred.")
+    argparser.add_argument('--androidaudiobaseheader',
+                           help="Android Audio Base C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--commontypesstructure',
+                           help="Structure XML base file. Mandatory.",
+                           metavar="STRUCTURE_FILE_IN",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--outputfile',
+                           help="Structure XML file. Mandatory.",
+                           metavar="STRUCTURE_FILE_OUT",
+                           type=argparse.FileType('w'),
+                           required=True)
+    argparser.add_argument('--verbose',
+                           action='store_true')
+
+    return argparser.parse_args()
+
+
+def findBitPos(decimal):
+    pos = 0
+    i = 1
+    while i < decimal:
+        i = i << 1
+        pos = pos + 1
+        if pos == 64:
+            return -1
+
+    # TODO: b/168065706. This is just to fix the build. That the problem of devices with
+    # multiple bits set must be addressed more generally in the configurable audio policy
+    # and parameter framework.
+    if i > decimal:
+        logging.info("Device:{} which has multiple bits set is skipped. b/168065706".format(decimal))
+        return -2
+    return pos
+
+def generateXmlStructureFile(componentTypeDict, structureTypesFile, outputFile):
+
+    logging.info("Importing structureTypesFile {}".format(structureTypesFile))
+    component_types_in_tree = ET.parse(structureTypesFile)
+
+    component_types_root = component_types_in_tree.getroot()
+
+    for component_types_name, values_dict in componentTypeDict.items():
+        for component_type in component_types_root.findall('ComponentType'):
+            if component_type.get('Name') == component_types_name:
+                bitparameters_node = component_type.find("BitParameterBlock")
+                if bitparameters_node is not None:
+                    ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
+                    for key, value in ordered_values.items():
+                        pos = findBitPos(value)
+                        if pos >= 0:
+                            value_node = ET.SubElement(bitparameters_node, "BitParameter")
+                            value_node.set('Name', key)
+                            value_node.set('Size', "1")
+                            value_node.set('Pos', str(pos))
+
+                enum_parameter_node = component_type.find("EnumParameter")
+                if enum_parameter_node is not None:
+                    ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
+                    for key, value in ordered_values.items():
+                        value_node = ET.SubElement(enum_parameter_node, "ValuePair")
+                        value_node.set('Literal', key)
+                        value_node.set('Numerical', str(value))
+
+    xmlstr = ET.tostring(component_types_root, encoding='utf8', method='xml')
+    reparsed = MINIDOM.parseString(xmlstr)
+    prettyXmlStr = reparsed.toprettyxml(indent="    ", newl='\n')
+    prettyXmlStr = os.linesep.join([s for s in prettyXmlStr.splitlines() if s.strip()])
+    outputFile.write(prettyXmlStr)
+
+
+def capitalizeLine(line):
+    return ' '.join((w.capitalize() for w in line.split(' ')))
+
+def parseAndroidAudioFile(androidaudiobaseheaderFile):
+    #
+    # Adaptation table between Android Enumeration prefix and Audio PFW Criterion type names
+    #
+    component_type_mapping_table = {
+        'AUDIO_STREAM' : "VolumeProfileType",
+        'AUDIO_DEVICE_OUT' : "OutputDevicesMask",
+        'AUDIO_DEVICE_IN' : "InputDevicesMask"}
+
+    all_component_types = {
+        'VolumeProfileType' : {},
+        'OutputDevicesMask' : {},
+        'InputDevicesMask' : {}
+    }
+
+    #
+    # _CNT, _MAX, _ALL and _NONE are prohibited values as they are just helpers for enum users.
+    #
+    ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
+
+    criteria_pattern = re.compile(
+        r"\s*V\((?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])[0-9a-fA-F]+|[0-9]+)")
+
+    logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
+
+    multi_bit_output_device_shift = 32
+    input_device_shift = 0
+
+    for line_number, line in enumerate(androidaudiobaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiobaseheaderFile.name, line_number, line))
+
+            component_type_name = component_type_mapping_table[match.groupdict()['type']]
+            component_type_literal = match.groupdict()['literal'].lower()
+
+            component_type_numerical_value = match.groupdict()['values']
+
+            # for AUDIO_DEVICE_IN: rename default to stub
+            if component_type_name == "InputDevicesMask":
+                component_type_numerical_value = str(int(component_type_numerical_value, 0))
+                if component_type_literal == "default":
+                    component_type_literal = "stub"
+
+                # Remove ambient and in_communication since they were deprecated
+                if component_type_literal == "ambient" or component_type_literal == "communication":
+                    logging.info("Remove deprecated device {}".format(component_type_literal))
+                    continue
+
+                component_type_numerical_value = str(2**input_device_shift)
+                input_device_shift += 1
+
+            if component_type_name == "OutputDevicesMask":
+                if component_type_literal == "default":
+                    component_type_literal = "stub"
+
+                string_int = int(component_type_numerical_value, 0)
+                num_bits = bin(string_int).count("1")
+                if num_bits > 1:
+                    logging.info("The value {} is for criterion {} binary rep {} has {} bits sets"
+                        .format(component_type_numerical_value, component_type_name, bin(string_int), num_bits))
+                    string_int = 2**multi_bit_output_device_shift
+                    logging.info("new val assigned is {} {}" .format(string_int, bin(string_int)))
+                    multi_bit_output_device_shift += 1
+                    component_type_numerical_value = str(string_int)
+
+            # Remove duplicated numerical values
+            if int(component_type_numerical_value, 0) in all_component_types[component_type_name].values():
+                logging.info("The value {}:{} is duplicated for criterion {}, KEEPING LATEST".format(component_type_numerical_value, component_type_literal, component_type_name))
+                for key in list(all_component_types[component_type_name]):
+                    if all_component_types[component_type_name][key] == int(component_type_numerical_value, 0):
+                        del all_component_types[component_type_name][key]
+
+            all_component_types[component_type_name][component_type_literal] = int(component_type_numerical_value, 0)
+
+            logging.debug("type:{}, literal:{}, values:{}.".format(component_type_name, component_type_literal, component_type_numerical_value))
+
+    if "stub" not in all_component_types["OutputDevicesMask"]:
+        all_component_types["OutputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub output device mask")
+    if "stub" not in all_component_types["InputDevicesMask"]:
+        all_component_types["InputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub input device mask")
+
+    # Transform input source in inclusive criterion
+    for component_types in all_component_types:
+        values = ','.join('{}:{}'.format(value, key) for key, value in all_component_types[component_types].items())
+        logging.info("{}: <{}>".format(component_types, values))
+
+    return all_component_types
+
+
+def main():
+    logging.root.setLevel(logging.INFO)
+    args = parseArgs()
+    route_criteria = 0
+
+    all_component_types = parseAndroidAudioFile(args.androidaudiobaseheader)
+
+    generateXmlStructureFile(all_component_types, args.commontypesstructure, args.outputfile)
+
+# If this file is directly executed
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
index 1adc602..eab3659 100755
--- a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
@@ -237,15 +237,6 @@
                         # Append AUDIO_DEVICE_IN for android type tag
                         input_devices_type_value[criterion_literal] = hex(string_int | 2147483648)
 
-                        num_bits = bin(string_int).count("1")
-                        if num_bits > 1:
-                            logging.info("The value {}:{} is for criterion {} binary rep {} has {} bits sets"
-                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
-                            string_int = 2**multi_bit_inputdevice_shift
-                            logging.info("new val assigned is {} {}" .format(string_int, bin(string_int)))
-                            multi_bit_inputdevice_shift += 1
-                            criterion_numerical_value = str(string_int)
-
                     except ValueError:
                         # Handle the exception
                         logging.info("value {}:{} for criterion {} is not a number, ignoring"
@@ -261,15 +252,6 @@
                         string_int = int(criterion_numerical_value, 0)
                         output_devices_type_value[criterion_literal] = criterion_numerical_value
 
-                        num_bits = bin(string_int).count("1")
-                        if num_bits > 1:
-                            logging.info("The value {}:{} is for criterion {} binary rep {} has {} bits sets"
-                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
-                            string_int = 2**multi_bit_outputdevice_shift
-                            logging.info("new val assigned is {} {}" .format(string_int, bin(string_int)))
-                            multi_bit_outputdevice_shift += 1
-                            criterion_numerical_value = str(string_int)
-
                     except ValueError:
                         # Handle the exception
                         logging.info("The value {}:{} is for criterion {} is not a number, ignoring"
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 506b19b..f15233e 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -13,10 +13,14 @@
     export_include_dirs: ["include"],
     srcs: ["ParameterManagerWrapper.cpp"],
     cflags: [
+        "-DENABLE_CAP_AIDL_HYBRID_MODE",
         "-Wall",
         "-Werror",
         "-Wextra",
-    ],
+    ] + select(release_flag("RELEASE_HARDWARE_AUDIO_USE_CAP_AIDL"), {
+        true: [],
+        default: ["-DDISABLE_CAP_AIDL"],
+    }),
     header_libs: [
         "libaudiofoundation_headers",
         "libaudiopolicycapengine_config_headers",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 0bcde8d..2fd4f19 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -63,27 +63,39 @@
 
 namespace audio_policy {
 
-const char *const ParameterManagerWrapper::mPolicyPfwDefaultConfFileName =
-    "/etc/parameter-framework/ParameterFrameworkConfigurationCap.xml";
-const char *const ParameterManagerWrapper::mPolicyPfwVendorConfFileName =
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
+// Legacy XML from vendor partition used when disabling AIDL CAP configuration (HIDL or Hybrid)
+const char *const ParameterManagerWrapper::mVendorPolicyPfwConfFileName =
     "/vendor/etc/parameter-framework/ParameterFrameworkConfigurationPolicy.xml";
+#endif
+const char *const ParameterManagerWrapper::mPolicyPfwConfFileName =
+    "/etc/parameter-framework/ParameterFrameworkConfigurationCap.xml";
 
 template <>
 struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionInterface> {};
 template <>
 struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionTypeInterface> {};
 
-ParameterManagerWrapper::ParameterManagerWrapper(bool enableSchemaVerification,
-                                                 const std::string &schemaUri)
+ParameterManagerWrapper::ParameterManagerWrapper(bool useLegacyConfigurationFile,
+        bool enableSchemaVerification, const std::string &schemaUri)
     : mPfwConnectorLogger(new ParameterMgrPlatformConnectorLogger)
 {
+    std::string policyPfwConfFileName;
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
     // Connector
-    if (access(mPolicyPfwVendorConfFileName, R_OK) == 0) {
-        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwVendorConfFileName);
-    } else {
-        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwDefaultConfFileName);
+    if (useLegacyConfigurationFile && access(mVendorPolicyPfwConfFileName, R_OK) == 0) {
+        policyPfwConfFileName = mVendorPolicyPfwConfFileName;
     }
-
+#endif
+    if (!useLegacyConfigurationFile && access(mPolicyPfwConfFileName, R_OK) == 0) {
+        policyPfwConfFileName = mPolicyPfwConfFileName;
+    }
+    if (policyPfwConfFileName.empty()) {
+        // bailing out
+        ALOGE("%s: failed to find Cap config file, cannot init Cap.", __func__);
+        return;
+    }
+    mPfwConnector = new CParameterMgrFullConnector(policyPfwConfFileName);
     // Logger
     mPfwConnector->setLogger(mPfwConnectorLogger);
 
@@ -100,7 +112,11 @@
 status_t ParameterManagerWrapper::addCriterion(const std::string &name, bool isInclusive,
                                                ValuePairs pairs, const std::string &defaultValue)
 {
-    ALOG_ASSERT(not isStarted(), "Cannot add a criterion if PFW is already started");
+    if (mPfwConnector == nullptr) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
+    ALOG_ASSERT(not isStarted(), "%s failed since PFW is already started", __func__);
     auto criterionType = mPfwConnector->createSelectionCriterionType(isInclusive);
 
     for (auto pair : pairs) {
@@ -110,13 +126,13 @@
         criterionType->addValuePair(std::get<0>(pair), std::get<2>(pair), error);
 
         if (name == capEngineConfig::gOutputDeviceCriterionName) {
-            ALOGV("%s: Adding mOutputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
+            ALOGV("%s: Adding mOutputDeviceToCriterionTypeMap 0x%X %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
             mOutputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
         }
         if (name == capEngineConfig::gInputDeviceCriterionName) {
-            ALOGV("%s: Adding mInputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
+            ALOGV("%s: Adding mInputDeviceToCriterionTypeMap 0x%X %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
             mInputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
@@ -142,7 +158,9 @@
 ParameterManagerWrapper::~ParameterManagerWrapper()
 {
     // Unset logger
-    mPfwConnector->setLogger(NULL);
+    if (mPfwConnector != nullptr) {
+        mPfwConnector->setLogger(NULL);
+    }
     // Remove logger
     delete mPfwConnectorLogger;
     // Remove connector
@@ -153,11 +171,12 @@
 {
     ALOGD("%s: in", __FUNCTION__);
     /// Start PFW
-    if (!mPfwConnector->start(error)) {
-        ALOGE("%s: Policy PFW start error: %s", __FUNCTION__, error.c_str());
+    if (mPfwConnector == nullptr || !mPfwConnector->start(error)) {
+        ALOGE("%s: Policy PFW failed (error:  %s)", __func__,
+              mPfwConnector == nullptr ? "invalid connector" : error.c_str());
         return NO_INIT;
     }
-    ALOGD("%s: Policy PFW successfully started!", __FUNCTION__);
+    ALOGD("%s: Policy PFW succeeded!", __FUNCTION__);
     return NO_ERROR;
 }
 
@@ -179,13 +198,17 @@
     return it != elementsMap.end() ? it->second : NULL;
 }
 
-bool ParameterManagerWrapper::isStarted()
+bool ParameterManagerWrapper::isStarted() const
 {
     return mPfwConnector && mPfwConnector->isStarted();
 }
 
 status_t ParameterManagerWrapper::setPhoneState(audio_mode_t mode)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
@@ -203,11 +226,14 @@
 
 audio_mode_t ParameterManagerWrapper::getPhoneState() const
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return AUDIO_MODE_NORMAL;
+    }
     const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__,
-              capEngineConfig::gPhoneStateCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__, capEngineConfig::gPhoneStateCriterionName);
         return AUDIO_MODE_NORMAL;
     }
     return static_cast<audio_mode_t>(criterion->getCriterionState());
@@ -220,11 +246,14 @@
     if (usage > AUDIO_POLICY_FORCE_USE_CNT) {
         return BAD_VALUE;
     }
-
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+        ALOGE("%s: no criterion found for %s", __func__,
               capEngineConfig::gForceUseCriterionTag[usage]);
         return BAD_VALUE;
     }
@@ -242,10 +271,14 @@
     if (usage > AUDIO_POLICY_FORCE_USE_CNT) {
         return AUDIO_POLICY_FORCE_NONE;
     }
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return AUDIO_POLICY_FORCE_NONE;
+    }
     const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+        ALOGE("%s: no criterion found for %s", __func__,
               capEngineConfig::gForceUseCriterionTag[usage]);
         return AUDIO_POLICY_FORCE_NONE;
     }
@@ -263,6 +296,10 @@
 status_t ParameterManagerWrapper::setDeviceConnectionState(
         audio_devices_t type, const std::string &address, audio_policy_dev_state_t state)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     std::string criterionName = audio_is_output_device(type) ?
             capEngineConfig::gOutputDeviceAddressCriterionName :
             capEngineConfig::gInputDeviceAddressCriterionName;
@@ -272,14 +309,14 @@
             getElement<ISelectionCriterionInterface>(criterionName, mPolicyCriteria);
 
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, criterionName.c_str());
+        ALOGE("%s: no criterion found for %s", __func__, criterionName.c_str());
         return DEAD_OBJECT;
     }
 
     auto criterionType = criterion->getCriterionType();
     uint64_t deviceAddressId;
     if (not criterionType->getNumericalValue(address.c_str(), deviceAddressId)) {
-        ALOGW("%s: unknown device address reported (%s) for criterion %s", __FUNCTION__,
+        ALOGW("%s: unknown device address reported (%s) for criterion %s", __func__,
               address.c_str(), criterionName.c_str());
         return BAD_TYPE;
     }
@@ -295,6 +332,10 @@
 }
 
 status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types) {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gInputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
@@ -308,6 +349,10 @@
 }
 
 status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types) {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return NO_INIT;
+    }
     ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
             capEngineConfig::gOutputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
@@ -322,6 +367,10 @@
 
 void ParameterManagerWrapper::applyPlatformConfiguration()
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     mPfwConnector->applyConfigurations();
 }
 
@@ -361,10 +410,14 @@
 
 void ParameterManagerWrapper::createDomain(const std::string &domain)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     std::string error;
     bool ret = mPfwConnector->createDomain(domain, error);
     if (!ret) {
-        ALOGD("%s: failed to create domain %s (error=%s)", __func__, domain.c_str(),
+        ALOGD("%s: failed for %s (error=%s)", __func__, domain.c_str(),
         error.c_str());
     }
 }
@@ -372,27 +425,39 @@
 void ParameterManagerWrapper::addConfigurableElementToDomain(const std::string &domain,
         const std::string &elementPath)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     std::string error;
     bool ret = mPfwConnector->addConfigurableElementToDomain(domain, elementPath, error);
-    ALOGE_IF(!ret, "%s: failed to add parameter %s for domain %s (error=%s)",
+    ALOGE_IF(!ret, "%s: failed for %s for domain %s (error=%s)",
               __func__, elementPath.c_str(), domain.c_str(), error.c_str());
 }
 
 void ParameterManagerWrapper::createConfiguration(const std::string &domain,
         const std::string &configurationName)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     std::string error;
     bool ret = mPfwConnector->createConfiguration(domain, configurationName, error);
-    ALOGE_IF(!ret, "%s: failed to create configuration %s for domain %s (error=%s)",
+    ALOGE_IF(!ret, "%s: failed for %s for domain %s (error=%s)",
               __func__, configurationName.c_str(), domain.c_str(), error.c_str());
 }
 
 void ParameterManagerWrapper::setApplicationRule(
         const std::string &domain, const std::string &configurationName, const std::string &rule)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     std::string error;
     bool ret = mPfwConnector->setApplicationRule(domain, configurationName, rule, error);
-    ALOGE_IF(!ret, "%s: failed to set rule %s for domain %s and configuration %s (error=%s)",
+    ALOGE_IF(!ret, "%s: failed for %s for domain %s and configuration %s (error=%s)",
               __func__, rule.c_str(), domain.c_str(), configurationName.c_str(), error.c_str());
 }
 
@@ -400,6 +465,10 @@
         const std::string &configurationName, const std::string &elementPath,
         std::string &value)
 {
+    if (!isStarted()) {
+        ALOGE("%s: failed, Cap not initialized", __func__);
+        return;
+    }
     std::string error;
     bool ret = mPfwConnector->accessConfigurationValue(domain, configurationName, elementPath,
             value, /*set=*/ true, error);
@@ -416,7 +485,7 @@
     }
     std::string error;
     if (!mPfwConnector->setTuningMode(/* bOn= */ true, error)) {
-        ALOGD("%s: failed to set Tuning Mode error=%s", __FUNCTION__, error.c_str());
+        ALOGD("%s: failed (error=%s)", __func__, error.c_str());
         return DEAD_OBJECT;
     }
     for (auto &domain: capSettings.parsedConfig->capConfigurableDomains) {
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index 0c45a60..f72c458 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -47,7 +47,8 @@
     using Criteria = std::map<std::string, ISelectionCriterionInterface *>;
 
 public:
-    ParameterManagerWrapper(bool enableSchemaVerification = false,
+    ParameterManagerWrapper(bool useLegacyConfigurationFile = false,
+                            bool enableSchemaVerification = false,
                             const std::string &schemaUri = {});
     ~ParameterManagerWrapper();
 
@@ -75,7 +76,7 @@
      *
      * @return true if platform state is started correctly, false otherwise.
      */
-    bool isStarted();
+    bool isStarted() const;
 
     /**
      * Set Telephony Mode.
@@ -234,9 +235,10 @@
 
     DeviceToCriterionTypeAdapter mOutputDeviceToCriterionTypeMap;
     DeviceToCriterionTypeAdapter mInputDeviceToCriterionTypeMap;
-
-    static const char *const mPolicyPfwDefaultConfFileName; /**< Default Policy PFW top file name.*/
-    static const char *const mPolicyPfwVendorConfFileName; /**< Vendor Policy PFW top file name.*/
+#ifdef ENABLE_CAP_AIDL_HYBRID_MODE
+    static const char *const mVendorPolicyPfwConfFileName; /**< Vendor CapEngine PFW top file name.*/
+#endif
+    static const char *const mPolicyPfwConfFileName; /**< CapEngine PFW top file name.*/
 };
 
 } // namespace audio_policy
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index b140a9d..e2e5ec6 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -450,21 +450,19 @@
                 excludedDevices.push_back(AUDIO_DEVICE_OUT_AUX_DIGITAL);
             }
             if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
-                // Get the last connected device of wired and bluetooth a2dp
-                devices2 = availableOutputDevices.getFirstDevicesFromTypes(
-                        getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
                 if (com::android::media::audioserver::use_bt_sco_for_media()) {
-                    if (isBtScoActive(availableOutputDevices)
-                         && !(devices2.getDevicesFromTypes(
-                                 getAudioDeviceOutAllA2dpSet()).isEmpty()
-                             && devices2.getDevicesFromTypes(
-                                     getAudioDeviceOutAllBleSet()).isEmpty())) {
+                    if (isBtScoActive(availableOutputDevices)) {
                         devices2 = availableOutputDevices.getFirstDevicesFromTypes(
                                 { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
-                                  AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
-                                  AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
                     }
                 }
+                if (devices2.isEmpty()) {
+                    // Get the last connected device of wired and bluetooth a2dp
+                    devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+                            getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
+                }
             } else {
                 // Get the last connected device of wired except bluetooth a2dp
                 devices2 = availableOutputDevices.getFirstDevicesFromTypes(
@@ -871,6 +869,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         bool ignorePreferredDevice,
                                                          uid_t uid,
                                                          audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
@@ -886,10 +885,13 @@
     //
     // Honor explicit routing requests only if all active clients have a preferred route in which
     // case the last active client route is used
-    sp<DeviceDescriptor> device =
-            findPreferredDevice(inputs, attr.source, availableInputDevices);
-    if (device != nullptr) {
-        return device;
+    sp<DeviceDescriptor> device;
+    if (!com::android::media::audioserver::conditionally_ignore_preferred_input_device()
+            || !ignorePreferredDevice) {
+        device = findPreferredDevice(inputs, attr.source, availableInputDevices);
+        if (device != nullptr) {
+            return device;
+        }
     }
 
     device = policyMixes.getDeviceAndMixForInputSource(attr,
@@ -902,6 +904,14 @@
     }
 
     device = getDeviceForInputSource(attr.source);
+
+    if (device != nullptr && device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
+        sp<DeviceDescriptor> device2 = getInputDeviceForEchoRef(attr, availableInputDevices);
+        if (device2 != nullptr) {
+            return device2;
+        }
+    }
+
     if (device == nullptr || !audio_is_remote_submix_device(device->type())) {
         // Return immediately if the device is null or it is not a remote submix device.
         return device;
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index e9c71dd..188bc66 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -58,6 +58,7 @@
                                            bool fromCache = false) const override;
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     bool ignorePreferredDevice = true,
                                                      uid_t uid = 0,
                                                      audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index b17a248..42c3728 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -185,11 +185,7 @@
 
 template <typename T>
 T getValueFromVector(FuzzedDataProvider *fdp, std::vector<T> arr) {
-    if (fdp->ConsumeBool()) {
-        return arr[fdp->ConsumeIntegralInRange<int32_t>(0, arr.size() - 1)];
-    } else {
-        return (T)fdp->ConsumeIntegral<uint32_t>();
-    }
+    return arr[fdp->ConsumeIntegralInRange<int32_t>(0, arr.size() - 1)];
 }
 
 class AudioPolicyManagerFuzzer {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index f133dfa..ac8062e 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -109,8 +109,9 @@
 // ----------------------------------------------------------------------------
 
 status_t AudioPolicyManager::setDeviceConnectionState(audio_policy_dev_state_t state,
-        const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat) {
-    status_t status = setDeviceConnectionStateInt(state, port, encodedFormat);
+        const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat,
+        bool deviceSwitch) {
+    status_t status = setDeviceConnectionStateInt(state, port, encodedFormat, deviceSwitch);
     nextAudioPortGeneration();
     return status;
 }
@@ -123,7 +124,7 @@
     media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
         status == OK) {
-        return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
+        return setDeviceConnectionState(state, aidlPort.hal, encodedFormat, false /*deviceSwitch*/);
     } else {
         ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
         return status;
@@ -144,7 +145,7 @@
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(
         audio_policy_dev_state_t state, const android::media::audio::common::AudioPort& port,
-        audio_format_t encodedFormat) {
+        audio_format_t encodedFormat, bool deviceSwitch) {
     if (port.ext.getTag() != AudioPortExt::device) {
         return BAD_VALUE;
     }
@@ -169,18 +170,19 @@
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
         device->setExtraAudioDescriptors(port.extraAudioDescriptors);
     }
-    return setDeviceConnectionStateInt(device, state);
+    return setDeviceConnectionStateInt(device, state, deviceSwitch);
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t deviceType,
                                                          audio_policy_dev_state_t state,
                                                          const char* device_address,
                                                          const char* device_name,
-                                                         audio_format_t encodedFormat) {
+                                                         audio_format_t encodedFormat,
+                                                         bool deviceSwitch) {
     media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
         status == OK) {
-        return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
+        return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat, deviceSwitch);
     } else {
         ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
         return status;
@@ -188,7 +190,8 @@
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(const sp<DeviceDescriptor> &device,
-                                                         audio_policy_dev_state_t state)
+                                                         audio_policy_dev_state_t state,
+                                                         bool deviceSwitch)
 {
     // handle output devices
     if (audio_is_output_device(device->type())) {
@@ -329,57 +332,59 @@
             return false;
         };
 
-        if (doCheckForDeviceAndOutputChanges) {
+        if (doCheckForDeviceAndOutputChanges && !deviceSwitch) {
             checkForDeviceAndOutputChanges(checkCloseOutputs);
         } else {
             checkCloseOutputs();
         }
-        (void)updateCallRouting(false /*fromCache*/);
-        const DeviceVector msdOutDevices = getMsdAudioOutDevices();
-        const DeviceVector activeMediaDevices =
-                mEngine->getActiveMediaDevices(mAvailableOutputDevices);
-        std::map<audio_io_handle_t, DeviceVector> outputsToReopenWithDevices;
-        for (size_t i = 0; i < mOutputs.size(); i++) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-            if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
-                (desc != mPrimaryOutput))) {
-                DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
-                // do not force device change on duplicated output because if device is 0, it will
-                // also force a device 0 for the two outputs it is duplicated to which may override
-                // a valid device selection on those outputs.
-                bool force = (msdOutDevices.isEmpty() || msdOutDevices != desc->devices())
-                        && !desc->isDuplicated()
-                        && (!device_distinguishes_on_address(device->type())
-                                // always force when disconnecting (a non-duplicated device)
-                                || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
-                    // If the device is using preferred mixer attributes, the output need to reopen
-                    // with default configuration when the new selected devices are different from
-                    // current routing devices
-                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
-                    continue;
+        if (!deviceSwitch) {
+            (void)updateCallRouting(false /*fromCache*/);
+            const DeviceVector msdOutDevices = getMsdAudioOutDevices();
+            const DeviceVector activeMediaDevices =
+                    mEngine->getActiveMediaDevices(mAvailableOutputDevices);
+            std::map<audio_io_handle_t, DeviceVector> outputsToReopenWithDevices;
+            for (size_t i = 0; i < mOutputs.size(); i++) {
+                sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+                if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
+                    (desc != mPrimaryOutput))) {
+                    DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+                    // do not force device change on duplicated output because if device is 0,
+                    // it will also force a device 0 for the two outputs it is duplicated
+                    // to, which may override a valid device selection on those outputs.
+                    bool force = (msdOutDevices.isEmpty() || msdOutDevices != desc->devices())
+                            && !desc->isDuplicated()
+                            && (!device_distinguishes_on_address(device->type())
+                                    // always force when disconnecting (a non-duplicated device)
+                                    || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+                    if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
+                        // If the device is using preferred mixer attributes, the output need to
+                        // reopen with default configuration when the new selected devices are
+                        // different from current routing devices
+                        outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
+                        continue;
+                    }
+                    setOutputDevices(__func__, desc, newDevices, force, 0);
                 }
-                setOutputDevices(__func__, desc, newDevices, force, 0);
-            }
-            if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
-                    !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
-                    desc->supportsDevicesForPlayback(activeMediaDevices)) {
-                // Reopen the output to query the dynamic profiles when there is not active
-                // clients or all active clients will be rerouted. Otherwise, set the flag
-                // `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
-                // can be reopened to query dynamic profiles when all clients are inactive.
-                if (areAllActiveTracksRerouted(desc)) {
-                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), activeMediaDevices);
-                } else {
-                    desc->mPendingReopenToQueryProfiles = true;
+                if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
+                        !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
+                        desc->supportsDevicesForPlayback(activeMediaDevices)) {
+                    // Reopen the output to query the dynamic profiles when there are no
+                    // active clients or all active clients will be rerouted. Otherwise, set the flag
+                    // `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
+                    // can be reopened to query dynamic profiles when all clients are inactive.
+                    if (areAllActiveTracksRerouted(desc)) {
+                        outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), activeMediaDevices);
+                    } else {
+                        desc->mPendingReopenToQueryProfiles = true;
+                    }
+                }
+                if (!desc->supportsDevicesForPlayback(activeMediaDevices)) {
+                    // Clear the flag that previously set for re-querying profiles.
+                    desc->mPendingReopenToQueryProfiles = false;
                 }
             }
-            if (!desc->supportsDevicesForPlayback(activeMediaDevices)) {
-                // Clear the flag that previously set for re-querying profiles.
-                desc->mPendingReopenToQueryProfiles = false;
-            }
+            reopenOutputsWithDevices(outputsToReopenWithDevices);
         }
-        reopenOutputsWithDevices(outputsToReopenWithDevices);
 
         if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
             cleanUpForDevice(device);
@@ -469,19 +474,22 @@
             return BAD_VALUE;
         }
 
-        checkCloseInputs();
-        // As the input device list can impact the output device selection, update
-        // getDeviceForStrategy() cache
-        updateDevicesAndOutputs();
+        if (!deviceSwitch) {
+            checkCloseInputs();
+            // As the input device list can impact the output device selection, update
+            // getDeviceForStrategy() cache
+            updateDevicesAndOutputs();
 
-        (void)updateCallRouting(false /*fromCache*/);
-        // Reconnect Audio Source
-        for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
-            auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
-            checkAudioSourceForAttributes(attributes);
-        }
-        if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
-            cleanUpForDevice(device);
+            (void)updateCallRouting(false /*fromCache*/);
+            // Reconnect Audio Source
+            for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
+                auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
+                checkAudioSourceForAttributes(attributes);
+            }
+
+            if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
+                cleanUpForDevice(device);
+            }
         }
 
         mpClientInterface->onAudioPortListUpdate();
@@ -1139,7 +1147,9 @@
                                         audio_channel_mask_t channelMask,
                                         audio_output_flags_t flags,
                                         bool directOnly) {
-    sp<IOProfile> profile;
+    sp<IOProfile> directOnlyProfile = nullptr;
+    sp<IOProfile> compressOffloadProfile = nullptr;
+    sp<IOProfile> profile = nullptr;
     for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
              if (curProfile->getCompatibilityScore(devices,
@@ -1161,19 +1171,21 @@
                 return curProfile;
              }
 
-             // when searching for direct outputs, if several profiles are compatible, give priority
-             // to one with offload capability
-             if (profile != 0 &&
-                 ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
-                continue;
-             }
              profile = curProfile;
-             if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-                 break;
+             if ((flags == AUDIO_OUTPUT_FLAG_DIRECT) &&
+                 curProfile->getFlags() == AUDIO_OUTPUT_FLAG_DIRECT) {
+                 directOnlyProfile = curProfile;
+             }
+
+             if ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+                 compressOffloadProfile = curProfile;
              }
         }
     }
-    return profile;
+
+    return directOnlyProfile ? directOnlyProfile
+                            : (compressOffloadProfile ? compressOffloadProfile : profile);
+
 }
 
 sp<IOProfile> AudioPolicyManager::getSpatializerOutputProfile(
@@ -1658,11 +1670,7 @@
     }
 
     if (!profile->canOpenNewIo()) {
-        if (!com::android::media::audioserver::direct_track_reprioritization()) {
-            ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
-                  profile->getName().c_str());
-            return NAME_NOT_FOUND;
-        } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
+        if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
             // MMAP gracefully handles lack of an exclusive track resource by mixing
             // above the audio framework. For AAudio to know that the limit is reached,
             // return an error.
@@ -2490,7 +2498,7 @@
                                          const sp<TrackClientDescriptor>& client,
                                          uint32_t *delayMs)
 {
-    // cannot start playback of STREAM_TTS if any other output is being used
+    // cannot start beacon playback if any other output is being used
     uint32_t beaconMuteLatency = 0;
 
     *delayMs = 0;
@@ -2498,17 +2506,22 @@
     auto clientVolSrc = client->volumeSource();
     auto clientStrategy = client->strategy();
     auto clientAttr = client->attributes();
-    if (stream == AUDIO_STREAM_TTS) {
-        ALOGV("\t found BEACON stream");
-        if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(
-                                    toVolumeSource(AUDIO_STREAM_TTS, false) /*sourceToIgnore*/)) {
-            return INVALID_OPERATION;
+    // SPEAKER_CLEANUP doesn't share the high-frequency requirements of beacons
+    if (clientAttr.usage != AUDIO_USAGE_SPEAKER_CLEANUP) {
+        if (stream == AUDIO_STREAM_TTS) {
+            ALOGV("\t found BEACON stream");
+            if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(
+                    toVolumeSource(AUDIO_STREAM_TTS, false) /*sourceToIgnore*/)) {
+                return INVALID_OPERATION;
+            } else {
+                beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
+            }
         } else {
-            beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
+            // some playback other than beacon starts
+            beaconMuteLatency = handleEventForBeacon(STARTING_OUTPUT);
         }
     } else {
-        // some playback other than beacon starts
-        beaconMuteLatency = handleEventForBeacon(STARTING_OUTPUT);
+        // TODO handle muting of other streams outside of a11y
     }
 
     // force device change if the output is inactive and no audio patch is already present.
@@ -2613,8 +2626,15 @@
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
 
-                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
-                    outputsToReopen.push_back(desc);
+                if (desc->isBitPerfect()) {
+                    if (needToCloseBitPerfectOutput) {
+                        outputsToReopen.push_back(desc);
+                    } else if (!desc->devices().filter(devices).isEmpty()) {
+                        // There is an active bit-perfect playback on one of the targeted device,
+                        // the client should be reattached to the bit-perfect thread.
+                        ALOGD("%s, fails as there is bit-perfect playback active", __func__);
+                        return DEAD_OBJECT;
+                    }
                 }
             }
         }
@@ -2780,7 +2800,11 @@
     auto clientVolSrc = client->volumeSource();
     bool wasLeUnicastActive = isLeUnicastActive();
 
-    handleEventForBeacon(stream == AUDIO_STREAM_TTS ? STOPPING_BEACON : STOPPING_OUTPUT);
+    // speaker cleanup is not a beacon event
+    // TODO handle speaker cleanup activity
+    if (client->attributes().usage != AUDIO_USAGE_SPEAKER_CLEANUP) {
+        handleEventForBeacon(stream == AUDIO_STREAM_TTS ? STOPPING_BEACON : STOPPING_OUTPUT);
+    }
 
     if (outputDesc->getActivityCount(clientVolSrc) > 0) {
         if (outputDesc->getActivityCount(clientVolSrc) == 1) {
@@ -2928,20 +2952,53 @@
     return false;
 }
 
+
+static AudioPolicyClientInterface::MixType getMixType(audio_devices_t deviceType,
+                                                      bool externallyRouted,
+                                                      const sp<AudioPolicyMix>& mix) {
+    using MixType = AudioPolicyClientInterface::MixType;
+    // If the client chose the route, special perms
+    if (externallyRouted) {
+        if (is_mix_loopback_render(mix->mRouteFlags)) {
+            return MixType::PUBLIC_CAPTURE_PLAYBACK;
+        }
+        return MixType::EXT_POLICY_REROUTE;
+    }
+    switch (deviceType) {
+        case AUDIO_DEVICE_IN_ECHO_REFERENCE:
+            return MixType::CAPTURE;
+        case AUDIO_DEVICE_IN_TELEPHONY_RX:
+            return MixType::TELEPHONY_RX_CAPTURE;
+        case AUDIO_DEVICE_IN_REMOTE_SUBMIX:
+            if (!mix) {
+                return MixType::CAPTURE;
+            } else {
+                ALOG_ASSERT(mix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
+                // when routed due to a policy, no perms (client not in control)
+                // there is an external policy, but this input is attached to a mix of recorders,
+                // meaning it receives audio injected into the framework, so the recorder doesn't
+                // know about it and is therefore considered "legacy"
+                return MixType::NONE;
+            }
+        default:
+            return MixType::NONE;
+    }
+}
+
 base::expected<media::GetInputForAttrResponse, std::variant<binder::Status, AudioConfigBase>>
-AudioPolicyManager::getInputForAttr(audio_attributes_t attributes,
+AudioPolicyManager::getInputForAttr(audio_attributes_t attributes_,
                                      audio_io_handle_t requestedInput,
                                      audio_port_handle_t requestedDeviceId,
                                      audio_config_base_t config,
-                                     audio_input_flags_t flags,
+                                     const audio_input_flags_t flags,
                                      audio_unique_id_t riid,
                                      audio_session_t session,
                                      const AttributionSourceState& attributionSource)
 {
     ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
           "flags %#x attributes=%s requested device ID %d",
-          __func__, attributes.source, config.sample_rate, config.format, config.channel_mask,
-          session, flags, toString(attributes).c_str(), requestedDeviceId);
+          __func__, attributes_.source, config.sample_rate, config.format, config.channel_mask,
+          session, flags, toString(attributes_).c_str(), requestedDeviceId);
 
     sp<AudioPolicyMix> policyMix;
     sp<DeviceDescriptor> device;
@@ -2953,21 +3010,13 @@
     int vdi = 0 /* default device id */;
     audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
 
-    if (attributes.source == AUDIO_SOURCE_DEFAULT) {
-        attributes.source = AUDIO_SOURCE_MIC;
+    if (attributes_.source == AUDIO_SOURCE_DEFAULT) {
+        attributes_.source = AUDIO_SOURCE_MIC;
     }
 
-    using PermissionReqs = AudioPolicyClientInterface::PermissionReqs;
-    using MixType = AudioPolicyClientInterface::MixType;
-    PermissionReqs permReq {
-        .source =  legacy2aidl_audio_source_t_AudioSource(attributes.source).value(),
-        .mixType = MixType::NONE, // can be modified
-        .virtualDeviceId = 0, // can be modified
-        .isHotword = (flags & (AUDIO_INPUT_FLAG_HW_HOTWORD | AUDIO_INPUT_FLAG_HOTWORD_TAP |
-                               AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0,
-        .isCallRedir = (attributes.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0,
-    };
+    const auto& attributes = attributes_;
 
+    bool externallyRouted = false;
     // Explicit routing?
     sp<DeviceDescriptor> explicitRoutingDevice =
             mAvailableInputDevices.getDeviceFromId(requestedDeviceId);
@@ -2976,7 +3025,6 @@
     // possible
     if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) == AUDIO_INPUT_FLAG_MMAP_NOIRQ &&
             requestedInput != AUDIO_IO_HANDLE_NONE) {
-        input = requestedInput;
         ssize_t index = mInputs.indexOfKey(requestedInput);
         if (index < 0) {
             return base::unexpected{Status::fromExceptionCode(
@@ -3011,94 +3059,67 @@
                 }
             }
         }
+        input = requestedInput;
         device = inputDesc->getDevice();
-        ALOGV("%s reusing MMAP input %d for session %d", __FUNCTION__, requestedInput, session);
-        auto permRes = mpClientInterface->checkPermissionForInput(attributionSource, permReq);
-        if (!permRes.has_value()) return base::unexpected {permRes.error()};
-        if (!permRes.value()) {
-            return base::unexpected{Status::fromExceptionCode(
-                    EX_SECURITY, String8::format("%s: %s missing perms for source %d mix %d vdi %d"
-                        "hotword? %d callredir? %d", __func__, attributionSource.toString().c_str(),
-                                                 static_cast<int>(permReq.source),
-                                                 static_cast<int>(permReq.mixType),
-                                                 permReq.virtualDeviceId,
-                                                 permReq.isHotword,
-                                                 permReq.isCallRedir))};
-        }
-    } else {
-        if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
+    } else if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
                 extractAddressFromAudioAttributes(attributes).has_value()) {
-            status_t status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
-            if (status != NO_ERROR) {
-                ALOGW("%s could not find input mix for attr %s",
-                        __func__, toString(attributes).c_str());
-                return base::unexpected {aidl_utils::binderStatusFromStatusT(status)};
-            }
-            device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
-                                                      String8(attributes.tags + strlen("addr=")),
-                                                      AUDIO_FORMAT_DEFAULT);
-            if (device == nullptr) {
-                return base::unexpected{Status::fromExceptionCode(
-                        EX_ILLEGAL_ARGUMENT,
-                        String8::format(
-                                "%s could not find in Remote Submix device for source %d, tags %s",
-                                __func__, attributes.source, attributes.tags))};
-            }
-
-            if (is_mix_loopback_render(policyMix->mRouteFlags)) {
-                permReq.mixType = MixType::PUBLIC_CAPTURE_PLAYBACK;
-            } else {
-                permReq.mixType = MixType::EXT_POLICY_REROUTE;
-            }
-            // TODO is this correct?
-            permReq.virtualDeviceId = policyMix->mVirtualDeviceId;
+        status_t status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
+        if (status != NO_ERROR) {
+            ALOGW("%s could not find input mix for attr %s",
+                    __func__, toString(attributes).c_str());
+            return base::unexpected {aidl_utils::binderStatusFromStatusT(status)};
+        }
+        device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                                                  String8(attributes.tags + strlen("addr=")),
+                                                  AUDIO_FORMAT_DEFAULT);
+        externallyRouted = true;
+    } else {
+        if (explicitRoutingDevice != nullptr) {
+            device = explicitRoutingDevice;
         } else {
-            if (explicitRoutingDevice != nullptr) {
-                device = explicitRoutingDevice;
-            } else {
-                // Prevent from storing invalid requested device id in clients
-                requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-                device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
-                ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
-                    __FUNCTION__, device->type());
-            }
-            if (device == nullptr) {
-                return base::unexpected{Status::fromExceptionCode(
-                        EX_ILLEGAL_ARGUMENT,
-                        String8::format("%s could not find device for source %d", __func__,
-                                        attributes.source))};
-            }
-            if (device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
-                permReq.mixType = MixType::CAPTURE;
-            } else if (policyMix) {
-                ALOG_ASSERT(policyMix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
-                // there is an external policy, but this input is attached to a mix of recorders,
-                // meaning it receives audio injected into the framework, so the recorder doesn't
-                // know about it and is therefore considered "legacy"
-                permReq.mixType = MixType::NONE;
-                permReq.virtualDeviceId = policyMix->mVirtualDeviceId;
-            } else if (audio_is_remote_submix_device(device->type())) {
-                permReq.mixType = MixType::CAPTURE;
-            } else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
-                permReq.mixType = MixType::TELEPHONY_RX_CAPTURE;
-            } else {
-                permReq.mixType = MixType::NONE;
-            }
+            // Prevent from storing invalid requested device id in clients
+            requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
+            device = mEngine->getInputDeviceForAttributes(
+                    attributes, true /*ignorePreferredDevice*/, uid, session, &policyMix);
+            ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
+                __FUNCTION__, device->type());
         }
+    }
 
-        auto permRes = mpClientInterface->checkPermissionForInput(attributionSource, permReq);
-        if (!permRes.has_value()) return base::unexpected {permRes.error()};
-        if (!permRes.value()) {
-            return base::unexpected{Status::fromExceptionCode(
-                    EX_SECURITY, String8::format("%s: %s missing perms for source %d mix %d vdi %d"
-                        "hotword? %d callredir? %d", __func__, attributionSource.toString().c_str(),
-                                                 static_cast<int>(permReq.source),
-                                                 static_cast<int>(permReq.mixType),
-                                                 permReq.virtualDeviceId,
-                                                 permReq.isHotword,
-                                                 permReq.isCallRedir))};
-        }
+    if (device == nullptr) {
+        const auto attr = legacy2aidl_audio_attributes_t_AudioAttributes(attributes);
+        return base::unexpected{Status::fromExceptionCode(
+                EX_ILLEGAL_ARGUMENT,
+                String8::format("%s could not find device for attr %s", __func__,
+                                attr.has_value() ? attr->toString().c_str() : ""))};
+    }
 
+    const auto mixType = getMixType(device->type(), externallyRouted, policyMix);
+    const AudioPolicyClientInterface::PermissionReqs permReq {
+        .source =  legacy2aidl_audio_source_t_AudioSource(attributes.source).value(),
+        .mixType = mixType,
+        .virtualDeviceId = (mixType == AudioPolicyClientInterface::MixType::NONE &&
+                            policyMix != nullptr) ? policyMix->mVirtualDeviceId : 0,
+
+        .isHotword = (flags & (AUDIO_INPUT_FLAG_HW_HOTWORD | AUDIO_INPUT_FLAG_HOTWORD_TAP |
+                               AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0,
+        .isCallRedir = (attributes.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0,
+    };
+
+    auto permRes = mpClientInterface->checkPermissionForInput(attributionSource, permReq);
+    if (!permRes.has_value()) return base::unexpected {permRes.error()};
+    if (!permRes.value()) {
+        return base::unexpected{Status::fromExceptionCode(
+                EX_SECURITY, String8::format("%s: %s missing perms for source %d mix %d vdi %d"
+                    "hotword? %d callredir? %d", __func__, attributionSource.toString().c_str(),
+                                             static_cast<int>(permReq.source),
+                                             static_cast<int>(permReq.mixType),
+                                             permReq.virtualDeviceId,
+                                             permReq.isHotword,
+                                             permReq.isCallRedir))};
+    }
+
+    if (input == AUDIO_IO_HANDLE_NONE) {
         input = getInputForDevice(device, session, attributes, config, flags, policyMix);
         if (input == AUDIO_IO_HANDLE_NONE) {
             AudioProfileVector profiles;
@@ -3204,8 +3225,10 @@
         return input;
     }
 
-    // Reuse an already opened input if a client with the same session ID already exists
-    // on that input
+    // Reuse an already opened input if:
+    //  - a client with the same session ID already exists on that input
+    //  - OR the requested device is a remote submix device with the same address
+    //    as the one connected to that input
     for (size_t i = 0; i < mInputs.size(); i++) {
         sp <AudioInputDescriptor> desc = mInputs.valueAt(i);
         if (desc->mProfile != profile) {
@@ -3217,6 +3240,11 @@
                 return desc->mIoHandle;
             }
         }
+        if (audio_is_remote_submix_device(device->type())
+                && (device->address() != "0")
+                && device->equals(desc->getDevice())) {
+            return desc->mIoHandle;
+        }
     }
 
     bool isPreemptor = false;
@@ -3226,7 +3254,8 @@
             //  - Preempt and input if:
             //     - It has only strictly lower priority use cases than the new client
             //     - It has equal priority use cases than the new client, was not
-            //     opened thanks to preemption or has been active since opened.
+            //     opened thanks to preemption, is not routed to the same device as the device to
+            //     consider or has been active since opened.
             //  - Order the preemption candidates by inactive first and priority second
             sp<AudioInputDescriptor> closeCandidate;
             int leastCloseRank = INT_MAX;
@@ -3244,7 +3273,7 @@
                 int topPrio = source_priority(topPrioClient->source());
                 if (topPrio < source_priority(attributes.source)
                       || (topPrio == source_priority(attributes.source)
-                          && !desc->isPreemptor())) {
+                          && !(desc->isPreemptor() || desc->getDevice() == device))) {
                     int closeRank = (desc->isActive() ? sCloseActive : 0) + topPrio;
                     if (closeRank < leastCloseRank) {
                         leastCloseRank = closeRank;
@@ -3557,8 +3586,9 @@
     }
     for (const auto& client : input->clientsList()) {
         sp<DeviceDescriptor> device =
-            mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
-                                                 client->session());
+            mEngine->getInputDeviceForAttributes(
+                    client->attributes(), false /*ignorePreferredDevice*/, client->uid(),
+                    client->session());
         if (!input->supportedDevices().contains(device)) {
             return true;
         }
@@ -3618,14 +3648,20 @@
 
     const DeviceVector devices = mEngine->getOutputDevicesForAttributes(
             attributesToDriveAbs, nullptr /* preferredDevice */, true /* fromCache */);
-    changed &= devices.types().contains(deviceType);
+    audio_devices_t volumeDevice = Volume::getDeviceForVolume(devices.types());
+    changed &= (volumeDevice == deviceType);
     // if something changed on the output device for the changed attributes, apply the stream
     // volumes regarding the new absolute mode to all the outputs without any delay
     if (changed) {
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-            ALOGI("%s: apply stream volumes for portId %d and device type %d", __func__,
-                  desc->getId(), deviceType);
+            DeviceTypeSet curDevices = desc->devices().types();
+            if (volumeDevice != Volume::getDeviceForVolume(curDevices)) {
+                continue;  // skip if not using the target volume device
+            }
+
+            ALOGI("%s: apply stream volumes for %s(curDevices %s) and device type 0x%X", __func__,
+                  desc->info().c_str(), dumpDeviceTypes(curDevices).c_str(), deviceType);
             applyStreamVolumes(desc, {deviceType});
         }
     }
@@ -4095,6 +4131,7 @@
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
     Vector<AudioMix> registeredMixes;
+    AudioDeviceTypeAddrVector devices;
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
         AudioMix mix = mixes[i];
@@ -4218,6 +4255,7 @@
                 break;
             } else {
                 checkOutputs = true;
+                devices.push_back(AudioDeviceTypeAddr(mix.mDeviceType, mix.mDeviceAddress.c_str()));
                 registeredMixes.add(mix);
             }
         }
@@ -4233,7 +4271,10 @@
         }
     } else if (checkOutputs) {
         checkForDeviceAndOutputChanges();
-        updateCallAndOutputRouting();
+        changeOutputDevicesMuteState(devices);
+        updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+            true /* skipDelays */);
+        changeOutputDevicesMuteState(devices);
     }
     return res;
 }
@@ -4244,6 +4285,7 @@
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
+    AudioDeviceTypeAddrVector devices;
     // examine each mix's route type
     for (const auto& mix : mixes) {
         if ((mix.mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
@@ -4291,6 +4333,7 @@
                 res = INVALID_OPERATION;
                 continue;
             } else {
+                devices.push_back(AudioDeviceTypeAddr(mix.mDeviceType, mix.mDeviceAddress.c_str()));
                 checkOutputs = true;
             }
         }
@@ -4298,7 +4341,10 @@
 
     if (res == NO_ERROR && checkOutputs) {
         checkForDeviceAndOutputChanges();
-        updateCallAndOutputRouting();
+        changeOutputDevicesMuteState(devices);
+        updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+            true /* skipDelays */);
+        changeOutputDevicesMuteState(devices);
     }
     return res;
 }
@@ -4614,6 +4660,7 @@
 
     if (status == NO_ERROR) {
         updateInputRouting();
+        updateCallRouting(false /*fromCache*/);
     }
     return status;
 }
@@ -4631,7 +4678,10 @@
             "Engine could not add preferred devices %s for audio source %d role %d",
             dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
 
-    updateInputRouting();
+    if (status == NO_ERROR) {
+        updateInputRouting();
+        updateCallRouting(false /*fromCache*/);
+    }
     return status;
 }
 
@@ -4652,6 +4702,7 @@
             "Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
     if (status == NO_ERROR) {
         updateInputRouting();
+        updateCallRouting(false /*fromCache*/);
     }
     return status;
 }
@@ -4665,6 +4716,7 @@
             "Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
     if (status == NO_ERROR) {
         updateInputRouting();
+        updateCallRouting(false /*fromCache*/);
     }
     return status;
 }
@@ -7546,7 +7598,8 @@
         }
 
         for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
-            if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
+            if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId
+                    || client->isInvalid()) {
                 continue;
             }
             if (!desc->supportsAllDevices(newDevices)) {
@@ -7576,6 +7629,9 @@
         for (audio_io_handle_t srcOut : srcOutputs) {
             sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
             if (desc == nullptr) continue;
+            if (desc == mSpatializerOutput && newDevices == oldDevices) {
+                continue;
+            }
 
             if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
                 maxLatency = desc->latency();
@@ -7583,6 +7639,9 @@
 
             bool invalidate = false;
             for (auto client : desc->clientsList(false /*activeOnly*/)) {
+                if (client->isInvalid()) {
+                    continue;
+                }
                 if (desc->isDuplicated() || !desc->mProfile->isDirectOutput()) {
                     // a client on a non direct outputs has necessarily a linear PCM format
                     // so we can call selectOutput() safely
@@ -7897,7 +7956,8 @@
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes, uid, session);
+        device = mEngine->getInputDeviceForAttributes(
+                attributes, false /*ignorePreferredDevice*/, uid, session);
     }
 
     return device;
@@ -8150,6 +8210,7 @@
               devices.toString().c_str());
         // restore previous device after evaluating strategy mute state
         outputDesc->setDevices(prevDevices);
+        applyStreamVolumes(outputDesc, prevDevices.types(), delayMs, true /*force*/);
         return muteWaitMs;
     }
 
@@ -8316,6 +8377,7 @@
         uint32_t updatedSamplingRate = 0;
         audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
         audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
+        auto bestCompatibleScore = IOProfile::NO_MATCH;
         for (const auto& hwModule : mHwModules) {
             for (const auto& profile : hwModule->getInputProfiles()) {
                 // profile->log();
@@ -8338,10 +8400,13 @@
                 } else if ((flags != AUDIO_INPUT_FLAG_NONE
                         && compatibleScore == IOProfile::PARTIAL_MATCH_WITH_FLAG)
                     || (inexact == nullptr && compatibleScore != IOProfile::NO_MATCH)) {
-                    inexact = profile;
-                    inexactSamplingRate = updatedSamplingRate;
-                    inexactFormat = updatedFormat;
-                    inexactChannelMask = updatedChannelMask;
+                    if (compatibleScore > bestCompatibleScore) {
+                        inexact = profile;
+                        inexactSamplingRate = updatedSamplingRate;
+                        inexactFormat = updatedFormat;
+                        inexactChannelMask = updatedChannelMask;
+                        bestCompatibleScore = compatibleScore;
+                    }
                 }
             }
         }
@@ -8376,41 +8441,37 @@
     device_category deviceCategory = Volume::getDeviceCategory({volumeDevice});
     float volumeDb = curves.volIndexToDb(deviceCategory, index);
 
-    if (com_android_media_audio_abs_volume_index_fix()) {
-        const auto it = mAbsoluteVolumeDrivingStreams.find(volumeDevice);
-        if (it != mAbsoluteVolumeDrivingStreams.end()) {
-            audio_attributes_t attributesToDriveAbs = it->second;
-            auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
-            if (groupToDriveAbs == VOLUME_GROUP_NONE) {
-                ALOGD("%s: no group matching with %s", __FUNCTION__,
-                      toString(attributesToDriveAbs).c_str());
-                return volumeDb;
-            }
-
-            float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
-            VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
-            if (vsToDriveAbs == volumeSource) {
-                // attenuation is applied by the abs volume controller
-                // do not mute LE broadcast to allow the secondary device to continue playing
-                return (index != 0 || volumeDevice == AUDIO_DEVICE_OUT_BLE_BROADCAST) ? volumeDbMax
-                                                                                      : volumeDb;
-            } else {
-                IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
-                int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
-                float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
-                float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
-                                                              curvesAbs.getVolumeIndexMax());
-                float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
-                ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
-                      "attenuation %f to attenuation %f %f", __func__, vsToDriveAbs, volumeDbAbs,
-                      volumeSource, volumeDb, newVolumeDb, volumeDbMax);
-                return newVolumeDb;
-            }
+    const auto it = mAbsoluteVolumeDrivingStreams.find(volumeDevice);
+    if (it != mAbsoluteVolumeDrivingStreams.end()) {
+        audio_attributes_t attributesToDriveAbs = it->second;
+        auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
+        if (groupToDriveAbs == VOLUME_GROUP_NONE) {
+            ALOGD("%s: no group matching with %s", __FUNCTION__,
+                  toString(attributesToDriveAbs).c_str());
+            return volumeDb;
         }
-        return volumeDb;
-    } else {
-        return volumeDb;
+
+        float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
+        VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
+        if (vsToDriveAbs == volumeSource) {
+            // attenuation is applied by the abs volume controller
+            // do not mute LE broadcast to allow the secondary device to continue playing
+            return (index != 0 || volumeDevice == AUDIO_DEVICE_OUT_BLE_BROADCAST) ? volumeDbMax
+                                                                                  : volumeDb;
+        } else {
+            IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
+            int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
+            float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
+            float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
+                                                          curvesAbs.getVolumeIndexMax());
+            float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
+            ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
+                  "attenuation %f to attenuation %f %f", __func__, vsToDriveAbs, volumeDbAbs,
+                  volumeSource, volumeDb, newVolumeDb, volumeDbMax);
+            return newVolumeDb;
+        }
     }
+    return volumeDb;
 }
 
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
@@ -9309,10 +9370,7 @@
 
 status_t AudioPolicyManager::getInputDevicesForAttributes(
         const audio_attributes_t &attr, DeviceVector &devices) {
-    devices = DeviceVector(
-            mEngine->getInputDeviceForAttributes(attr, 0 /*uid unknown here*/,
-                                                 AUDIO_SESSION_NONE,
-                                                 nullptr /* mix */));
+    devices = DeviceVector(mEngine->getInputDeviceForAttributes(attr));
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 44863ee..da60c06 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -101,7 +101,8 @@
 
         // AudioPolicyInterface
         virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
-                const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
+                const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat,
+                bool deviceSwitch);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                   const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
@@ -1240,14 +1241,14 @@
         // Called by setDeviceConnectionState().
         status_t setDeviceConnectionStateInt(audio_policy_dev_state_t state,
                                              const android::media::audio::common::AudioPort& port,
-                                             audio_format_t encodedFormat);
+                                             audio_format_t encodedFormat, bool deviceSwitch);
         status_t setDeviceConnectionStateInt(audio_devices_t deviceType,
                                              audio_policy_dev_state_t state,
                                              const char *device_address,
                                              const char *device_name,
-                                             audio_format_t encodedFormat);
+                                             audio_format_t encodedFormat, bool deviceSwitch = false);
         status_t setDeviceConnectionStateInt(const sp<DeviceDescriptor> &device,
-                                             audio_policy_dev_state_t state);
+                                             audio_policy_dev_state_t state, bool deviceSwitch);
 
         void setEngineDeviceConnectionState(const sp<DeviceDescriptor> device,
                                       audio_policy_dev_state_t state);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 21803e0..4089900 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -19,7 +19,6 @@
 
 #include "AudioPolicyService.h"
 #include "AudioRecordClient.h"
-#include "TypeConverter.h"
 
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
@@ -29,6 +28,7 @@
 #include <cutils/properties.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversion.h>
+#include <media/AudioPermissionPolicy.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioValidator.h>
 #include <media/MediaMetricsItem.h>
@@ -57,7 +57,6 @@
 #define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
-namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
 using android::media::audio::concurrent_audio_record_bypass_permission;
@@ -94,6 +93,7 @@
 using media::audio::common::AudioUsage;
 using media::audio::common::AudioUuid;
 using media::audio::common::Int;
+using media::permission::isSystemUsage;
 
 constexpr int kDefaultVirtualDeviceId = 0;
 namespace {
@@ -128,19 +128,6 @@
 
 }
 
-const std::vector<audio_usage_t>& SYSTEM_USAGES = {
-    AUDIO_USAGE_CALL_ASSISTANT,
-    AUDIO_USAGE_EMERGENCY,
-    AUDIO_USAGE_SAFETY,
-    AUDIO_USAGE_VEHICLE_STATUS,
-    AUDIO_USAGE_ANNOUNCEMENT
-};
-
-bool isSystemUsage(audio_usage_t usage) {
-    return std::find(std::begin(SYSTEM_USAGES), std::end(SYSTEM_USAGES), usage)
-        != std::end(SYSTEM_USAGES);
-}
-
 bool AudioPolicyService::isSupportedSystemUsage(audio_usage_t usage) {
     return std::find(std::begin(mSupportedSystemUsages), std::end(mSupportedSystemUsages), usage)
         != std::end(mSupportedSystemUsages);
@@ -194,7 +181,8 @@
 Status AudioPolicyService::setDeviceConnectionState(
         media::AudioPolicyDeviceState stateAidl,
         const android::media::audio::common::AudioPort& port,
-        const AudioFormatDescription& encodedFormatAidl) {
+        const AudioFormatDescription& encodedFormatAidl,
+        bool deviceSwitch) {
     audio_policy_dev_state_t state = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(stateAidl));
     audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
@@ -217,7 +205,7 @@
     audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->setDeviceConnectionState(
-            state, port, encodedFormat);
+            state, port, encodedFormat, deviceSwitch);
     if (status == NO_ERROR) {
         onCheckSpatializer_l();
     }
@@ -2917,4 +2905,9 @@
             mAudioPolicyManager->getMmapPolicyForDevice(policyType, policyInfo));
 }
 
+Status AudioPolicyService::setEnableHardening(bool shouldEnable) {
+    mShouldEnableHardening.store(shouldEnable);
+    return Status::ok();
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 4c506e8..663e0d6 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -59,6 +59,10 @@
 
 static const nsecs_t kAudioCommandTimeoutNs = seconds(3); // 3 seconds
 
+// longer timeout for create audio patch to account for specific scenarios
+// with Bluetooth devices
+static const nsecs_t kPatchAudioCommandTimeoutNs = seconds(4); // 4 seconds
+
 static const String16 sManageAudioPolicyPermission("android.permission.MANAGE_AUDIO_POLICY");
 
 namespace {
@@ -2435,7 +2439,9 @@
     }
     audio_utils::unique_lock ul(command->mMutex);
     while (command->mWaitStatus) {
-        nsecs_t timeOutNs = kAudioCommandTimeoutNs + milliseconds(delayMs);
+        nsecs_t timeOutNs = (command->mCommand == CREATE_AUDIO_PATCH ?
+                kPatchAudioCommandTimeoutNs : kAudioCommandTimeoutNs) + milliseconds(delayMs);
+
         if (command->mCond.wait_for(
                 ul, std::chrono::nanoseconds(timeOutNs), getTid()) == std::cv_status::timeout) {
             command->mStatus = TIMED_OUT;
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index acd9fe9..935117d 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -100,7 +100,8 @@
     binder::Status setDeviceConnectionState(
             media::AudioPolicyDeviceState state,
             const android::media::audio::common::AudioPort& port,
-            const AudioFormatDescription& encodedFormat) override;
+            const AudioFormatDescription& encodedFormat,
+            bool deviceSwitch) override;
     binder::Status getDeviceConnectionState(const AudioDevice& device,
                                             media::AudioPolicyDeviceState* _aidl_return) override;
     binder::Status handleDeviceConfigChange(
@@ -337,10 +338,13 @@
             AudioMMapPolicyType policyType,
             AudioMMapPolicyInfo* policyInfo) override;
 
+    binder::Status setEnableHardening(bool shouldEnable) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
     // -- IAudioPolicyLocal methods
     const IPermissionProvider& getPermissionProvider() const override;
+    bool isHardeningOverrideEnabled() const override { return mShouldEnableHardening.load(); };
 
     // IBinder::DeathRecipient
     virtual     void        binderDied(const wp<IBinder>& who);
@@ -1139,6 +1143,7 @@
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
     std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
     const sp<NativePermissionController> mPermissionController;
+    std::atomic<bool> mShouldEnableHardening;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 79a7458..1fa2eb8 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -50,7 +50,7 @@
         if (pm != nullptr) {
             const auto status = pm->getTargetSdkVersionForPackage(
                     String16{packageName.data(), packageName.size()}, &targetSdk);
-            return status.isOk() ? targetSdk : -1;
+            return status.isOk() ? targetSdk : __ANDROID_API_FUTURE__;
         }
     }
     return targetSdk;
@@ -205,15 +205,16 @@
         const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
 { }
 
-void OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
-            const String16& packageName __unused) {
+binder::Status OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op, int32_t,
+            const String16&, const String16&) {
     sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
     if (monitor != NULL) {
         if (op != monitor->getOp()) {
-            return;
+            return binder::Status::ok();
         }
         monitor->checkOp(true);
     }
+    return binder::Status::ok();
 }
 
 }  // namespace android::media::audiopolicy
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index 6037a8d..433c15a 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -54,10 +54,11 @@
 
     AppOpsManager mAppOpsManager;
 
-    class RecordAudioOpCallback : public BnAppOpsCallback {
+    class RecordAudioOpCallback : public com::android::internal::app::BnAppOpsCallback {
     public:
         explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
-        void opChanged(int32_t op, const String16& packageName) override;
+        binder::Status opChanged(int32_t op, int32_t uid, const String16& packageName,
+                                 const String16& persistentDeviceId) override;
 
     private:
         const wp<OpRecordAudioMonitor> mMonitor;
diff --git a/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
index 6776ff9..0b434d5 100644
--- a/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
+++ b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
@@ -26,6 +26,8 @@
     virtual const ::com::android::media::permission::IPermissionProvider&
     getPermissionProvider() const = 0;
 
+    virtual bool isHardeningOverrideEnabled() const = 0;
+
     virtual ~IAudioPolicyServiceLocal() = default;
 };
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index 70a3022..e2ed3d7 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -116,11 +116,11 @@
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                AUDIO_POLICY_DEVICE_STATE_AVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_AVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT, false));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT, false));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
     }
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 2eccebf..e85d4e2 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -473,7 +473,7 @@
 class AudioPolicyManagerTestWithDefaultEngineConfig : public AudioPolicyManagerTest {
   protected:
     // The APM will use the default engine config from EngineDefaultConfig.h.
-    std::string getEngineConfigFilePath() const override { return ""; }
+    std::string getEngineConfigFilePath() const override { return "non_existent_file.xml"; }
 };
 
 TEST_F(AudioPolicyManagerTestWithDefaultEngineConfig, BuiltInStrategyIdsAreValid) {
@@ -1348,6 +1348,36 @@
     EXPECT_EQ(expectedChannelMask, requestedChannelMask);
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UpdateConfigFromExactProfile) {
+    const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const uint32_t expectedSampleRate = 48000;
+    const audio_channel_mask_t expectedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+    const audio_input_flags_t expectedFlags = AUDIO_INPUT_FLAG_FAST;
+    const std::string expectedIOProfile = "mixport_fast_input";
+
+    auto devices = mManager->getAvailableInputDevices();
+    sp<DeviceDescriptor> mic = nullptr;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+            mic = device;
+            break;
+        }
+    }
+    EXPECT_NE(nullptr, mic);
+
+    audio_format_t requestedFormat = AUDIO_FORMAT_PCM_16_BIT;
+    uint32_t requestedSampleRate = 48000;
+    audio_channel_mask_t requestedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+    audio_input_flags_t requestedFlags = AUDIO_INPUT_FLAG_FAST;
+    auto profile = mManager->getInputProfile(
+            mic, requestedSampleRate, requestedFormat, requestedChannelMask, requestedFlags);
+    EXPECT_EQ(expectedIOProfile, profile->getName());
+    EXPECT_EQ(expectedFormat, requestedFormat);
+    EXPECT_EQ(expectedSampleRate, requestedSampleRate);
+    EXPECT_EQ(expectedChannelMask, requestedChannelMask);
+    EXPECT_EQ(expectedFlags, profile->getFlags());
+}
+
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, MatchesMoreInputFlagsWhenPossible) {
     const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
     const uint32_t expectedSampleRate = 48000;
@@ -2653,7 +2683,7 @@
     const size_t lastConnectedDevicePortCount = mClient->getConnectedDevicePortCount();
     const size_t lastDisconnectedDevicePortCount = mClient->getDisconnectedDevicePortCount();
     EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
-                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE, port, AUDIO_FORMAT_DEFAULT));
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE, port, AUDIO_FORMAT_DEFAULT, false));
     EXPECT_EQ(lastConnectedDevicePortCount + 1, mClient->getConnectedDevicePortCount());
     EXPECT_EQ(lastDisconnectedDevicePortCount, mClient->getDisconnectedDevicePortCount());
     const audio_port_v7* devicePort = mClient->getLastConnectedDevicePort();
@@ -4050,6 +4080,296 @@
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
 }
 
+namespace {
+
+class AudioPolicyManagerTestClientVolumeChecker : public AudioPolicyManagerTestClient {
+public:
+    status_t setStreamVolume(audio_stream_type_t stream,
+                             float volume,
+                             bool /*muted*/,
+                             audio_io_handle_t /*output*/,
+                             int /*delayMs*/) override {
+        mLastStreamVolume[stream] = volume;
+        return NO_ERROR;
+    }
+
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
+                            bool /*muted*/, audio_io_handle_t /*output*/,
+                            int /*delayMs*/) override {
+        for (const auto& port : ports) {
+            mLastPortVolume[port] = volume;
+        }
+        return NO_ERROR;
+    }
+
+    status_t setVoiceVolume(float volume, int /*delayMs*/) override {
+        mLastVoiceVolume = volume;
+        return NO_ERROR;
+    }
+
+    float getLastPortVolume(audio_port_handle_t port) {
+        return mLastPortVolume[port];
+    }
+
+    float getLastStreamVolume(audio_stream_type_t stream) {
+        return mLastStreamVolume[stream];
+    }
+
+    float getLastVoiceVolume() const {
+        return mLastVoiceVolume;
+    }
+
+private:
+    std::unordered_map<audio_port_handle_t, float> mLastPortVolume;
+    std::unordered_map<audio_stream_type_t, float> mLastStreamVolume;
+    float mLastVoiceVolume;
+};
+
+}  // namespace
+
+class AudioPolicyManagerTestAbsoluteVolume : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
+    AudioPolicyManagerTestClientVolumeChecker* mVolumeCheckerClient;
+
+    AudioPolicyManagerTestClient* getClient() override {
+        return mVolumeCheckerClient = new AudioPolicyManagerTestClientVolumeChecker();
+    }
+
+    void setVolumeIndexForAttributesForDrivingStream(bool withPortApi);
+    void setVolumeIndexForAttributesForNonDrivingStream(bool withPortApi);
+    void setVolumeIndexForDtmfAttributesOnSco(bool withPortApi);
+
+    audio_port_handle_t mOutputPortId = AUDIO_PORT_HANDLE_NONE;
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+    static constexpr audio_attributes_t sNotifAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    static constexpr audio_attributes_t sVoiceCallAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SPEECH,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION,
+    };
+    static constexpr audio_attributes_t sDtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+
+    static constexpr char sDefBtAddress[] = "00:11:22:33:44:55";
+};
+
+void AudioPolicyManagerTestAbsoluteVolume::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mManager->setDeviceAbsoluteVolumeEnabled(AUDIO_DEVICE_OUT_USB_DEVICE, "", /*enabled=*/true,
+                                             AUDIO_STREAM_MUSIC);
+}
+
+void AudioPolicyManagerTestAbsoluteVolume::TearDown() {
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+
+    ASSERT_EQ(NO_ERROR, mManager->stopOutput(mOutputPortId));
+    ASSERT_EQ(NO_ERROR, mManager->releaseOutput(mOutputPortId));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestAbsoluteVolume::setVolumeIndexForAttributesForDrivingStream(
+        bool withPortApi) {
+    DeviceIdVector selectedDeviceIds;
+    audio_io_handle_t mediaOutput = AUDIO_IO_HANDLE_NONE;
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_PCM_16_BIT));
+    ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                                             AUDIO_CHANNEL_OUT_STEREO, 48000,
+                                             AUDIO_OUTPUT_FLAG_NONE,
+                                             &mediaOutput, &mOutputPortId, sMediaAttr));
+    ASSERT_EQ(NO_ERROR, mManager->startOutput(mOutputPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->setVolumeIndexForAttributes(sMediaAttr, /*index=*/1,
+                                                              /*muted=*/false,
+                                                              AUDIO_DEVICE_OUT_USB_DEVICE));
+
+    if (withPortApi) {
+        EXPECT_EQ(1.f, mVolumeCheckerClient->getLastPortVolume(mOutputPortId));
+    } else {
+        EXPECT_EQ(1.f, mVolumeCheckerClient->getLastStreamVolume(AUDIO_STREAM_MUSIC));
+    }
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+                  SetVolumeIndexForAttributesForDrivingStreamWithPortApi,
+                  REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                      portid_volume_management))) {
+    setVolumeIndexForAttributesForDrivingStream(/*withPortApi=*/true);
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+                  SetVolumeIndexForAttributesForDrivingStreamWithStreamApi,
+                  REQUIRES_FLAGS_DISABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                       portid_volume_management))) {
+    setVolumeIndexForAttributesForDrivingStream(/*withPortApi=*/false);
+}
+
+void AudioPolicyManagerTestAbsoluteVolume::setVolumeIndexForAttributesForNonDrivingStream(
+        bool withPortApi) {
+    DeviceIdVector selectedDeviceIds;
+    audio_io_handle_t notifOutput = AUDIO_IO_HANDLE_NONE;
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_PCM_16_BIT));
+    ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                                             AUDIO_CHANNEL_OUT_STEREO, 48000,
+                                             AUDIO_OUTPUT_FLAG_NONE,
+                                             &notifOutput, &mOutputPortId, sNotifAttr));
+    ASSERT_EQ(NO_ERROR, mManager->startOutput(mOutputPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->setVolumeIndexForAttributes(sNotifAttr, /*index=*/1,
+                                                              /*muted=*/false,
+                                                              AUDIO_DEVICE_OUT_USB_DEVICE));
+
+    if (withPortApi) {
+        EXPECT_GT(1.f, mVolumeCheckerClient->getLastPortVolume(mOutputPortId));
+    } else {
+        EXPECT_GT(1.f, mVolumeCheckerClient->getLastStreamVolume(AUDIO_STREAM_NOTIFICATION));
+    }
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+       SetVolumeIndexForAttributesForNonDrivingStreamWithPortApi,
+                  REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                      portid_volume_management))) {
+    setVolumeIndexForAttributesForNonDrivingStream(/*withPortApi=*/true);
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+                  SetVolumeIndexForAttributesForNonDrivingStreamWithStreamApi,
+                  REQUIRES_FLAGS_DISABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                      portid_volume_management))) {
+    setVolumeIndexForAttributesForNonDrivingStream(/*withPortApi=*/false);
+}
+
+TEST_F(AudioPolicyManagerTestAbsoluteVolume, SetVolumeIndexForVoiceCallAttributesNoScoBle) {
+    mManager->setPhoneState(AUDIO_MODE_IN_COMMUNICATION);
+
+    DeviceIdVector selectedDeviceIds;
+    audio_io_handle_t voiceOutput = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                                             AUDIO_CHANNEL_OUT_STEREO, 48000,
+                                             AUDIO_OUTPUT_FLAG_PRIMARY,
+                                             &voiceOutput, &mOutputPortId, sVoiceCallAttr));
+    ASSERT_EQ(NO_ERROR, mManager->startOutput(mOutputPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->setVolumeIndexForAttributes(sVoiceCallAttr, /*index=*/1,
+                                                              /*muted=*/false,
+                                                              AUDIO_DEVICE_OUT_USB_DEVICE));
+
+    // setVoiceVolume is sent with actual value if no sco/ble device is connected
+    EXPECT_GT(1.f, mVolumeCheckerClient->getLastVoiceVolume());
+}
+
+TEST_F(AudioPolicyManagerTestAbsoluteVolume, SetVolumeIndexForVoiceCallAttributesOnSco) {
+    mManager->setPhoneState(AUDIO_MODE_IN_COMMUNICATION);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            sDefBtAddress, "", AUDIO_FORMAT_DEFAULT));
+
+    const AudioDeviceTypeAddr scoOutputDevice(AUDIO_DEVICE_OUT_BLUETOOTH_SCO, sDefBtAddress);
+    const AudioDeviceTypeAddrVector outputDevices = {scoOutputDevice};
+    ASSERT_EQ(NO_ERROR, mManager->setDevicesRoleForStrategy(
+            mManager->getStrategyForStream(AUDIO_STREAM_VOICE_CALL),
+            DEVICE_ROLE_PREFERRED, outputDevices));
+
+    DeviceIdVector selectedDeviceIds;
+    audio_io_handle_t voiceOutput = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                                             AUDIO_CHANNEL_OUT_STEREO, 48000,
+                                             AUDIO_OUTPUT_FLAG_PRIMARY,
+                                             &voiceOutput, &mOutputPortId, sVoiceCallAttr));
+    ASSERT_EQ(NO_ERROR, mManager->startOutput(mOutputPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->setVolumeIndexForAttributes(sVoiceCallAttr, /*index=*/1,
+                                                              /*muted=*/false,
+                                                              AUDIO_DEVICE_OUT_BLUETOOTH_SCO));
+
+    EXPECT_EQ(1.f, mVolumeCheckerClient->getLastVoiceVolume());
+
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           sDefBtAddress, "",
+                                                           AUDIO_FORMAT_DEFAULT));
+    EXPECT_EQ(NO_ERROR, mManager->clearDevicesRoleForStrategy(
+            mManager->getStrategyForStream(AUDIO_STREAM_VOICE_CALL),
+            DEVICE_ROLE_PREFERRED));
+}
+
+void AudioPolicyManagerTestAbsoluteVolume::setVolumeIndexForDtmfAttributesOnSco(bool withPortApi) {
+    mManager->setPhoneState(AUDIO_MODE_IN_COMMUNICATION);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            sDefBtAddress, "", AUDIO_FORMAT_DEFAULT));
+
+    const AudioDeviceTypeAddr scoOutputDevice(AUDIO_DEVICE_OUT_BLUETOOTH_SCO, sDefBtAddress);
+    const AudioDeviceTypeAddrVector outputDevices = {scoOutputDevice};
+    ASSERT_EQ(NO_ERROR, mManager->setDevicesRoleForStrategy(
+            mManager->getStrategyForStream(AUDIO_STREAM_VOICE_CALL),
+            DEVICE_ROLE_PREFERRED, outputDevices));
+
+    DeviceIdVector selectedDeviceIds;
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                                             AUDIO_CHANNEL_OUT_STEREO, 48000,
+                                             AUDIO_OUTPUT_FLAG_PRIMARY,
+                                             &dtmfOutput, &mOutputPortId, sDtmfAttr));
+    ASSERT_EQ(NO_ERROR, mManager->startOutput(mOutputPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->setVolumeIndexForAttributes(sDtmfAttr, /*index=*/1,
+                                                              /*muted=*/false,
+                                                              AUDIO_DEVICE_OUT_BLUETOOTH_SCO));
+
+    if (withPortApi) {
+        EXPECT_EQ(1.f, mVolumeCheckerClient->getLastPortVolume(mOutputPortId));
+    } else {
+        EXPECT_EQ(1.f, mVolumeCheckerClient->getLastStreamVolume(AUDIO_STREAM_DTMF));
+    }
+
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           sDefBtAddress, "",
+                                                           AUDIO_FORMAT_DEFAULT));
+    EXPECT_EQ(NO_ERROR, mManager->clearDevicesRoleForStrategy(
+            mManager->getStrategyForStream(AUDIO_STREAM_VOICE_CALL),
+            DEVICE_ROLE_PREFERRED));
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+                  SetVolumeIndexForDtmfAttributesOnScoWithPortApi,
+                  REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                      portid_volume_management))) {
+    setVolumeIndexForDtmfAttributesOnSco(/*withPortApi=*/true);
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestAbsoluteVolume,
+                  SetVolumeIndexForDtmfAttributesOnScoWithStreamApi,
+                  REQUIRES_FLAGS_DISABLED(ACONFIG_FLAG(com::android::media::audioserver,
+                                                      portid_volume_management))) {
+    setVolumeIndexForDtmfAttributesOnSco(/*withPortApi=*/false);
+}
+
 class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void SetUp() override;
@@ -4396,6 +4716,34 @@
 
 TEST_F_WITH_FLAGS(
         AudioPolicyManagerInputPreemptionTest,
+        SameDeviceAndSourceReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+        ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_VOICE_RECOGNITION;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
         LesserPriorityReusesInput,
         REQUIRES_FLAGS_ENABLED(
                 ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 9cb3608..55afbdc 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -113,7 +113,7 @@
                 <route type="mix" sink="Hdmi"
                        sources="primary output"/>
                 <route type="mix" sink="BT SCO"
-                       sources="mixport_bt_hfp_output,voip_rx"/>
+                       sources="mixport_bt_hfp_output,voip_rx,primary output"/>
                 <route type="mix" sink="mixport_bt_hfp_input"
                        sources="BT SCO Headset Mic"/>
                 <route type="mix" sink="BT A2DP Out"
diff --git a/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml b/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml
index efe1400..98299e6 100644
--- a/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml
@@ -43,6 +43,8 @@
                 </mixPort>
                 <mixPort name="compressed_offload" role="source"
                          flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD">
+                    <profile name="" format="AUDIO_FORMAT_PCM_FLOAT"
+                             samplingRates="48000 96000 384000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                     <profile name="" format="AUDIO_FORMAT_MP3"
                              samplingRates="8000 16000 24000 32000 44100 48000 96000"
                              channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 71b7fe5..225e211 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -73,11 +73,11 @@
         "libsensorprivacy",
         "libstagefright",
         "libstagefright_foundation",
+        "libtinyxml2",
         "libvendorsupport",
         "libxml2",
         "libyuv",
         "android.companion.virtual.virtualdevice_aidl-cpp",
-        "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
@@ -143,6 +143,8 @@
         "common/FrameProcessorBase.cpp",
         "common/hidl/HidlProviderInfo.cpp",
         "common/aidl/AidlProviderInfo.cpp",
+        "config/SharedSessionConfigUtils.cpp",
+        "config/SharedSessionConfigReader.cpp",
         "api1/Camera2Client.cpp",
         "api1/client2/Parameters.cpp",
         "api1/client2/FrameProcessor.cpp",
@@ -222,6 +224,7 @@
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
+        "libtinyxml2",
     ],
 
     include_dirs: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a975da2..874d79b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -42,7 +42,6 @@
 #include <aidl/AidlCameraService.h>
 #include <android-base/macros.h>
 #include <android-base/parseint.h>
-#include <android_companion_virtualdevice_flags.h>
 #include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
@@ -101,16 +100,7 @@
     const char* kVirtualDeviceFrontCameraId = "1";
     const char* kUnknownPackageName = "<unknown>";
 
-    int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
-        if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
-            return android::kDefaultDeviceId;
-        }
-
-        const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
-        return deviceIdEntry.data.i32[0];
-    }
-
-    static android::PermissionChecker::PermissionResult appOpModeToPermissionResult(int32_t res) {
+    android::PermissionChecker::PermissionResult appOpModeToPermissionResult(int32_t res) {
         switch (res) {
             case android::AppOpsManager::MODE_ERRORED:
                 return android::PermissionChecker::PERMISSION_HARD_DENIED;
@@ -143,7 +133,6 @@
 using hardware::camera2::utils::ConcurrentCameraIdCombination;
 
 namespace flags = com::android::internal::camera::flags;
-namespace vd_flags = android::companion::virtualdevice::flags;
 
 // ----------------------------------------------------------------------------
 // Logging support -- this is for debugging only
@@ -378,23 +367,21 @@
         const std::vector<std::string> &normalDeviceIds) {
     mNormalDeviceIdsWithoutSystemCamera.clear();
     for (auto &cameraId : normalDeviceIds) {
-        if (vd_flags::camera_device_awareness()) {
-            CameraMetadata cameraInfo;
-            status_t res = mCameraProviderManager->getCameraCharacteristics(
-                    cameraId, false, &cameraInfo,
-                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
-            int32_t deviceId = kDefaultDeviceId;
-            if (res != OK) {
-                ALOGW("%s: Not able to get camera characteristics for camera id %s",
-                      __FUNCTION__, cameraId.c_str());
-            } else {
-                deviceId = getDeviceId(cameraInfo);
-            }
-            // Cameras associated with non-default device id's (i.e., virtual cameras) can never be
-            // system cameras, so skip for non-default device id's.
-            if (deviceId != kDefaultDeviceId) {
-                continue;
-            }
+        CameraMetadata cameraInfo;
+        status_t res = mCameraProviderManager->getCameraCharacteristics(
+                cameraId, false, &cameraInfo,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+        int32_t deviceId = kDefaultDeviceId;
+        if (res != OK) {
+            ALOGW("%s: Not able to get camera characteristics for camera id %s",
+                  __FUNCTION__, cameraId.c_str());
+        } else {
+            deviceId = getDeviceId(cameraInfo);
+        }
+        // Cameras associated with non-default device id's (i.e., virtual cameras) can never be
+        // system cameras, so skip for non-default device id's.
+        if (deviceId != kDefaultDeviceId) {
+            continue;
         }
 
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
@@ -813,7 +800,7 @@
         const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         int32_t* numCameras) {
     ATRACE_CALL();
-    if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
+    if (clientAttribution.deviceId != kDefaultDeviceId
             && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
         *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
         return Status::ok();
@@ -1276,7 +1263,7 @@
 
 std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt,
         int32_t deviceId, int32_t devicePolicy) {
-    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+    if (deviceId != kDefaultDeviceId
             && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
         std::optional<std::string> cameraIdOptional =
                 mVirtualDeviceCameraIdMapper.getActualCameraId(cameraIdInt, deviceId);
@@ -3234,6 +3221,56 @@
     }
 }
 
+void CameraService::updateSharedClientAccessPriorities(std::vector<int> sharedClientPids) {
+    Mutex::Autolock lock(mServiceLock);
+    if (!flags::camera_multi_client() || sharedClientPids.empty()) {
+        return;
+    }
+    std::vector<int> scores(sharedClientPids.size());
+    std::vector<int> states(sharedClientPids.size());
+    status_t err = ProcessInfoService::getProcessStatesScoresFromPids(sharedClientPids.size(),
+                &sharedClientPids[0], /*out*/&states[0], /*out*/&scores[0]);
+    if (err != OK) {
+        return;
+    }
+    for (size_t i = 0; i < sharedClientPids.size(); i++) {
+        auto clientDescriptorPtr = mActiveClientManager.getSharedClient(sharedClientPids[i]);
+        if (clientDescriptorPtr == nullptr) {
+            continue;
+        }
+        const auto& clientPriority = clientDescriptorPtr->getPriority();
+        int score = clientPriority.getScore();
+        int state = clientPriority.getState();
+        if ((score != scores[i])  || (state != states[i])){
+            clientDescriptorPtr->setPriority(resource_policy::ClientPriority(scores[i], states[i],
+                    false, 0));
+            notifySharedClientPrioritiesChanged(clientDescriptorPtr->getKey());
+        }
+    }
+}
+
+void CameraService::notifySharedClientPrioritiesChanged(const std::string& cameraId) {
+    if (!flags::camera_multi_client()) {
+        return;
+    }
+    auto primaryClientDesc = mActiveClientManager.getPrimaryClient(cameraId);
+    if (primaryClientDesc == nullptr) {
+        return;
+    }
+    auto primaryClient = primaryClientDesc->getValue();
+    if (primaryClient == nullptr) {
+        return;
+    }
+    auto highestPriorityClient = mActiveClientManager.getHighestPrioritySharedClient(cameraId);
+    if (highestPriorityClient == primaryClient) {
+        return;
+    }
+    highestPriorityClient->setPrimaryClient(true);
+    highestPriorityClient->notifyClientSharedAccessPriorityChanged(true);
+    primaryClient->setPrimaryClient(false);
+    primaryClient->notifyClientSharedAccessPriorityChanged(false);
+}
+
 Status CameraService::notifyDeviceStateChange(int64_t newState) {
     const int pid = getCallingPid();
     const int selfPid = getpid();
@@ -3599,71 +3636,6 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
-        /*out*/ bool *isSupported) {
-    ATRACE_CALL();
-
-    ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
-
-    switch (apiVersion) {
-        case API_VERSION_1:
-        case API_VERSION_2:
-            break;
-        default:
-            std::string msg = fmt::sprintf("Unknown API version %d", apiVersion);
-            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-    }
-
-    int portraitRotation;
-    auto deviceVersionAndTransport =
-            getDeviceVersion(cameraId,
-                    /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                    &portraitRotation);
-    if (deviceVersionAndTransport.first == -1) {
-        std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
-        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-    }
-    if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
-        int deviceVersion = deviceVersionAndTransport.first;
-        switch (deviceVersion) {
-            case CAMERA_DEVICE_API_VERSION_1_0:
-            case CAMERA_DEVICE_API_VERSION_3_0:
-            case CAMERA_DEVICE_API_VERSION_3_1:
-                if (apiVersion == API_VERSION_2) {
-                    ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without "
-                            "shim", __FUNCTION__, cameraId.c_str(), deviceVersion);
-                    *isSupported = false;
-                } else { // if (apiVersion == API_VERSION_1) {
-                    ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always "
-                            "supported", __FUNCTION__, cameraId.c_str());
-                    *isSupported = true;
-                }
-                break;
-            case CAMERA_DEVICE_API_VERSION_3_2:
-            case CAMERA_DEVICE_API_VERSION_3_3:
-            case CAMERA_DEVICE_API_VERSION_3_4:
-            case CAMERA_DEVICE_API_VERSION_3_5:
-            case CAMERA_DEVICE_API_VERSION_3_6:
-            case CAMERA_DEVICE_API_VERSION_3_7:
-                ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
-                        __FUNCTION__, cameraId.c_str());
-                *isSupported = true;
-                break;
-            default: {
-                std::string msg = fmt::sprintf("Unknown device version %x for device %s",
-                        deviceVersion, cameraId.c_str());
-                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.c_str());
-            }
-        }
-    } else {
-        *isSupported = true;
-    }
-    return Status::ok();
-}
-
 Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
@@ -4435,6 +4407,10 @@
 
     sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
 
+    if (flags::camera_multi_client() && mSharedMode) {
+        sCameraService->mUidPolicy->addSharedClientPid(getClientUid(), getClientCallingPid());
+    }
+
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, true /*open*/, getPackageName(),
             mSharedMode);
@@ -4599,6 +4575,10 @@
 
     sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
 
+    if (flags::camera_multi_client() && mSharedMode) {
+        sCameraService->mUidPolicy->removeSharedClientPid(getClientUid(), getClientCallingPid());
+    }
+
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, false /*open*/, getPackageName(),
             mSharedMode);
@@ -4637,21 +4617,21 @@
                             AppOpsManager::OP_CAMERA, attr.uid,
                             toString16(attr.packageName.value_or(""))));
                 });
-        ALOGV("checkOp returns: %d, %s ", res,
+        res = appOpModeToPermissionResult(appOpMode);
+        ALOGV("checkOp returns: %d, %s ", appOpMode,
               appOpMode == AppOpsManager::MODE_ALLOWED   ? "ALLOWED"
               : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
               : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
                                                          : "UNKNOWN");
-        res = appOpModeToPermissionResult(appOpMode);
     } else {
         int32_t appOpMode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, getClientUid(),
                                                     toString16(getPackageName()));
-        ALOGV("checkOp returns: %d, %s ", res,
+        res = appOpModeToPermissionResult(appOpMode);
+        ALOGV("checkOp returns: %d, %s ", appOpMode,
               appOpMode == AppOpsManager::MODE_ALLOWED   ? "ALLOWED"
               : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
               : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
                                                          : "UNKNOWN");
-        res = appOpModeToPermissionResult(appOpMode);
     }
 
     if (res == PermissionChecker::PERMISSION_HARD_DENIED) {
@@ -4674,8 +4654,6 @@
                           uid = static_cast<uid_t>(attr.uid);
                       });
             procState = getUidProcessState(uid);
-        } else if (flags::query_process_state()) {
-            procState = getUidProcessState(getClientUid());
         } else {
             procState = sCameraService->mUidPolicy->getProcState(getClientUid());
         }
@@ -4727,7 +4705,6 @@
     }
 
     if (!mSharedMode) {
-        ALOGW("%s: Invalid operation when camera is not opened in shared mode", __FUNCTION__);
         return INVALID_OPERATION;
     }
     *isPrimary = mIsPrimaryClient;
@@ -4742,7 +4719,6 @@
     }
 
     if (!mSharedMode) {
-        ALOGW("%s:Invalid operation when camera is not opened in shared mode", __FUNCTION__);
         return INVALID_OPERATION;
     }
     mIsPrimaryClient = isPrimary;
@@ -4778,12 +4754,13 @@
         mClient(client) {
 }
 
-void CameraService::Client::OpsCallback::opChanged(int32_t op,
-        const String16& packageName) {
+binder::Status CameraService::Client::OpsCallback::opChanged(int32_t op, int32_t,
+        const String16& packageName, const String16&) {
     sp<BasicClient> client = mClient.promote();
     if (client != NULL) {
         client->opChanged(op, packageName);
     }
+    return binder::Status::ok();
 }
 
 // ----------------------------------------------------------------------------
@@ -4869,21 +4846,35 @@
 void CameraService::UidPolicy::onUidStateChanged(uid_t uid, int32_t procState,
         int64_t procStateSeq __unused, int32_t capability __unused) {
     bool procStateChange = false;
+    std::vector<int> sharedPids;
     {
         Mutex::Autolock _l(mUidLock);
-        if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
-                mMonitoredUids[uid].procState != procState) {
-            mMonitoredUids[uid].procState = procState;
-            procStateChange = true;
+        if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+            if (mMonitoredUids[uid].procState != procState) {
+                mMonitoredUids[uid].procState = procState;
+                procStateChange = true;
+            }
+            if (flags::camera_multi_client()) {
+                std::unordered_set<int> sharedClientPids = mMonitoredUids[uid].sharedClientPids;
+                if (!sharedClientPids.empty()) {
+                  sharedPids.assign(sharedClientPids.begin(), sharedClientPids.end());
+                }
+            }
         }
     }
 
+    sp<CameraService> service = mService.promote();
     if (procStateChange) {
-        sp<CameraService> service = mService.promote();
         if (service != nullptr) {
             service->notifyMonitoredUids();
         }
     }
+
+    if (flags::camera_multi_client() && !sharedPids.empty()) {
+        if (service != nullptr) {
+            service->updateSharedClientAccessPriorities(sharedPids);
+        }
+    }
 }
 
 /**
@@ -4896,6 +4887,7 @@
  */
 void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid, int32_t adj) {
     std::unordered_set<uid_t> notifyUidSet;
+    std::vector<int> sharedPids;
     {
         Mutex::Autolock _l(mUidLock);
         auto it = mMonitoredUids.find(uid);
@@ -4919,15 +4911,28 @@
                 }
             }
             it->second.procAdj = adj;
+            if (flags::camera_multi_client()) {
+                std::unordered_set<int> sharedClientPids = it->second.sharedClientPids;
+                if (!sharedClientPids.empty()) {
+                    sharedPids.assign(sharedClientPids.begin(), sharedClientPids.end());
+                }
+            }
         }
     }
 
+    sp<CameraService> service = mService.promote();
+
     if (notifyUidSet.size() > 0) {
-        sp<CameraService> service = mService.promote();
         if (service != nullptr) {
             service->notifyMonitoredUids(notifyUidSet);
         }
     }
+
+    if (flags::camera_multi_client() && !sharedPids.empty()) {
+        if (service != nullptr) {
+            service->updateSharedClientAccessPriorities(sharedPids);
+        }
+    }
 }
 
 /**
@@ -5061,6 +5066,20 @@
     updateOverrideUid(uid, callingPackage, false, false);
 }
 
+void CameraService::UidPolicy::addSharedClientPid(uid_t uid, int pid) {
+    Mutex::Autolock _l(mUidLock);
+    if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+        mMonitoredUids[uid].sharedClientPids.insert(pid);
+    }
+}
+
+void CameraService::UidPolicy::removeSharedClientPid(uid_t uid, int pid) {
+    Mutex::Autolock _l(mUidLock);
+    if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+        mMonitoredUids[uid].sharedClientPids.erase(pid);
+    }
+}
+
 void CameraService::UidPolicy::binderDied(const wp<IBinder>& /*who*/) {
     Mutex::Autolock _l(mUidLock);
     ALOGV("UidPolicy: ActivityManager has died");
@@ -5367,6 +5386,33 @@
     return descriptor->getValue();
 }
 
+sp<CameraService::BasicClient> CameraService::CameraClientManager::getHighestPrioritySharedClient(
+        const std::string& id) const {
+    if (!flags::camera_multi_client()) {
+        return sp<BasicClient>{nullptr};
+    }
+    auto clientDescriptor = get(id);
+    if (clientDescriptor == nullptr) {
+        ALOGV("CameraService::CameraClientManager::no other clients are using same camera");
+        return sp<BasicClient>{nullptr};
+    }
+    if (!clientDescriptor->getSharedMode()) {
+        return sp<BasicClient>{nullptr};
+    }
+    resource_policy::ClientPriority highestPriority = clientDescriptor->getPriority();
+    sp<BasicClient> highestPriorityClient = clientDescriptor->getValue();
+    if (highestPriorityClient.get() == nullptr) {
+        return sp<BasicClient>{nullptr};
+    }
+    for (auto& i : getAll()) {
+        if ((i->getKey() == id) && (i->getSharedMode()) && (i->getPriority() < highestPriority)) {
+            highestPriority = i->getPriority();
+            highestPriorityClient = i->getValue();
+        }
+    }
+    return highestPriorityClient;
+}
+
 void CameraService::CameraClientManager::remove(const CameraService::DescriptorPtr& value) {
     ClientManager::remove(value);
     if (!flags::camera_multi_client()) {
@@ -5374,29 +5420,17 @@
     }
     auto clientToRemove = value->getValue();
     if ((clientToRemove.get() != nullptr) && clientToRemove->mSharedMode) {
-      bool primaryClient = false;
-      status_t ret = clientToRemove->isPrimaryClient(&primaryClient);
-      if ((ret == OK) && primaryClient) {
+        bool primaryClient = false;
+        status_t ret = clientToRemove->isPrimaryClient(&primaryClient);
+        if ((ret == OK) && primaryClient) {
             // Primary client is being removed. Find the next higher priority
             // client to become primary client.
-            auto clientDescriptor = get(value->getKey());
-            if (clientDescriptor == nullptr) {
-                ALOGV("CameraService::CameraClientManager::no other clients are using same camera");
-                return;
-            }
-            resource_policy::ClientPriority highestPriority = clientDescriptor->getPriority();
-            sp<BasicClient> highestPriorityClient = clientDescriptor->getValue();
-            if (highestPriorityClient.get() != nullptr) {
-                for (auto& i : getAll()) {
-                    if ((i->getKey() == value->getKey()) && (i->getPriority() < highestPriority)) {
-                        highestPriority = i->getPriority();
-                        highestPriorityClient = i->getValue();
-                    }
-                }
+            auto highestPriorityClient = getHighestPrioritySharedClient(value->getKey());
+            if (highestPriorityClient != nullptr) {
                 highestPriorityClient->setPrimaryClient(true);
                 highestPriorityClient->notifyClientSharedAccessPriorityChanged(true);
             }
-       }
+        }
     }
 }
 
@@ -5911,7 +5945,7 @@
         return;
     }
 
-    if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
+    if (status == StatusInternal::PRESENT) {
         CameraMetadata cameraInfo;
         status_t res = mCameraProviderManager->getCameraCharacteristics(
                 cameraId, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index c4d2d67..55542b7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -28,7 +28,6 @@
 #include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
 #include <binder/IActivityManager.h>
-#include <binder/IAppOpsCallback.h>
 #include <binder/IServiceManager.h>
 #include <binder/IUidObserver.h>
 #include <cutils/multiuser.h>
@@ -225,12 +224,6 @@
 
     virtual binder::Status    notifyDisplayConfigurationChange();
 
-    // OK = supports api of that version, -EOPNOTSUPP = does not support
-    virtual binder::Status    supportsCameraApi(
-            const std::string& cameraId, int32_t apiVersion,
-            /*out*/
-            bool *isSupported);
-
     virtual binder::Status    isHiddenPhysicalCamera(
             const std::string& cameraId,
             /*out*/
@@ -523,10 +516,11 @@
 
         std::unique_ptr<AppOpsManager>  mAppOpsManager = nullptr;
 
-        class OpsCallback : public BnAppOpsCallback {
+        class OpsCallback : public com::android::internal::app::BnAppOpsCallback {
         public:
             explicit OpsCallback(wp<BasicClient> client);
-            virtual void opChanged(int32_t op, const String16& packageName);
+            virtual binder::Status opChanged(int32_t op, int32_t uid,
+                                   const String16& packageName, const String16& persistentDeviceId);
 
         private:
             wp<BasicClient> mClient;
@@ -651,6 +645,12 @@
         sp<CameraService::BasicClient> getCameraClient(const std::string& id) const;
 
         /**
+         * Return a strong pointer to the highest priority client among all the clients which
+         * have opened this camera ID in shared mode, or empty if none exists.
+         */
+        sp<CameraService::BasicClient> getHighestPrioritySharedClient(const std::string& id) const;
+
+        /**
          * Return a string describing the current state.
          */
         std::string toString() const;
@@ -858,6 +858,9 @@
         void registerMonitorUid(uid_t uid, bool openCamera);
         void unregisterMonitorUid(uid_t uid, bool closeCamera);
 
+        void addSharedClientPid(uid_t uid, int pid);
+        void removeSharedClientPid(uid_t uid, int pid);
+
         // Implementation of IServiceManager::LocalRegistrationCallback
         virtual void onServiceRegistration(const String16& name,
                         const sp<IBinder>& binder) override;
@@ -875,6 +878,9 @@
             int32_t procAdj;
             bool hasCamera;
             size_t refCount;
+            // This field is only valid when camera has been opened in shared mode, to adjust the
+            // priority of active clients based on the latest process score and state.
+            std::unordered_set<int> sharedClientPids;
         };
 
         Mutex mUidLock;
@@ -1109,6 +1115,18 @@
             int oomScoreOffset, bool systemNativeClient);
 
     /**
+     * When multiple clients open the camera in shared mode, adjust the priority of active clients
+     * based on the latest process score and state.
+     */
+    void updateSharedClientAccessPriorities(std::vector<int> sharedClientPids);
+
+    /**
+     * Update all clients on any changes in the primary or secondary client status if the priority
+     * of any client changes when multiple clients are sharing a camera.
+     */
+    void notifySharedClientPrioritiesChanged(const std::string& cameraId);
+
+    /**
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index ad1a84f..f86956c 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -45,7 +45,13 @@
 
             mTidMap[currentThreadId].cycles++;
 
-            if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
+            // If we are at 3/4 of reaching timeout, trigger a stack trace dump
+            // for all relevant processes by CameraServiceProxy.
+            if (mTidMap[currentThreadId].cycles == mMaxCycles * 3 / 4) {
+                ALOGW("CameraServiceWatchdog pre-watchdog for pid: %d tid: %d, clientPid %d",
+                        getpid(), currentThreadId, mClientPid);
+                mCameraServiceProxyWrapper->notifyWatchdog(mClientPid, mIsNativePid);
+            } else if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
                 std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
                 android_set_abort_message(abortMessage.c_str());
                 ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index 691a274..12a643c 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -41,8 +41,6 @@
 
 // Used to wrap the call of interest in start and stop calls
 #define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__)
-#define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
-        watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
 
 // Default cycles and cycle length values used to calculate permitted elapsed time
 const static size_t   kMaxCycles     = 650;
@@ -59,19 +57,15 @@
 
 public:
 
-    explicit CameraServiceWatchdog(const std::set<pid_t> &pids, const std::string &cameraId,
+    explicit CameraServiceWatchdog(
+            const std::set<pid_t> &pids, pid_t clientPid,
+            bool isNativePid, const std::string &cameraId,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
-                    mProviderPids(pids), mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
+                    mProviderPids(pids), mClientPid(clientPid), mIsNativePid(isNativePid),
+                    mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
                     mCycleLengthMs(kCycleLengthMs), mEnabled(true),
                     mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
 
-    explicit CameraServiceWatchdog (const std::set<pid_t> &pids, const std::string &cameraId,
-            size_t maxCycles, uint32_t cycleLengthMs, bool enabled,
-            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
-                    mProviderPids(pids), mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
-                    mCycleLengthMs(cycleLengthMs), mEnabled(enabled),
-                    mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
-
     virtual ~CameraServiceWatchdog() {};
 
     virtual void requestExit();
@@ -79,42 +73,6 @@
     /** Enables/disables the watchdog */
     void setEnabled(bool enable);
 
-    /** Used to wrap monitored calls in start and stop functions using custom timer values */
-    template<typename T>
-    auto watchThread(T func, uint32_t tid, const char* functionName, uint32_t cycles,
-            uint32_t cycleLength) {
-        decltype(func()) res;
-
-        if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
-            // Create another instance of the watchdog to prevent disruption
-            // of timer for current monitored calls
-
-            // Lock for mEnabled
-            mEnabledLock.lock();
-            sp<CameraServiceWatchdog> tempWatchdog = new CameraServiceWatchdog(
-                    mProviderPids, mCameraId, cycles, cycleLength, mEnabled,
-                    mCameraServiceProxyWrapper);
-            mEnabledLock.unlock();
-
-            status_t status = tempWatchdog->run("CameraServiceWatchdog");
-            if (status != OK) {
-                ALOGE("Unable to watch thread: %s (%d)", strerror(-status), status);
-                res = watchThread(func, tid, functionName);
-                return res;
-            }
-
-            res = tempWatchdog->watchThread(func, tid, functionName);
-            tempWatchdog->requestExit();
-            tempWatchdog.clear();
-        } else {
-            // If custom timer values are equivalent to set class timer values, use
-            // current thread
-            res = watchThread(func, tid, functionName);
-        }
-
-        return res;
-    }
-
     /** Used to wrap monitored calls in start and stop functions using class timer values */
     template<typename T>
     auto watchThread(T func, uint32_t tid, const char* functionName) {
@@ -154,6 +112,8 @@
     Mutex           mEnabledLock;       // Lock for enabled status
     Condition       mWatchdogCondition; // Condition variable for stop/start
     std::set<pid_t> mProviderPids;      // Process ID set of camera providers
+    pid_t           mClientPid;         // Process ID of the client
+    bool            mIsNativePid;       // Whether the client is a native process
     std::string     mCameraId;          // Camera Id the watchdog belongs to
     bool            mPause;             // True if tid map is empty
     uint32_t        mMaxCycles;         // Max cycles
diff --git a/services/camera/libcameraservice/FwkOnlyMetadataTags.h b/services/camera/libcameraservice/FwkOnlyMetadataTags.h
index 768afeb..bd1371c 100644
--- a/services/camera/libcameraservice/FwkOnlyMetadataTags.h
+++ b/services/camera/libcameraservice/FwkOnlyMetadataTags.h
@@ -36,7 +36,6 @@
         ANDROID_CONTROL_AWB_REGIONS_SET,
         ANDROID_CONTROL_ZOOM_METHOD,
         ANDROID_SCALER_CROP_REGION_SET,
-        ANDROID_EXTENSION_STRENGTH,
 };
 
 } //namespace android
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index fc987b2..abbab7a 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -57,7 +57,7 @@
 AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
       mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersion();
 }
 
 bool AidlCameraDeviceUser::initDevice() {
@@ -202,6 +202,20 @@
     return fromUStatus(ret);
 }
 
+ndk::ScopedAStatus AidlCameraDeviceUser::startStreaming(
+        const std::vector<int32_t>& in_streamIdxArray,
+        const std::vector<int32_t>& in_surfaceIdxArray, SSubmitInfo* _aidl_return){
+    USubmitInfo submitInfo;
+    UStatus ret = mDeviceRemote->startStreaming(in_streamIdxArray, in_surfaceIdxArray, &submitInfo);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed to start streaming: %s", __FUNCTION__, ret.toString8().c_str());
+        return fromUStatus(ret);
+    }
+    mRequestId = submitInfo.mRequestId;
+    convertToAidl(submitInfo, _aidl_return);
+    return ScopedAStatus::ok();
+}
+
 ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
     UStatus ret = mDeviceRemote->flush(_aidl_return);
     return fromUStatus(ret);
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index 8fa33f7..25464a5 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -99,6 +99,9 @@
 
     ndk::ScopedAStatus isPrimaryClient(bool* _aidl_return) override;
 
+    ndk::ScopedAStatus startStreaming(const std::vector<int32_t>& in_streamIdxArray,
+            const std::vector<int32_t>& in_surfaceIdxArray, SSubmitInfo* _aidl_return) override;
+
   private:
     bool initDevice();
 
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 46e2280..e916b9c 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -86,7 +86,7 @@
 
 AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
       mCameraService(cameraService) {
-    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersion();
 }
 ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
                                                           SCameraMetadata* _aidl_return) {
@@ -283,16 +283,6 @@
         return convertToAidl(serviceRet);
     }
 
-    cameraStatusAndIds->erase(std::remove_if(cameraStatusAndIds->begin(),
-                                             cameraStatusAndIds->end(),
-            [this](const hardware::CameraStatus& s) {
-                bool supportsHAL3 = false;
-                binder::Status sRet =
-                            mCameraService->supportsCameraApi(s.cameraId,
-                                    UICameraService::API_VERSION_2, &supportsHAL3);
-                return !sRet.isOk() || !supportsHAL3;
-            }), cameraStatusAndIds->end());
-
     return SStatus::NO_ERROR;
 }
 ndk::ScopedAStatus AidlCameraService::removeListener(
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 9b916bf..836692c 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -71,7 +71,7 @@
                         false /*systemNativeClient - since no ndk for api1*/, cameraDeviceId,
                         api1CameraId, cameraFacing, sensorOrientation, servicePid,
                         overrideForPerfClass, rotationOverride, sharedMode,
-                        /*legacyClient*/ true),
+                        /*isVendorClient*/ false, /*legacyClient*/ true),
       mParameters(api1CameraId, cameraFacing),
       mInitialized(false),
       mLatestRequestIds(kMaxRequestIds),
@@ -452,7 +452,7 @@
     int callingPid = getCallingPid();
     if (callingPid != mCallingPid && callingPid != mServicePid) return res;
 
-    if (mDevice == 0) return res;
+    if (mDevice == nullptr) return res;
 
     ALOGV("Camera %d: Shutting down", mCameraId);
 
@@ -470,11 +470,11 @@
         l.mParameters.state = Parameters::DISCONNECTED;
     }
 
-    mFrameProcessor->requestExit();
-    mCaptureSequencer->requestExit();
-    mJpegProcessor->requestExit();
-    mZslProcessor->requestExit();
-    mCallbackProcessor->requestExit();
+    if (mFrameProcessor != nullptr) mFrameProcessor->requestExit();
+    if (mCaptureSequencer != nullptr) mCaptureSequencer->requestExit();
+    if (mJpegProcessor != nullptr) mJpegProcessor->requestExit();
+    if (mZslProcessor != nullptr) mZslProcessor->requestExit();
+    if (mCallbackProcessor != nullptr) mCallbackProcessor->requestExit();
 
     ALOGV("Camera %d: Waiting for threads", mCameraId);
 
@@ -483,22 +483,24 @@
         // complete callbacks that re-enter Camera2Client
         mBinderSerializationLock.unlock();
 
-        mFrameProcessor->join();
-        mCaptureSequencer->join();
-        mJpegProcessor->join();
-        mZslProcessor->join();
-        mCallbackProcessor->join();
+        if (mFrameProcessor != nullptr) mFrameProcessor->join();
+        if (mCaptureSequencer != nullptr) mCaptureSequencer->join();
+        if (mJpegProcessor != nullptr) mJpegProcessor->join();
+        if (mZslProcessor != nullptr) mZslProcessor->join();
+        if (mCallbackProcessor != nullptr) mCallbackProcessor->join();
 
         mBinderSerializationLock.lock();
     }
 
     ALOGV("Camera %d: Deleting streams", mCameraId);
 
-    mStreamingProcessor->deletePreviewStream();
-    mStreamingProcessor->deleteRecordingStream();
-    mJpegProcessor->deleteStream();
-    mCallbackProcessor->deleteStream();
-    mZslProcessor->deleteStream();
+    if (mStreamingProcessor != nullptr) {
+        mStreamingProcessor->deletePreviewStream();
+        mStreamingProcessor->deleteRecordingStream();
+    }
+    if (mJpegProcessor != nullptr) mJpegProcessor->deleteStream();
+    if (mCallbackProcessor != nullptr) mCallbackProcessor->deleteStream();
+    if (mZslProcessor != nullptr) mZslProcessor->deleteStream();
 
     ALOGV("Camera %d: Disconnecting device", mCameraId);
 
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 2fbf49e..6439101 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -19,6 +19,7 @@
 //#define LOG_NDEBUG 0
 
 #include <com_android_graphics_libgui_flags.h>
+#include <gui/CpuConsumer.h>
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
@@ -114,20 +115,9 @@
     if (!mCallbackToApp && mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint, since app hasn't given us one
         // Make it async to avoid disconnect deadlocks
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
+        std::tie(mCallbackConsumer, mCallbackWindow) = CpuConsumer::create(kCallbackHeapCount);
         mCallbackConsumer->setFrameAvailableListener(this);
         mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
-        mCallbackWindow = mCallbackConsumer->getSurface();
-#else
-        sp<IGraphicBufferProducer> producer;
-        sp<IGraphicBufferConsumer> consumer;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mCallbackConsumer = new CpuConsumer(consumer, kCallbackHeapCount);
-        mCallbackConsumer->setFrameAvailableListener(this);
-        mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
-        mCallbackWindow = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     if (mCallbackStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 3a0489c..a9e045a 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -94,20 +94,9 @@
 
     if (mCaptureConsumer == 0) {
         // Create CPU buffer queue endpoint
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        mCaptureConsumer = new CpuConsumer(1);
+        std::tie(mCaptureConsumer, mCaptureWindow) = CpuConsumer::create(1);
         mCaptureConsumer->setFrameAvailableListener(this);
         mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
-        mCaptureWindow = mCaptureConsumer->getSurface();
-#else
-        sp<IGraphicBufferProducer> producer;
-        sp<IGraphicBufferConsumer> consumer;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mCaptureConsumer = new CpuConsumer(consumer, 1);
-        mCaptureConsumer->setFrameAvailableListener(this);
-        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
-        mCaptureWindow = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     // Since ashmem heaps are rounded up to page size, don't reallocate if
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 5bcb8e8..97ec11a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -60,6 +60,7 @@
 using namespace camera2;
 using namespace camera3;
 using camera3::camera_stream_rotation_t::CAMERA_STREAM_ROTATION_0;
+using hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
 
 namespace flags = com::android::internal::camera::flags;
 
@@ -91,9 +92,10 @@
                         attributionAndPermissionUtils, clientAttribution, callingPid,
                         systemNativeClient, cameraId, /*API1 camera ID*/ -1, cameraFacing,
                         sensorOrientation, servicePid, overrideForPerfClass, rotationOverride,
-                        sharedMode),
+                        sharedMode, isVendorClient),
       mInputStream(),
       mStreamingRequestId(REQUEST_ID_NONE),
+      mStreamingRequestLastFrameNumber(NO_IN_FLIGHT_REPEATING_FRAMES),
       mRequestIdCounter(0),
       mOverrideForPerfClass(overrideForPerfClass),
       mOriginalCameraId(originalCameraId),
@@ -118,13 +120,23 @@
         return res;
     }
 
-    mFrameProcessor = new FrameProcessorBase(mDevice);
-    std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
-    res = mFrameProcessor->run(threadName.c_str());
-    if (res != OK) {
-        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return res;
+    if (flags::camera_multi_client() && mSharedMode) {
+        // In shared camera device mode, there can be more than one clients and
+        // frame processor thread is started by shared camera device.
+        mFrameProcessor = mDevice->getSharedFrameProcessor();
+        if (mFrameProcessor == nullptr) {
+            ALOGE("%s: Unable to start frame processor thread", __FUNCTION__);
+            return UNKNOWN_ERROR;
+        }
+    } else {
+        mFrameProcessor = new FrameProcessorBase(mDevice);
+        std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
+        res = mFrameProcessor->run(threadName.c_str());
+        if (res != OK) {
+            ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
     }
 
     mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
@@ -187,8 +199,10 @@
             mHighResolutionSensors.insert(physicalId);
         }
     }
-    int32_t resultMQSize =
-            property_get_int32("ro.vendor.camera.res.fmq.size", /*default*/METADATA_QUEUE_SIZE);
+    size_t fmqHalSize = mDevice->getCaptureResultFMQSize();
+    size_t resultMQSize =
+            property_get_int32("ro.camera.resultFmqSize", /*default*/0);
+    resultMQSize = resultMQSize > 0 ? resultMQSize : fmqHalSize;
     res = CreateMetadataQueue(&mResultMetadataQueue, resultMQSize);
     if (res != OK) {
         ALOGE("%s: Creating result metadata queue failed: %s(%d)", __FUNCTION__,
@@ -292,6 +306,98 @@
     return intersection;
 }
 
+binder::Status CameraDeviceClient::startStreaming(const std::vector<int>& streamIds,
+            const std::vector<int>& surfaceIds,
+            /*out*/
+            hardware::camera2::utils::SubmitInfo *submitInfo) {
+    ATRACE_CALL();
+    ALOGV("%s-start of function. Stream list size %zu. Surface list size %zu", __FUNCTION__,
+            streamIds.size(), surfaceIds.size());
+
+    binder::Status res = binder::Status::ok();
+    status_t err;
+    if ( !(res = checkPidStatus(__FUNCTION__) ).isOk()) {
+        return res;
+    }
+
+    Mutex::Autolock icl(mBinderSerializationLock);
+
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+
+    if (!flags::camera_multi_client() || !mSharedMode) {
+        ALOGE("%s: Camera %s: Invalid operation.", __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Invalid operation");
+    }
+
+    if (streamIds.empty() || surfaceIds.empty()) {
+        ALOGE("%s: Camera %s: Sent empty streamIds or surface Ids. Rejecting request.",
+              __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty Stream or surface Ids");
+    }
+
+    if (streamIds.size() != surfaceIds.size()) {
+        ALOGE("%s: Camera %s: Sent different size array for stream and surface Ids.",
+              __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Stream and surface Ids are not of same size");
+    }
+
+    submitInfo->mRequestId = mRequestIdCounter;
+    SurfaceMap surfaceMap;
+    Vector<int32_t> outputStreamIds;
+    for (size_t i = 0; i < streamIds.size(); i++) {
+        int streamId = streamIds[i];
+        int surfaceIdx = surfaceIds[i];
+
+        ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
+        if (index < 0) {
+            ALOGE("%s: Camera %s: Tried to start streaming with a surface that"
+                    " we have not called createStream on: stream %d",
+                    __FUNCTION__, mCameraIdStr.c_str(), streamId);
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    "Start streaming targets Surface that is not part of current capture session");
+        }
+
+        const auto& surfaces = mConfiguredOutputs.valueAt(index).getSurfaces();
+        if ((size_t)surfaceIdx >= surfaces.size()) {
+            ALOGE("%s: Camera %s: Tried to start streaming with a surface that"
+                    " we have not called createStream on: stream %d, surfaceIdx %d",
+                     __FUNCTION__, mCameraIdStr.c_str(), streamId, surfaceIdx);
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    "Start streaming targets Surface has invalid surface index");
+        }
+
+        res = insertSurfaceLocked(surfaces[surfaceIdx], &surfaceMap, &outputStreamIds, nullptr);
+
+        if (!res.isOk()) {
+            return res;
+        }
+    }
+
+    mRequestIdCounter++;
+    int sharedReqID;
+
+    err = mDevice->startStreaming(submitInfo->mRequestId, surfaceMap, &sharedReqID,
+            &(submitInfo->mLastFrameNumber));
+    if (err != OK) {
+        std::string msg = fmt::sprintf(
+            "Camera %s:  Got error %s (%d) after trying to start streaming request",
+            mCameraIdStr.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+    } else {
+        Mutex::Autolock idLock(mStreamingRequestIdLock);
+        mStreamingRequestId = submitInfo->mRequestId;
+        mSharedStreamingRequest = {sharedReqID, submitInfo->mRequestId};
+    }
+
+    markClientActive();
+    ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.c_str());
+    return binder::Status::ok();
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -318,6 +424,12 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty request list");
     }
 
+    if (flags::camera_multi_client() && mSharedMode && !mIsPrimaryClient) {
+        ALOGE("%s: Camera %s: This client is not a primary client of the shared camera device.",
+              __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Invalid Operation.");
+    }
+
     List<const CameraDeviceBase::PhysicalCameraSettingsList> metadataRequestList;
     std::list<SurfaceMap> surfaceMapList;
     submitInfo->mRequestId = mRequestIdCounter;
@@ -594,9 +706,16 @@
     }
     mRequestIdCounter++;
 
+    int32_t sharedReqID;
     if (streaming) {
-        err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
-                &(submitInfo->mLastFrameNumber));
+        if (flags::camera_multi_client() && mSharedMode) {
+            err = mDevice->setSharedStreamingRequest(*metadataRequestList.begin(),
+                    *surfaceMapList.begin(), &sharedReqID, &(submitInfo->mLastFrameNumber));
+        } else {
+            err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
+                    &(submitInfo->mLastFrameNumber));
+        }
+
         if (err != OK) {
             std::string msg = fmt::sprintf(
                 "Camera %s:  Got error %s (%d) after trying to set streaming request",
@@ -607,10 +726,19 @@
         } else {
             Mutex::Autolock idLock(mStreamingRequestIdLock);
             mStreamingRequestId = submitInfo->mRequestId;
+            if (flags::camera_multi_client() && mSharedMode) {
+                mSharedStreamingRequest = {sharedReqID, submitInfo->mRequestId};
+                markClientActive();
+            }
         }
     } else {
-        err = mDevice->captureList(metadataRequestList, surfaceMapList,
-                &(submitInfo->mLastFrameNumber));
+        if (flags::camera_multi_client() && mSharedMode) {
+            err = mDevice->setSharedCaptureRequest(*metadataRequestList.begin(),
+                    *surfaceMapList.begin(), &sharedReqID, &(submitInfo->mLastFrameNumber));
+         } else {
+            err = mDevice->captureList(metadataRequestList, surfaceMapList,
+                    &(submitInfo->mLastFrameNumber));
+        }
         if (err != OK) {
             std::string msg = fmt::sprintf(
                 "Camera %s: Got error %s (%d) after trying to submit capture request",
@@ -619,6 +747,10 @@
             res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
                     msg.c_str());
         }
+        if (flags::camera_multi_client() && mSharedMode) {
+            mSharedRequestMap[sharedReqID] = submitInfo->mRequestId;
+            markClientActive();
+        }
         ALOGV("%s: requestId = %d ", __FUNCTION__, submitInfo->mRequestId);
     }
 
@@ -652,12 +784,19 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
-    err = mDevice->clearStreamingRequest(lastFrameNumber);
+    if (flags::camera_multi_client() && mSharedMode) {
+        err = mDevice->clearSharedStreamingRequest(lastFrameNumber);
+    } else {
+        err = mDevice->clearStreamingRequest(lastFrameNumber);
+    }
 
     if (err == OK) {
         ALOGV("%s: Camera %s: Successfully cleared streaming request",
                 __FUNCTION__, mCameraIdStr.c_str());
         mStreamingRequestId = REQUEST_ID_NONE;
+        if (flags::camera_multi_client() && mSharedMode) {
+            mStreamingRequestLastFrameNumber = *lastFrameNumber;
+        }
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error clearing streaming request: %s (%d)",
@@ -1019,12 +1158,9 @@
 
         int mirrorMode = outputConfiguration.getMirrorMode(surface);
         sp<Surface> outSurface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
-                isStreamInfoValid, outSurface, surface
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                .graphicBufferProducer
-#endif
-                , mCameraIdStr,
+        res = SessionConfigurationUtils::createConfiguredSurface(streamInfo,
+                isStreamInfoValid, outSurface,
+                flagtools::convertParcelableSurfaceTypeToSurface(surface), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                 streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
@@ -1045,7 +1181,7 @@
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<int> surfaceIds;
     if (flags::camera_multi_client() && mSharedMode) {
-        err = mDevice->getSharedStreamId(outputConfiguration, &streamId);
+        err = mDevice->getSharedStreamId(streamInfo, &streamId);
         if (err == OK) {
             err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaceHolders, &surfaceIds);
         }
@@ -1053,7 +1189,8 @@
         bool isDepthCompositeStream =
                 camera3::DepthCompositeStream::isDepthCompositeStream(surfaceHolders[0].mSurface);
         bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
-                surfaceHolders[0].mSurface);
+                surfaceHolders[0].mSurface, mDevice->isCompositeHeicDisabled(),
+                mDevice->isCompositeHeicUltraHDRDisabled());
         bool isJpegRCompositeStream =
             camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaceHolders[0].mSurface) &&
             !mDevice->isCompositeJpegRDisabled();
@@ -1105,8 +1242,8 @@
         int i = 0;
         for (auto& surfaceKey : surfaceKeys) {
 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-            ALOGV("%s: mStreamMap add surfaceKey %lu streamId %d, surfaceId %d",
-                    __FUNCTION__, surfaceKey, streamId, i);
+            ALOGV("%s: mStreamMap add surfaceKey %" PRIu64 " streamId %d, surfaceId %d",
+                  __FUNCTION__, surfaceKey, streamId, i);
 #else
             ALOGV("%s: mStreamMap add surfaceKey %p streamId %d, surfaceId %d",
                     __FUNCTION__, surfaceKey.get(), streamId, i);
@@ -1410,15 +1547,10 @@
         OutputStreamInfo outInfo;
         sp<Surface> outSurface;
         int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
-        res = SessionConfigurationUtils::createSurfaceFromGbp(
+        res = SessionConfigurationUtils::createConfiguredSurface(
                 outInfo,
                 /*isStreamInfoValid*/ false, outSurface,
-                newOutputsMap
-                        .valueAt(i)
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                        .graphicBufferProducer
-#endif
-                ,
+                flagtools::convertParcelableSurfaceTypeToSurface(newOutputsMap.valueAt(i)),
                 mCameraIdStr, mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed,
                 dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
                 /*respectSurfaceSize*/ false);
@@ -1598,6 +1730,10 @@
                 "Camera %s: Error flushing device: %s (%d)", mCameraIdStr.c_str(), strerror(-err),
                 err);
     }
+    if (flags::camera_multi_client() && mSharedMode) {
+        mSharedRequestMap.clear();
+        mStreamingRequestLastFrameNumber = *lastFrameNumber;
+    }
     return res;
 }
 
@@ -1815,15 +1951,11 @@
 
         sp<Surface> outSurface;
         int mirrorMode = outputConfiguration.getMirrorMode(surface);
-        res = SessionConfigurationUtils::createSurfaceFromGbp(
+        res = SessionConfigurationUtils::createConfiguredSurface(
                 mStreamInfoMap[streamId], true /*isStreamInfoValid*/, outSurface,
-                surface
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                .graphicBufferProducer
-#endif
-                , mCameraIdStr, mDevice->infoPhysical(physicalId),
-                sensorPixelModesUsed, dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
-                colorSpace, /*respectSurfaceSize*/ false);
+                flagtools::convertParcelableSurfaceTypeToSurface(surface), mCameraIdStr,
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/ false);
 
         if (!res.isOk()) return res;
 
@@ -1851,8 +1983,8 @@
                      "Could not get the SurfaceKey");
             }
 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-            ALOGV("%s: mStreamMap add surface_key %lu streamId %d, surfaceId %d", __FUNCTION__,
-                    surfaceKey, streamId, consumerSurfaceIds[i]);
+            ALOGV("%s: mStreamMap add surface_key %" PRIu64 " streamId %d, surfaceId %d",
+                  __FUNCTION__, surfaceKey, streamId, consumerSurfaceIds[i]);
 #else
             ALOGV("%s: mStreamMap add surface_key %p streamId %d, surfaceId %d", __FUNCTION__,
                     surfaceKey.get(), streamId, consumerSurfaceIds[i]);
@@ -1895,19 +2027,17 @@
 }
 
 status_t CameraDeviceClient::CreateMetadataQueue(
-        std::unique_ptr<MetadataQueue>* metadata_queue, uint32_t default_size_bytes) {
+        std::unique_ptr<MetadataQueue>* metadata_queue, size_t size_bytes) {
         if (metadata_queue == nullptr) {
             ALOGE("%s: metadata_queue is nullptr", __FUNCTION__);
             return BAD_VALUE;
         }
 
-        int32_t size = default_size_bytes;
-
         *metadata_queue =
-                std::make_unique<MetadataQueue>(static_cast<size_t>(size),
+                std::make_unique<MetadataQueue>(size_bytes,
                         /*configureEventFlagWord*/ false);
         if (!(*metadata_queue)->isValid()) {
-            ALOGE("%s: Creating metadata queue (size %d) failed.", __FUNCTION__, size);
+            ALOGE("%s: Creating metadata queue (size %zu) failed.", __FUNCTION__, size_bytes);
             return NO_INIT;
         }
 
@@ -2047,7 +2177,9 @@
             sp<Surface> s = new Surface(surface, false /*controlledByApp*/);
 #endif
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
-                                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+                                camera3::HeicCompositeStream::isHeicCompositeStream(
+                                        s, mDevice->isCompositeHeicDisabled(),
+                                        mDevice->isCompositeHeicUltraHDRDisabled()) ||
                                 (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
                                  !mDevice->isCompositeJpegRDisabled());
             if (isCompositeStream) {
@@ -2202,8 +2334,26 @@
                                      const CaptureResultExtras& resultExtras) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
-
     bool skipClientNotification = false;
+    if (flags::camera_multi_client() && mSharedMode && (resultExtras.requestId != -1)) {
+        int clientReqId;
+        bool matchStreamingRequest = matchSharedStreamingRequest(resultExtras.requestId);
+        bool matchCaptureRequest = matchSharedCaptureRequest(resultExtras.requestId);
+        if (matchStreamingRequest) {
+            clientReqId = mSharedStreamingRequest.second;
+        } else if (matchCaptureRequest) {
+            clientReqId = mSharedRequestMap[resultExtras.requestId];
+            mSharedRequestMap.erase(resultExtras.requestId);
+        } else {
+            return;
+        }
+        CaptureResultExtras mutableResultExtras = resultExtras;
+        mutableResultExtras.requestId = clientReqId;
+        if (remoteCb != 0) {
+            remoteCb->onDeviceError(errorCode, mutableResultExtras);
+        }
+        return;
+    }
     {
         // Access to the composite stream map must be synchronized
         Mutex::Autolock l(mCompositeLock);
@@ -2266,10 +2416,30 @@
         nsecs_t timestamp) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
-    if (remoteCb != 0) {
-        remoteCb->onCaptureStarted(resultExtras, timestamp);
+    CaptureResultExtras mutableResultExtras = resultExtras;
+    if (flags::camera_multi_client() && mSharedMode) {
+        int clientReqId;
+        bool matchStreamingRequest = matchSharedStreamingRequest(resultExtras.requestId);
+        bool matchCaptureRequest = matchSharedCaptureRequest(resultExtras.requestId);
+        if (matchStreamingRequest) {
+            clientReqId = mSharedStreamingRequest.second;
+        } else if (matchCaptureRequest) {
+            clientReqId = mSharedRequestMap[resultExtras.requestId];
+        } else {
+            return;
+        }
+        mutableResultExtras.requestId = clientReqId;
     }
-    Camera2ClientBase::notifyShutter(resultExtras, timestamp);
+
+    if (remoteCb != 0) {
+        remoteCb->onCaptureStarted(mutableResultExtras, timestamp);
+    }
+    Camera2ClientBase::notifyShutter(mutableResultExtras, timestamp);
+    if (flags::camera_multi_client() && mSharedMode) {
+        // When camera is opened in shared mode, composite streams are not
+        // supported.
+        return;
+    }
 
     // Access to the composite stream map must be synchronized
     Mutex::Autolock l(mCompositeLock);
@@ -2310,13 +2480,36 @@
     if (mDevice == 0) return;
 
     nsecs_t startTime = systemTime();
-    if (!flags::camera_multi_client() || sCameraService->isOnlyClient(this)){
-        ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
-
-        if (mFrameProcessor.get() != nullptr) {
+    if (mFrameProcessor.get() != nullptr) {
             mFrameProcessor->removeListener(
                     camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                     camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
+    }
+
+    if (flags::camera_multi_client() && mSharedMode) {
+        for (auto streamInfo : mStreamInfoMap) {
+            int streamToDelete = streamInfo.first;
+            std::vector<size_t> removedSurfaceIds;
+            for (size_t i = 0; i < mStreamMap.size(); ++i) {
+                if (streamToDelete == mStreamMap.valueAt(i).streamId()) {
+                    removedSurfaceIds.push_back(mStreamMap.valueAt(i).surfaceId());
+                }
+            }
+            status_t err = mDevice->removeSharedSurfaces(streamToDelete, removedSurfaceIds);
+            if (err != OK) {
+                std::string msg = fmt::sprintf("Camera %s: Unexpected error %s (%d) when removing"
+                        "shared surfaces from stream %d", mCameraIdStr.c_str(), strerror(-err),
+                        err, streamToDelete);
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            }
+        }
+    }
+
+    if (!flags::camera_multi_client() || !mSharedMode ||
+            (mSharedMode && sCameraService->isOnlyClient(this))){
+        ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
+
+        if (mFrameProcessor.get() != nullptr) {
             mFrameProcessor->requestExit();
             ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
             mFrameProcessor->join();
@@ -2398,9 +2591,67 @@
     return retVal;
 }
 
+bool CameraDeviceClient::matchSharedStreamingRequest(int reqId) {
+    if (!flags::camera_multi_client() || !mSharedMode) {
+        return false;
+    }
+    // In shared mode, check if the result req id matches the streaming request
+    // sent by client.
+    if (reqId == mSharedStreamingRequest.first) {
+        return true;
+    }
+    return false;
+}
+
+bool CameraDeviceClient::matchSharedCaptureRequest(int reqId) {
+    if (!flags::camera_multi_client() || !mSharedMode) {
+        return false;
+    }
+    // In shared mode, only primary clients can send the capture request. If the
+    // result req id does not match the streaming request id, check against the
+    // capture request ids sent by the primary client.
+    if (mIsPrimaryClient) {
+        auto iter = mSharedRequestMap.find(reqId);
+        if (iter != mSharedRequestMap.end()) {
+            return true;
+        }
+    }
+    return false;
+}
+
 void CameraDeviceClient::onResultAvailable(const CaptureResult& result) {
     ATRACE_CALL();
     ALOGVV("%s E", __FUNCTION__);
+    CaptureResult mutableResult = result;
+    bool matchStreamingRequest, matchCaptureRequest, sharedStreamingLastFrame;
+    if (flags::camera_multi_client() && mSharedMode) {
+        int clientReqId;
+        matchStreamingRequest = matchSharedStreamingRequest(result.mResultExtras.requestId);
+        matchCaptureRequest = matchSharedCaptureRequest(result.mResultExtras.requestId);
+        if (matchStreamingRequest) {
+            clientReqId = mSharedStreamingRequest.second;
+            // When a client stops streaming using cancelRequest, we still need to deliver couple
+            // more capture results to the client, till the lastframe number returned by the
+            // cancelRequest. Therefore, only clean the shared streaming request once all the frames for
+            // the repeating request have been delivered to the client.
+            sharedStreamingLastFrame = (mStreamingRequestId == REQUEST_ID_NONE)
+                    && (result.mResultExtras.frameNumber >= mStreamingRequestLastFrameNumber);
+            if (sharedStreamingLastFrame) {
+                mSharedStreamingRequest.first = REQUEST_ID_NONE;
+                mSharedStreamingRequest.second = REQUEST_ID_NONE;
+            }
+        } else if (matchCaptureRequest) {
+            clientReqId = mSharedRequestMap[result.mResultExtras.requestId];
+            mSharedRequestMap.erase(result.mResultExtras.requestId);
+        } else {
+            return;
+        }
+        mutableResult.mResultExtras.requestId = clientReqId;
+        if (mutableResult.mMetadata.update(ANDROID_REQUEST_ID, &clientReqId, 1) != OK) {
+            ALOGE("%s Failed to set request ID in metadata.", __FUNCTION__);
+            return;
+        }
+    }
 
     // Thread-safe. No lock necessary.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
@@ -2410,33 +2661,72 @@
         // Vendor clients need to modify metadata and also this call is in process
         // before going through FMQ to vendor clients. So don't use FMQ here.
         if (!mIsVendorClient && flags::fmq_metadata()) {
-            fmqMetadataSize = writeResultMetadataIntoResultQueue(result.mMetadata);
+            fmqMetadataSize = writeResultMetadataIntoResultQueue(mutableResult.mMetadata);
         }
         hardware::camera2::impl::CameraMetadataNative resultMetadata;
         CameraMetadataInfo resultInfo;
         if (fmqMetadataSize == 0) {
             // The flag was off / we're serving VNDK shim call or FMQ write failed.
-            resultMetadata = result.mMetadata;
+            resultMetadata = mutableResult.mMetadata;
             resultInfo.set<CameraMetadataInfo::metadata>(resultMetadata);
         } else {
             resultInfo.set<CameraMetadataInfo::fmqSize>(fmqMetadataSize);
         }
 
         std::vector<PhysicalCaptureResultInfo> physicalMetadatas =
-                convertToFMQ(result.mPhysicalMetadatas);
+                convertToFMQ(mutableResult.mPhysicalMetadatas);
 
-        remoteCb->onResultReceived(resultInfo, result.mResultExtras,
+        remoteCb->onResultReceived(resultInfo, mutableResult.mResultExtras,
                 physicalMetadatas);
+        if (flags::camera_multi_client() && mSharedMode) {
+            // If all the capture requests for this client have been processed,
+            // send the onDeviceIdle callback.
+            if ((mSharedStreamingRequest.first == REQUEST_ID_NONE) && mSharedRequestMap.empty() ) {
+                markClientIdle();
+            }
+        }
     }
 
     // Access to the composite stream map must be synchronized
     Mutex::Autolock l(mCompositeLock);
     for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
-        mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
+        mCompositeStreamMap.valueAt(i)->onResultAvailable(mutableResult);
     }
     ALOGVV("%s X", __FUNCTION__);
 }
 
+void CameraDeviceClient::markClientActive() {
+    Mutex::Autolock l(mDevice->mSharedDeviceActiveLock);
+    if (mDeviceActive) {
+        // Already in active state.
+        return;
+    }
+    status_t res = startCameraStreamingOps();
+    if (res != OK) {
+        ALOGE("%s: Camera %s: Error starting camera streaming ops: %d", __FUNCTION__,
+                mCameraIdStr.c_str(), res);
+    }
+    mDeviceActive = true;
+}
+
+void CameraDeviceClient::markClientIdle() {
+    Mutex::Autolock l(mDevice->mSharedDeviceActiveLock);
+    if (!mDeviceActive) {
+        // Already in idle state.
+        return;
+    }
+    sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
+    if (remoteCb != NULL) {
+        remoteCb->onDeviceIdle();
+    }
+    status_t res = finishCameraStreamingOps();
+    if (res != OK) {
+        ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
+                mCameraIdStr.c_str(), res);
+    }
+    mDeviceActive = false;
+}
+
 binder::Status CameraDeviceClient::checkPidStatus(const char* checkLocation) {
     if (mDisconnected) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 4ad3c49..f2ae3ab 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -98,6 +98,11 @@
     virtual binder::Status cancelRequest(int requestId,
             /*out*/
             int64_t* lastFrameNumber = NULL) override;
+    virtual binder::Status startStreaming(
+            const std::vector<int>& streamIds,
+            const std::vector<int>& surfaceIds,
+            /*out*/
+            hardware::camera2::utils::SubmitInfo *submitInfo = nullptr) override;
 
     virtual binder::Status beginConfigure() override;
 
@@ -267,7 +272,7 @@
             int8_t, android::hardware::common::fmq::SynchronizedReadWrite>;
     using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
     status_t CreateMetadataQueue(
-            std::unique_ptr<MetadataQueue>* metadata_queue, uint32_t default_size);
+            std::unique_ptr<MetadataQueue>* metadata_queue, size_t size_bytes);
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
     // streamId specifies the index of the stream the surface belongs to, and the
     // surfaceId specifies the index of the surface within the stream. (one stream
@@ -329,6 +334,11 @@
     // Surface only
     status_t getSurfaceKey(sp<Surface> surface, SurfaceKey* out) const;
 
+    bool matchSharedStreamingRequest(int reqId);
+    bool matchSharedCaptureRequest(int reqId);
+    void markClientActive();
+    void markClientIdle();
+
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
     KeyedVector<SurfaceKey, StreamSurfaceId> mStreamMap;
 
@@ -350,6 +360,9 @@
     // Streaming request ID
     int32_t mStreamingRequestId;
     Mutex mStreamingRequestIdLock;
+    std::pair<int32_t, int32_t> mSharedStreamingRequest;
+    std::map<int32_t, int32_t> mSharedRequestMap;
+    int64_t mStreamingRequestLastFrameNumber;
     static const int32_t REQUEST_ID_NONE = -1;
 
     int32_t mRequestIdCounter;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 14618c4..4b732bc 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -616,20 +616,10 @@
         return NO_INIT;
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    std::tie(mBlobConsumer, mBlobSurface) =
+            CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mBlobConsumer->setFrameAvailableListener(this);
     mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
-    mBlobSurface = mBlobConsumer->getSurface();
-#else
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
-    mBlobConsumer->setFrameAvailableListener(this);
-    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
-    mBlobSurface = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
             id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
@@ -648,18 +638,11 @@
         return ret;
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mDepthConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    std::tie(mDepthConsumer, mDepthSurface) =
+            CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mDepthConsumer->setFrameAvailableListener(this);
     mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
-    mDepthSurface = mDepthConsumer->getSurface();
-#else
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
-    mDepthConsumer->setFrameAvailableListener(this);
-    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
-    mDepthSurface = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
             kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index e1de010..f9b6d5b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -122,14 +122,20 @@
     mMainImageSurface.clear();
 }
 
-bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
-    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) ||
-                (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace))) &&
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo,
+                                                    bool isCompositeHeicDisabled,
+                                                    bool isCompositeHeicUltraHDRDisabled) {
+    return (((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) &&
+              !isCompositeHeicDisabled) ||
+             (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace) &&
+              !isCompositeHeicUltraHDRDisabled)) &&
             (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
 }
 
-bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
-    ANativeWindow *anw = surface.get();
+bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface>& surface,
+                                                bool isCompositeHeicDisabled,
+                                                bool isCompositeHeicUltraHDRDisabled) {
+    ANativeWindow* anw = surface.get();
     status_t err;
     int format;
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
@@ -147,8 +153,10 @@
         return false;
     }
 
-    return ((format == HAL_PIXEL_FORMAT_BLOB) && ((dataspace == HAL_DATASPACE_HEIF) ||
-                (dataspace == static_cast<int>(kUltraHDRDataSpace))));
+    return ((format == HAL_PIXEL_FORMAT_BLOB) &&
+            ((dataspace == HAL_DATASPACE_HEIF && !isCompositeHeicDisabled) ||
+             (dataspace == static_cast<int>(kUltraHDRDataSpace) &&
+              !isCompositeHeicUltraHDRDisabled)));
 }
 
 status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
@@ -185,26 +193,15 @@
         return NO_INIT;
     }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mAppSegmentSupported) {
-        mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+        std::tie(mAppSegmentConsumer, mAppSegmentSurface) =
+                CpuConsumer::create(kMaxAcquiredAppSegment);
         mAppSegmentConsumer->setFrameAvailableListener(this);
         mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
-        mAppSegmentSurface = mAppSegmentConsumer->getSurface();
     }
-    sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr ?
-        mAppSegmentSurface->getIGraphicBufferProducer() : nullptr;
-#else
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    if (mAppSegmentSupported) {
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
-        mAppSegmentConsumer->setFrameAvailableListener(this);
-        mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
-        mAppSegmentSurface = new Surface(producer);
-    }
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr
+                                                  ? mAppSegmentSurface->getIGraphicBufferProducer()
+                                                  : nullptr;
 
     if (mAppSegmentSupported) {
         std::vector<int> sourceSurfaceId;
@@ -235,13 +232,9 @@
             return res;
         }
     } else {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        mMainImageConsumer = new CpuConsumer(1);
-        producer = mMainImageConsumer->getSurface()->getIGraphicBufferProducer();
-#else
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mMainImageConsumer = new CpuConsumer(consumer, 1);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<Surface> surface;
+        std::tie(mMainImageConsumer, surface) = CpuConsumer::create(1);
+        producer = surface->getIGraphicBufferProducer();
         mMainImageConsumer->setFrameAvailableListener(this);
         mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
     }
@@ -2559,6 +2552,17 @@
                      break;
                  }
 
+                 case MediaCodec::CB_METRICS_FLUSHED:
+                 case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
+                 {
+                    // Nothing to do. Informational. Safe to ignore.
+                    break;
+                 }
+
+                 case MediaCodec::CB_CRYPTO_ERROR:
+                 // unexpected as we are not using crypto
+                 case MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE:
+                 // unexpected as we are not using large frames
                  default: {
                      ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                      break;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index ed70d56..f29a93b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -45,8 +45,11 @@
             wp<hardware::camera2::ICameraDeviceCallbacks> cb);
     ~HeicCompositeStream() override;
 
-    static bool isHeicCompositeStream(const sp<Surface> &surface);
-    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
+    static bool isHeicCompositeStream(const sp<Surface>& surface, bool isCompositeHeicDisabled,
+                                      bool isCompositeHeicUltraHDRDisabled);
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo,
+                                          bool isCompositeHeicDisabled,
+                                          bool isCompositeHeicUltraHDRDisabled);
 
     status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -89,7 +92,7 @@
     static bool isInMemoryTempFileSupported();
 
     // HDR Gainmap subsampling
-    static const size_t kGainmapScale = 4;
+    static constexpr size_t kGainmapScale = 4;
 
 protected:
 
@@ -145,9 +148,9 @@
     size_t            mGridRows, mGridCols, mGainmapGridRows, mGainmapGridCols;
     bool              mUseGrid, mGainmapUseGrid; // Whether to use framework YUV frame tiling.
 
-    static const int64_t kNoFrameDropMaxPtsGap = -1000000;
-    static const int32_t kNoGridOpRate = 30;
-    static const int32_t kGridOpRate = 120;
+    static constexpr int64_t kNoFrameDropMaxPtsGap = -1000000;
+    static constexpr int32_t kNoGridOpRate = 30;
+    static constexpr int32_t kGridOpRate = 120;
 
     void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo, bool isGainmap);
     void onHeicInputFrameAvailable(int32_t index, bool isGainmap);// Only called for YUV input mode.
@@ -243,17 +246,17 @@
     static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
     void updateCodecQualityLocked(int32_t quality);
 
-    static const nsecs_t kWaitDuration = 10000000; // 10 ms
-    static const int32_t kDefaultJpegQuality = 99;
-    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
-    static const android_dataspace kAppSegmentDataSpace =
+    static constexpr nsecs_t kWaitDuration = 10000000; // 10 ms
+    static constexpr int32_t kDefaultJpegQuality = 99;
+    static constexpr auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static constexpr android_dataspace kAppSegmentDataSpace =
             static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
-    static const android_dataspace kHeifDataSpace =
+    static constexpr android_dataspace kHeifDataSpace =
             static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
     android_dataspace mInternalDataSpace = kHeifDataSpace;
     // Use the limit of pipeline depth in the API sepc as maximum number of acquired
     // app segment buffers.
-    static const uint32_t kMaxAcquiredAppSegment = 8;
+    static constexpr uint32_t kMaxAcquiredAppSegment = 8;
 
     int               mAppSegmentStreamId, mAppSegmentSurfaceId;
     sp<CpuConsumer>   mAppSegmentConsumer;
@@ -268,7 +271,7 @@
     bool              mYuvBufferAcquired; // Only applicable to HEVC codec
     std::queue<int64_t> mMainImageFrameNumbers;
 
-    static const int32_t        kMaxOutputSurfaceProducerCount = 1;
+    static constexpr int32_t    kMaxOutputSurfaceProducerCount = 1;
     sp<Surface>                 mOutputSurface;
     sp<StreamSurfaceListener>   mStreamSurfaceListener;
     int32_t                     mDequeuedOutputBufferCnt;
@@ -328,39 +331,39 @@
     bool mHDRGainmapEnabled = false;
 
     // UltraHDR tonemap color and format aspects
-    static const uhdr_img_fmt_t kUltraHdrInputFmt = UHDR_IMG_FMT_24bppYCbCrP010;
-    static const uhdr_color_gamut kUltraHdrInputGamut = UHDR_CG_BT_2100;
-    static const uhdr_color_transfer kUltraHdrInputTransfer = UHDR_CT_HLG;
-    static const uhdr_color_range kUltraHdrInputRange = UHDR_CR_FULL_RANGE;
+    static constexpr uhdr_img_fmt_t kUltraHdrInputFmt = UHDR_IMG_FMT_24bppYCbCrP010;
+    static constexpr uhdr_color_gamut kUltraHdrInputGamut = UHDR_CG_BT_2100;
+    static constexpr uhdr_color_transfer kUltraHdrInputTransfer = UHDR_CT_HLG;
+    static constexpr uhdr_color_range kUltraHdrInputRange = UHDR_CR_FULL_RANGE;
 
-    static const uhdr_img_fmt_t kUltraHdrOutputFmt = UHDR_IMG_FMT_12bppYCbCr420;
-    static const uhdr_color_gamut kUltraHdrOutputGamut = UHDR_CG_DISPLAY_P3;
-    static const uhdr_color_transfer kUltraHdrOutputTransfer = UHDR_CT_SRGB;
-    static const uhdr_color_range kUltraHdrOutputRange = UHDR_CR_FULL_RANGE;
+    static constexpr uhdr_img_fmt_t kUltraHdrOutputFmt = UHDR_IMG_FMT_12bppYCbCr420;
+    static constexpr uhdr_color_gamut kUltraHdrOutputGamut = UHDR_CG_DISPLAY_P3;
+    static constexpr uhdr_color_transfer kUltraHdrOutputTransfer = UHDR_CT_SRGB;
+    static constexpr uhdr_color_range kUltraHdrOutputRange = UHDR_CR_FULL_RANGE;
 
-    static const auto kUltraHDRDataSpace =
+    static constexpr auto kUltraHDRDataSpace =
         aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR;
 
     // MediaMuxer/Codec color and format aspects for base image and gainmap metadata
-    static const int32_t kCodecColorFormat = COLOR_FormatYUV420Flexible;
-    static const ColorAspects::Primaries kCodecColorPrimaries =
+    static constexpr int32_t kCodecColorFormat = COLOR_FormatYUV420Flexible;
+    static constexpr ColorAspects::Primaries kCodecColorPrimaries =
         ColorAspects::Primaries::PrimariesEG432;
-    static const ColorAspects::MatrixCoeffs kCodecColorMatrix =
+    static constexpr ColorAspects::MatrixCoeffs kCodecColorMatrix =
         ColorAspects::MatrixCoeffs::MatrixUnspecified;
-    static const ColorAspects::Transfer kCodecColorTransfer =
+    static constexpr ColorAspects::Transfer kCodecColorTransfer =
         ColorAspects::Transfer::TransferSRGB;
-    static const ColorAspects::Range kCodecColorRange =
+    static constexpr ColorAspects::Range kCodecColorRange =
         ColorAspects::Range::RangeFull;
 
     // MediaMuxer/Codec color and format aspects for gainmap as per ISO 23008-12:2024
-    static const int32_t kCodecGainmapColorFormat = COLOR_FormatYUV420Flexible;
-    static const ColorAspects::Primaries kCodecGainmapColorPrimaries =
+    static constexpr int32_t kCodecGainmapColorFormat = COLOR_FormatYUV420Flexible;
+    static constexpr ColorAspects::Primaries kCodecGainmapColorPrimaries =
         ColorAspects::Primaries::PrimariesUnspecified;
-    static const ColorAspects::MatrixCoeffs kCodecGainmapColorMatrix =
+    static constexpr ColorAspects::MatrixCoeffs kCodecGainmapColorMatrix =
         ColorAspects::MatrixCoeffs::MatrixUnspecified;
-    static const ColorAspects::Transfer kCodecGainmapColorTransfer =
+    static constexpr ColorAspects::Transfer kCodecGainmapColorTransfer =
         ColorAspects::Transfer::TransferUnspecified;
-    static const ColorAspects::Range kCodecGainmapColorRange =
+    static constexpr ColorAspects::Range kCodecGainmapColorRange =
         ColorAspects::Range::RangeFull;
 
 
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index e0d7604..b8c9244 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -16,7 +16,7 @@
 
 #define LOG_TAG "Camera3-JpegRCompositeStream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
@@ -25,6 +25,7 @@
 #include "utils/SessionConfigurationUtils.h"
 
 #include <com_android_graphics_libgui_flags.h>
+#include <gui/CpuConsumer.h>
 #include <gui/Surface.h>
 #include <hardware/gralloc.h>
 #include <system/graphics-base-v1.0.h>
@@ -575,20 +576,10 @@
             mStaticInfo, mP010DynamicRange,
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mP010Consumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    std::tie(mP010Consumer, mP010Surface) =
+            CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mP010Consumer->setFrameAvailableListener(this);
     mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
-    mP010Surface = mP010Consumer->getSurface();
-#else
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
-    mP010Consumer->setFrameAvailableListener(this);
-    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
-    mP010Surface = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
             static_cast<android_dataspace>(mP010DataSpace), rotation,
@@ -606,18 +597,11 @@
     }
 
     if (mSupportInternalJpeg) {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        std::tie(mBlobConsumer, mBlobSurface) =
+                CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
         mBlobConsumer->setFrameAvailableListener(this);
         mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
-        mBlobSurface = mBlobConsumer->getSurface();
-#else
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
-        mBlobConsumer->setFrameAvailableListener(this);
-        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
-        mBlobSurface = new Surface(producer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
         std::vector<int> blobSurfaceId;
         ret = device->createStream(mBlobSurface, width, height, format,
                 kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 03abf71..83fa587 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -24,7 +24,8 @@
 #include <utils/Trace.h>
 
 #include <cutils/properties.h>
-#include <gui/Surface.h>
+#include <gui/BufferItem.h>
+#include <gui/BufferItemConsumer.h>
 #include <gui/Surface.h>
 
 #include <android/hardware/ICameraService.h>
@@ -59,7 +60,7 @@
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraId, int api1CameraId, int cameraFacing, int sensorOrientation,
         int servicePid, bool overrideForPerfClass, int rotationOverride, bool sharedMode,
-        bool legacyClient)
+        bool isVendorClient, bool legacyClient)
     : TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientAttribution,
                   callingPid, systemNativeClient, cameraId, api1CameraId, cameraFacing,
                   sensorOrientation, servicePid, rotationOverride, sharedMode),
@@ -74,6 +75,7 @@
     mInitialClientPid = TClientBase::mCallingPid;
     mOverrideForPerfClass = overrideForPerfClass;
     mLegacyClient = legacyClient;
+    mIsVendorClient = isVendorClient;
 }
 
 template <typename TClientBase>
@@ -115,20 +117,23 @@
                     new HidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mRotationOverride, mLegacyClient);
+                            TClientBase::mRotationOverride, mIsVendorClient,
+                            mLegacyClient);
             break;
         case IPCTransport::AIDL:
             if (flags::camera_multi_client() && TClientBase::mSharedMode) {
                 mDevice = AidlCamera3SharedDevice::getInstance(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mRotationOverride, mLegacyClient);
+                            TClientBase::mRotationOverride, mIsVendorClient,
+                            mLegacyClient);
             } else {
                 mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mRotationOverride, mLegacyClient);
+                            TClientBase::mRotationOverride, mIsVendorClient,
+                            mLegacyClient);
             }
             break;
         default:
@@ -304,7 +309,7 @@
 void Camera2ClientBase<TClientBase>::detachDevice() {
     if (mDevice == 0) return;
     if (flags::camera_multi_client() && TClientBase::mSharedMode) {
-        mDevice->disconnectClient(TClientBase::getClientUid());
+        mDevice->disconnectClient(TClientBase::getClientCallingPid());
     } else {
         mDevice->disconnect();
     }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index cb30199..4a70330 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -57,7 +57,7 @@
                       bool systemNativeClient, const std::string& cameraId, int api1CameraId,
                       int cameraFacing, int sensorOrientation, int servicePid,
                       bool overrideForPerfClass, int rotationOverride,  bool sharedMode,
-                      bool legacyClient = false);
+                      bool isVendorClient, bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -142,6 +142,7 @@
     pid_t mInitialClientPid;
     bool mOverrideForPerfClass = false;
     bool mLegacyClient = false;
+    bool mIsVendorClient = false;
     std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
 
     virtual sp<IBinder> asBinderWrapper() {
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 4fe9444..68e783b 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -30,10 +30,13 @@
 #include "hardware/camera2.h"
 #include "camera/CameraMetadata.h"
 #include "camera/CaptureResult.h"
+#if not WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
 #include "gui/IGraphicBufferProducer.h"
+#endif
 #include "device3/Camera3StreamInterface.h"
 #include "device3/StatusTracker.h"
 #include "binder/Status.h"
+#include "FrameProcessorBase.h"
 #include "FrameProducer.h"
 #include "utils/IPCTransport.h"
 #include "utils/SessionConfigurationUtils.h"
@@ -47,7 +50,6 @@
 typedef enum camera_stream_configuration_mode {
     CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
     CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
-    CAMERA_STREAM_CONFIGURATION_SHARED_MODE = 2,
     CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
 } camera_stream_configuration_mode_t;
 
@@ -108,6 +110,8 @@
     virtual const CameraMetadata& infoPhysical(const std::string& physicalId) const = 0;
 
     virtual bool isCompositeJpegRDisabled() const { return false; };
+    virtual bool isCompositeHeicDisabled() const { return false; }
+    virtual bool isCompositeHeicUltraHDRDisabled() const { return false; }
 
     struct PhysicalCameraSettings {
         std::string cameraId;
@@ -301,7 +305,8 @@
      * In shared session mode, this function retrieves the stream ID associated with a specific
      * output configuration.
      */
-    virtual status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) = 0;
+    virtual status_t getSharedStreamId(const android::camera3::OutputStreamInfo &config,
+            int *streamId) = 0;
 
     /**
      * In shared session mode, this function add surfaces to an existing shared stream ID.
@@ -316,6 +321,49 @@
     virtual status_t removeSharedSurfaces(int streamId, const std::vector<size_t> &surfaceIds) = 0;
 
     /**
+     * In shared session mode, this function retrieves the frame processor.
+     */
+    virtual sp<camera2::FrameProcessorBase> getSharedFrameProcessor() = 0;
+
+    /**
+     * Submit a shared streaming request for streaming.
+     * Output lastFrameNumber is the last frame number of the previous streaming request.
+     */
+    virtual status_t setSharedStreamingRequest(
+            const PhysicalCameraSettingsList &request,
+            const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+            int64_t *lastFrameNumber = NULL) = 0;
+
+    /**
+     * Clear the shared streaming request slot.
+     * Output lastFrameNumber is the last frame number of the previous streaming request.
+     */
+    virtual status_t clearSharedStreamingRequest(int64_t *lastFrameNumber = NULL) = 0;
+
+    /**
+     * In shared session mode, only primary clients can change the capture
+     * parameters through capture request or repeating request. When the primary
+     * client sends the capture request to the camera device, the request ID is
+     * overridden by the camera device to maintain unique ID. This API is
+     * similar to the captureList API, with the only difference that the request
+     * ID is changed by the device before submitting the request to the HAL.
+     * Output sharedReqID is the request ID actually used.
+     * Output lastFrameNumber is the expected last frame number of the list of requests.
+     */
+    virtual status_t setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+                                 const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+                                 int64_t *lastFrameNumber = NULL) = 0;
+
+    /**
+     * Submit a start streaming request.
+     * Output lastFrameNumber is the last frame number of the previous streaming request.
+     */
+    virtual status_t startStreaming(const int32_t reqId, const SurfaceMap &surfaceMap,
+            int32_t *sharedReqID, int64_t *lastFrameNumber = NULL) = 0;
+
+    virtual int32_t getCaptureResultFMQSize() = 0;
+
+    /**
      * Take the currently-defined set of streams and configure the HAL to use
      * them. This is a long-running operation (may be several hundered ms).
      *
@@ -548,6 +596,9 @@
     virtual status_t injectSessionParams(
         const CameraMetadata& sessionParams) = 0;
 
+    // Lock to synchronize onDeviceActive and onDeviceIdle callbacks when camera
+    // has been opened in shared mode.
+    mutable Mutex mSharedDeviceActiveLock;
 protected:
     bool mImageDumpMask = 0;
     std::vector<int64_t> mStreamUseCaseOverrides;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a8d7480..536f56a 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -23,6 +23,8 @@
 
 #include "CameraProviderManager.h"
 
+#include "config/SharedSessionConfigReader.h"
+
 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
 
 #include <algorithm>
@@ -33,7 +35,6 @@
 #include <dlfcn.h>
 #include <future>
 #include <inttypes.h>
-#include <android_companion_virtualdevice_flags.h>
 #include <android_companion_virtualdevice_build_flags.h>
 #include <android/binder_libbinder.h>
 #include <android/binder_manager.h>
@@ -397,6 +398,30 @@
     return deviceInfo->isCompositeJpegRDisabled();
 }
 
+bool CameraProviderManager::isCompositeHeicDisabled(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    return isCompositeHeicDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeHeicDisabledLocked(const std::string &id) const {
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return false;
+
+    return deviceInfo->isCompositeHeicDisabled();
+}
+
+bool CameraProviderManager::isCompositeHeicUltraHDRDisabled(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    return isCompositeHeicUltraHDRDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeHeicUltraHDRDisabledLocked(const std::string &id) const {
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return false;
+
+    return deviceInfo->isCompositeHeicUltraHDRDisabled();
+}
+
 status_t CameraProviderManager::getResourceCost(const std::string &id,
         CameraResourceCost* cost) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -2070,11 +2095,8 @@
     int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
     if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_3) {
         versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
-    } else if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_4) {
-        if (flags::feature_combination_baklava()) {
+        if (flags::feature_combination_baklava() && getVNDKVersion() > 35) {
             versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_BAKLAVA;
-        } else {
-            versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
         }
     }
     res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
@@ -2090,63 +2112,72 @@
     return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSharedSessionConfigurationTags() {
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSharedSessionConfigurationTags(
+        const std::string &cameraId) {
     status_t res = OK;
     if (flags::camera_multi_client()) {
+        SharedSessionConfigReader configReader;
+        ErrorCode status =
+                configReader.parseSharedSessionConfig(
+                                    (std::string(SHARED_SESSION_FILE_PATH)
+                                     + std::string(SHARED_SESSION_FILE_NAME)).c_str());
+        if (status != 0) {
+            ALOGE("%s: failed to initialize SharedSessionConfigReader with ErrorCode %s",
+                  __FUNCTION__, SharedSessionConfigUtils::toString(status));
+            return BAD_VALUE;
+        }
         const int32_t sharedColorSpaceTag = ANDROID_SHARED_SESSION_COLOR_SPACE;
         const int32_t sharedOutputConfigurationsTag = ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS;
         auto& c = mCameraCharacteristics;
-        uint8_t colorSpace = 0;
+        int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+        status = configReader.getColorSpace(&colorSpace);
+        if (status != 0) {
+            ALOGE("%s: failed to get color space from config reader with ErrorCode %s",
+                  __FUNCTION__, SharedSessionConfigUtils::toString(status));
+            return BAD_VALUE;
+        }
 
         res = c.update(sharedColorSpaceTag, &colorSpace, 1);
+        if (res != OK) {
+            ALOGE("%s: failed to update sharedColorSpaceTag with error %d", __FUNCTION__, res);
+            return res;
+        }
 
-        // ToDo: b/372321187 Hardcoding the shared session configuration. Update the code to
-        // take these values from XML instead.
+        std::vector<SharedSessionConfigReader::SharedSessionConfig> outputConfigurations;
+        status = configReader.getAvailableSharedSessionConfigs(cameraId.c_str(),
+                                                               &outputConfigurations);
+        if (status != 0) {
+            ALOGE("%s: failed to get output configurations from config reader with ErrorCode %s",
+                  __FUNCTION__, SharedSessionConfigUtils::toString(status));
+            return BAD_VALUE;
+        }
+
         std::vector<int64_t> sharedOutputConfigEntries;
-        int64_t surfaceType1 =  OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
-        int64_t width = 1280;
-        int64_t height = 800;
-        int64_t format1 = HAL_PIXEL_FORMAT_RGBA_8888;
-        int64_t mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
-        int64_t timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT;
-        int64_t usage1 = 3;
-        int64_t dataspace = 0;
-        int64_t useReadoutTimestamp = 0;
-        int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
-        int64_t physicalCamIdLen = 0;
 
-        // Stream 1 configuration hardcoded
-        sharedOutputConfigEntries.push_back(surfaceType1);
-        sharedOutputConfigEntries.push_back(width);
-        sharedOutputConfigEntries.push_back(height);
-        sharedOutputConfigEntries.push_back(format1);
-        sharedOutputConfigEntries.push_back(mirrorMode);
-        sharedOutputConfigEntries.push_back(useReadoutTimestamp);
-        sharedOutputConfigEntries.push_back(timestampBase);
-        sharedOutputConfigEntries.push_back(dataspace);
-        sharedOutputConfigEntries.push_back(usage1);
-        sharedOutputConfigEntries.push_back(streamUseCase);
-        sharedOutputConfigEntries.push_back(physicalCamIdLen);
-
-        // Stream 2 configuration hardcoded
-        int64_t surfaceType2 =  OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW;
-        int64_t format2 = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-        int64_t usage2 = 0;
-
-        sharedOutputConfigEntries.push_back(surfaceType2);
-        sharedOutputConfigEntries.push_back(width);
-        sharedOutputConfigEntries.push_back(height);
-        sharedOutputConfigEntries.push_back(format2);
-        sharedOutputConfigEntries.push_back(mirrorMode);
-        sharedOutputConfigEntries.push_back(useReadoutTimestamp);
-        sharedOutputConfigEntries.push_back(timestampBase);
-        sharedOutputConfigEntries.push_back(dataspace);
-        sharedOutputConfigEntries.push_back(usage2);
-        sharedOutputConfigEntries.push_back(streamUseCase);
-        sharedOutputConfigEntries.push_back(physicalCamIdLen);
+        for (auto outputConfig : outputConfigurations) {
+            sharedOutputConfigEntries.push_back(outputConfig.surfaceType);
+            sharedOutputConfigEntries.push_back(outputConfig.width);
+            sharedOutputConfigEntries.push_back(outputConfig.height);
+            sharedOutputConfigEntries.push_back(outputConfig.format);
+            sharedOutputConfigEntries.push_back(outputConfig.mirrorMode);
+            sharedOutputConfigEntries.push_back(outputConfig.useReadoutTimestamp);
+            sharedOutputConfigEntries.push_back(outputConfig.timestampBase);
+            sharedOutputConfigEntries.push_back(outputConfig.dataSpace);
+            sharedOutputConfigEntries.push_back(outputConfig.usage);
+            sharedOutputConfigEntries.push_back(outputConfig.streamUseCase);
+            if (strcmp(outputConfig.physicalCameraId.c_str(), "")) {
+                sharedOutputConfigEntries.push_back(outputConfig.physicalCameraId.length());
+                for (char c : outputConfig.physicalCameraId) {
+                    sharedOutputConfigEntries.push_back(c);
+                }
+            } else {
+                sharedOutputConfigEntries.push_back(/* physical camera id len */ 0);
+            }
+        }
 
         res = c.update(sharedOutputConfigurationsTag, sharedOutputConfigEntries.data(),
-                sharedOutputConfigEntries.size());
+                       sharedOutputConfigEntries.size());
     }
     return res;
 }
@@ -2245,6 +2276,10 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags(bool maxResolution) {
+    if (mCompositeHeicDisabled) {
+        return OK;
+    }
+
     int32_t scalerStreamSizesTag =
             SessionConfigurationUtils::getAppropriateModeTag(
                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
@@ -3613,8 +3648,7 @@
 }
 
 bool CameraProviderManager::isVirtualCameraHalEnabled() {
-    return vd_flags::virtual_camera_service_discovery() &&
-           vd_flags::virtual_camera_service_build_flag();
+    return vd_flags::virtual_camera_service_build_flag();
 }
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 11985f5..b42f5a8 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -44,6 +44,8 @@
 #include <binder/IServiceManager.h>
 #include <camera/VendorTagDescriptor.h>
 
+#include "config/SharedSessionConfigUtils.h"
+
 namespace android {
 
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
@@ -88,7 +90,6 @@
 #define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
 #define CAMERA_DEVICE_API_VERSION_1_2 HARDWARE_DEVICE_API_VERSION(1, 2)
 #define CAMERA_DEVICE_API_VERSION_1_3 HARDWARE_DEVICE_API_VERSION(1, 3)
-#define CAMERA_DEVICE_API_VERSION_1_4 HARDWARE_DEVICE_API_VERSION(1, 4)
 #define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
 #define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
 #define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
@@ -284,6 +285,16 @@
     bool isCompositeJpegRDisabled(const std::string &id) const;
 
     /**
+     * Return true if the camera device has no composite HEIC support.
+     */
+    bool isCompositeHeicDisabled(const std::string &id) const;
+
+    /**
+     * Return true if the camera device has no composite HEIC Ultra HDR support.
+     */
+    bool isCompositeHeicUltraHDRDisabled(const std::string &id) const;
+
+    /**
      * Return the resource cost of this camera device
      */
     status_t getResourceCost(const std::string &id,
@@ -633,6 +644,7 @@
             bool hasFlashUnit() const { return mHasFlashUnit; }
             bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
             bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
+            bool isCompositeHeicDisabled() const { return mCompositeHeicDisabled; }
             bool isCompositeHeicUltraHDRDisabled() const { return mCompositeHeicUltraHDRDisabled; }
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
@@ -691,14 +703,16 @@
                     mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
                     mHasFlashUnit(false), mSupportNativeZoomRatio(false),
                     mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false),
-                    mCompositeHeicUltraHDRDisabled(false) {}
+                    mCompositeHeicDisabled(false), mCompositeHeicUltraHDRDisabled(false) {}
             virtual ~DeviceInfo() {}
         protected:
 
             bool mHasFlashUnit; // const after constructor
             bool mSupportNativeZoomRatio; // const after constructor
             const std::vector<std::string>& mPublicCameraIds;
-            bool mCompositeJpegRDisabled, mCompositeHeicUltraHDRDisabled;
+            bool mCompositeJpegRDisabled;
+            bool mCompositeHeicDisabled;
+            bool mCompositeHeicUltraHDRDisabled;
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
@@ -782,7 +796,7 @@
             status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
             status_t addAePriorityModeTags();
             status_t addSessionConfigQueryVersionTag();
-            status_t addSharedSessionConfigurationTags();
+            status_t addSharedSessionConfigurationTags(const std::string &cameraId);
             bool isAutomotiveDevice();
 
             static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
@@ -907,6 +921,8 @@
     ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
 
     bool isCompositeJpegRDisabledLocked(const std::string &id) const;
+    bool isCompositeHeicDisabledLocked(const std::string &id) const;
+    bool isCompositeHeicUltraHDRDisabledLocked(const std::string &id) const;
 
     // Map external providers to USB devices in order to handle USB hotplug
     // events for lazy HALs
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 88998c6..b9e8cdd 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -23,13 +23,13 @@
 #include <cutils/properties.h>
 
 #include <aidlcommonsupport/NativeHandle.h>
-#include <android_companion_virtualdevice_flags.h>
 #include <android/binder_manager.h>
 #include <android/hardware/ICameraService.h>
 #include <camera_metadata_hidden.h>
 
 #include "device3/DistortionMapper.h"
 #include "device3/ZoomRatioMapper.h"
+#include <filesystem>
 #include <utils/AttributionAndPermissionUtils.h>
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
@@ -42,7 +42,6 @@
 
 namespace SessionConfigurationUtils = ::android::camera3::SessionConfigurationUtils;
 namespace flags = com::android::internal::camera::flags;
-namespace vd_flags = android::companion::virtualdevice::flags;
 
 using namespace aidl::android::hardware;
 using namespace hardware::camera;
@@ -134,10 +133,14 @@
     }
 
     mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(AIBinder_DeathRecipient_new(binderDied));
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), /*onUnlinked*/ [](void *cookie) {
+            AIBinderCookie *binderCookie = reinterpret_cast<AIBinderCookie *>(cookie);
+            delete binderCookie;
+        });
 
-    if (!vd_flags::virtual_camera_service_discovery() || interface->isRemote()) {
-        binder_status_t link =
-                AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(), this);
+    if (interface->isRemote()) {
+        binder_status_t link = AIBinder_linkToDeath(
+            interface->asBinder().get(), mDeathRecipient.get(), new AIBinderCookie{this});
         if (link != STATUS_OK) {
             ALOGW("%s: Unable to link to provider '%s' death notifications (%d)", __FUNCTION__,
                   mProviderName.c_str(), link);
@@ -201,9 +204,12 @@
 }
 
 void AidlProviderInfo::binderDied(void *cookie) {
-    AidlProviderInfo *provider = reinterpret_cast<AidlProviderInfo *>(cookie);
-    ALOGI("Camera provider '%s' has died; removing it", provider->mProviderInstance.c_str());
-    provider->mManager->removeProvider(std::string(provider->mProviderInstance));
+    AIBinderCookie* binderCookie = reinterpret_cast<AIBinderCookie*>(cookie);
+    sp<AidlProviderInfo> provider = binderCookie->providerInfo.promote();
+    if (provider != nullptr) {
+        ALOGI("Camera provider '%s' has died; removing it", provider->mProviderInstance.c_str());
+        provider->mManager->removeProvider(provider->mProviderInstance);
+    }
 }
 
 status_t AidlProviderInfo::setUpVendorTags() {
@@ -317,7 +323,7 @@
 
     interface->setCallback(mCallbacks);
     auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
-            this);
+            new AIBinderCookie{this});
     if (link != STATUS_OK) {
         ALOGW("%s: Unable to link to provider '%s' death notifications",
                 __FUNCTION__, mProviderName.c_str());
@@ -491,7 +497,20 @@
     int resV = validate_camera_metadata_structure(buffer, &expectedSize);
     if (resV == OK || resV == CAMERA_METADATA_VALIDATION_SHIFTED) {
         set_camera_metadata_vendor_id(buffer, mProviderTagid);
-        mCameraCharacteristics = buffer;
+        if (flags::metadata_resize_fix()) {
+            //b/379388099: Create a CameraCharacteristics object slightly larger
+            //to accommodate framework addition/modification. This is to
+            //optimize memory because the CameraMetadata::update() doubles the
+            //memory footprint, which could be significant if original
+            //CameraCharacteristics is already large.
+            mCameraCharacteristics = {
+                    get_camera_metadata_entry_count(buffer) + CHARACTERISTICS_EXTRA_ENTRIES,
+                    get_camera_metadata_data_count(buffer) + CHARACTERISTICS_EXTRA_DATA_SIZE
+            };
+            mCameraCharacteristics.append(buffer);
+        } else {
+            mCameraCharacteristics = buffer;
+        }
     } else {
         ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
         return;
@@ -518,6 +537,8 @@
 
     mCompositeJpegRDisabled = mCameraCharacteristics.exists(
             ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+    mCompositeHeicDisabled = mCameraCharacteristics.exists(
+            ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
     mCompositeHeicUltraHDRDisabled = mCameraCharacteristics.exists(
             ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS);
 
@@ -695,7 +716,20 @@
             int res = validate_camera_metadata_structure(pBuffer, &expectedSize);
             if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
                 set_camera_metadata_vendor_id(pBuffer, mProviderTagid);
-                mPhysicalCameraCharacteristics[id] = pBuffer;
+                if (flags::metadata_resize_fix()) {
+                    //b/379388099: Create a CameraCharacteristics object slightly larger
+                    //to accommodate framework addition/modification. This is to
+                    //optimize memory because the CameraMetadata::update() doubles the
+                    //memory footprint, which could be significant if original
+                    //CameraCharacteristics is already large.
+                    mPhysicalCameraCharacteristics[id] = {
+                          get_camera_metadata_entry_count(pBuffer) + CHARACTERISTICS_EXTRA_ENTRIES,
+                          get_camera_metadata_data_count(pBuffer) + CHARACTERISTICS_EXTRA_DATA_SIZE
+                    };
+                    mPhysicalCameraCharacteristics[id].append(pBuffer);
+                } else {
+                    mPhysicalCameraCharacteristics[id] = pBuffer;
+                }
             } else {
                 ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
                 return;
@@ -733,8 +767,11 @@
                 {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
     }
 
-    if (flags::camera_multi_client() && isAutomotiveDevice()) {
-        addSharedSessionConfigurationTags();
+    std::filesystem::path sharedSessionConfigFilePath =
+            std::string(SHARED_SESSION_FILE_PATH) + std::string(SHARED_SESSION_FILE_NAME);
+    if (flags::camera_multi_client() && std::filesystem::exists(sharedSessionConfigFilePath)
+            && mSystemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        addSharedSessionConfigurationTags(id);
     }
 
     if (!kEnableLazyHal) {
@@ -846,10 +883,11 @@
 
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
-            mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            checkSessionParams, mAdditionalKeysForFeatureQuery, &earlyExit);
+    auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(
+            configuration, mId, mCameraCharacteristics, mCompositeJpegRDisabled,
+            mCompositeHeicDisabled, mCompositeHeicUltraHDRDisabled, getMetadata, mPhysicalIds,
+            streamConfiguration, overrideForPerfClass, mProviderTagid, checkSessionParams,
+            mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -957,10 +995,11 @@
         camera3::metadataGetter getMetadata, CameraMetadata* outChars) {
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    auto res = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
-            mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            /*checkSessionParams*/true, mAdditionalKeysForFeatureQuery, &earlyExit);
+    auto res = SessionConfigurationUtils::convertToHALStreamCombination(
+            configuration, mId, mCameraCharacteristics, mCompositeJpegRDisabled,
+            mCompositeHeicDisabled, mCompositeHeicUltraHDRDisabled, getMetadata, mPhysicalIds,
+            streamConfiguration, overrideForPerfClass, mProviderTagid,
+            /*checkSessionParams*/ true, mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!res.isOk()) {
         return UNKNOWN_ERROR;
@@ -1045,7 +1084,9 @@
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
                     cameraId, deviceInfo,
-                    mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
+                    mManager->isCompositeJpegRDisabledLocked(cameraId),
+                    mManager->isCompositeHeicDisabledLocked(cameraId),
+                    mManager->isCompositeHeicUltraHDRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, mProviderTagid,
                     /*checkSessionParams*/false, /*additionalKeys*/{},
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index 1983cc3..3eb7e44 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -168,8 +168,29 @@
                 *halCameraIdsAndStreamCombinations,
         bool *earlyExit);
     std::shared_ptr<AidlProviderCallbacks> mCallbacks = nullptr;
+    struct AIBinderCookie {
+        wp<AidlProviderInfo> providerInfo;
+    };
     ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
 
+    // Estimate based on the camera framework modification on camera
+    // characteristics:
+    // - Flash strength: 4 entries
+    // - Dynamic depth: 6 entries
+    // - Heic: 6 entries
+    // - Rotation and crop: 1 entry
+    // - Autoframing: 1 entry
+    // - Pre-correction active array size: 1 entry
+    // - Zoom ratio: 1 entry
+    // - Readout timestamp: 1 entry
+    // - color correction modes: 1 entry
+    // - AE priority modes: 1 entry
+    // - Torch strength level: 2 entries
+    // - Session config query version: 1 entry
+    //
+    // Total: 26 entries. Round up to 64 entries.
+    static constexpr size_t CHARACTERISTICS_EXTRA_ENTRIES = 64;
+    static constexpr size_t CHARACTERISTICS_EXTRA_DATA_SIZE = 1024; // in bytes
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/config/SharedSessionConfigReader.cpp b/services/camera/libcameraservice/config/SharedSessionConfigReader.cpp
new file mode 100644
index 0000000..2ea5ffa
--- /dev/null
+++ b/services/camera/libcameraservice/config/SharedSessionConfigReader.cpp
@@ -0,0 +1,239 @@
+//
+// Copyright 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#define LOG_TAG "SharedSessionConfigReader"
+
+#include "SharedSessionConfigReader.h"
+
+#include <fstream>
+#include <utils/Log.h>
+
+using tinyxml2::XML_SUCCESS;
+using tinyxml2::XMLDocument;
+namespace android {
+
+ErrorCode SharedSessionConfigReader::parseSharedSessionConfig(
+        const char* sharedSessionConfigFilePath) {
+    if (!mCameraIdToSharedSessionConfigs.empty()) {
+        ALOGV("mCameraIdToSharedSessionConfigs already initialized.");
+        return ErrorCode::STATUS_OK;
+    }
+
+    XMLDocument xmlDoc;
+
+    // load and parse the configuration file
+    xmlDoc.LoadFile(sharedSessionConfigFilePath);
+    if (xmlDoc.ErrorID() != XML_SUCCESS) {
+        ALOGE("%s: Failed to load/parse the configuration file: %s, with error: %s", __FUNCTION__,
+              sharedSessionConfigFilePath, xmlDoc.ErrorStr());
+        return ErrorCode::ERROR_READ_CONFIG_FILE;
+    }
+
+    ErrorCode status = parseSharedSessionConfigFromXMLDocument(xmlDoc);
+    if (status != ErrorCode::STATUS_OK) {
+        ALOGE("%s: Error while parsing XML elements of file at: %s", __FUNCTION__,
+              sharedSessionConfigFilePath);
+        return status;
+    }
+
+    return ErrorCode::STATUS_OK;
+}
+
+ErrorCode SharedSessionConfigReader::parseSharedSessionConfigFromXMLDocument(
+        const XMLDocument& xmlDoc) {
+    const XMLElement* rootElem = xmlDoc.RootElement();
+    if (strcmp(rootElem->Name(), "SharedCameraSessionConfigurations")) {
+        ALOGE("%s: Expected root element to be 'SharedCameraSessionConfigurations'. Instead got %s",
+              __FUNCTION__, rootElem->Name());
+        return ErrorCode::ERROR_READ_CONFIG_FILE;
+    }
+
+    ErrorCode status;
+    const char* colorSpaceStr = rootElem->Attribute("colorSpace");
+    status = SharedSessionConfigUtils::getColorSpaceFromStr(colorSpaceStr, &mColorSpace);
+    if (status != ErrorCode::STATUS_OK) {
+        ALOGE("%s: getColorSpaceFromStr has returned an error: %s", __FUNCTION__,
+              SharedSessionConfigUtils::toString(status));
+        return status;
+    }
+
+    std::unordered_map<std::string, std::vector<SharedSessionConfig>>
+            cameraIdToSharedSessionConfigs;
+
+    for (const XMLElement* sharedConfigElem =
+                 rootElem->FirstChildElement("SharedCameraSessionConfiguration");
+            sharedConfigElem != nullptr;
+            sharedConfigElem =
+                 sharedConfigElem->NextSiblingElement("SharedCameraSessionConfiguration")) {
+
+        const char* cameraId = sharedConfigElem->Attribute("cameraId");
+        if (cameraId == nullptr || !strcmp(cameraId, "")) {
+            ALOGE("%s: cameraId attribute is empty", __FUNCTION__);
+            return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+        }
+
+        for (const XMLElement* outputConfigElem =
+                     sharedConfigElem->FirstChildElement("OutputConfiguration");
+                outputConfigElem != nullptr;
+                outputConfigElem = outputConfigElem->NextSiblingElement("OutputConfiguration")) {
+            int64_t surfaceType;
+            const XMLElement* surfaceTypeXml = outputConfigElem->FirstChildElement("surfaceType");
+            status = SharedSessionConfigUtils::getSurfaceTypeFromXml(surfaceTypeXml, &surfaceType);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getSurfaceTypeFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t width;
+            const XMLElement* widthXml = outputConfigElem->FirstChildElement("width");
+            status = SharedSessionConfigUtils::getWidthFromXml(widthXml, &width);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getWidthFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t height;
+            const XMLElement* heightXml = outputConfigElem->FirstChildElement("height");
+            status = SharedSessionConfigUtils::getHeightFromXml(heightXml, &height);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getHeightFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            std::string physicalCameraId;
+            const XMLElement* physicalCameraIdXml =
+                    outputConfigElem->FirstChildElement("physicalCameraId");
+            status = SharedSessionConfigUtils::getPhysicalCameraIdFromXml(physicalCameraIdXml,
+                                                                          &physicalCameraId);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getPhysicalCameraIdFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t streamUseCase;
+            const XMLElement* streamUseCaseXml =
+                    outputConfigElem->FirstChildElement("streamUseCase");
+            status = SharedSessionConfigUtils::getStreamUseCaseFromXml(streamUseCaseXml,
+                                                                       &streamUseCase);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getStreamUseCaseFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t timestampBase;
+            const XMLElement* timestampBaseXml =
+                    outputConfigElem->FirstChildElement("timestampBase");
+            status = SharedSessionConfigUtils::getTimestampBaseFromXml(timestampBaseXml,
+                                                                       &timestampBase);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getTimestampBaseFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t mirrorMode;
+            const XMLElement* mirrorModeXml = outputConfigElem->FirstChildElement("mirrorMode");
+            status = SharedSessionConfigUtils::getMirrorModeFromXml(mirrorModeXml, &mirrorMode);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getMirrorModeFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            bool useReadoutTimestamp;
+            const XMLElement* useReadoutTimestampXml =
+                    outputConfigElem->FirstChildElement("useReadoutTimestamp");
+            status = SharedSessionConfigUtils::getUseReadoutTimestampFromXml(useReadoutTimestampXml,
+                                                                             &useReadoutTimestamp);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getUseReadoutTimestampFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t format;
+            const XMLElement* formatXml = outputConfigElem->FirstChildElement("format");
+            status = SharedSessionConfigUtils::getFormatFromXml(formatXml, &format, surfaceType);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getFormatFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t usage;
+            const XMLElement* usageXml = outputConfigElem->FirstChildElement("usage");
+            status = SharedSessionConfigUtils::getUsageFromXml(usageXml, &usage, surfaceType);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getUsageFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            int64_t dataSpace;
+            const XMLElement* dataSpaceXml = outputConfigElem->FirstChildElement("dataSpace");
+            status = SharedSessionConfigUtils::getDataSpaceFromXml(dataSpaceXml, &dataSpace);
+            if (status != ErrorCode::STATUS_OK) {
+                ALOGE("%s: getDataSpaceFromXml has returned an error: %s", __FUNCTION__,
+                      SharedSessionConfigUtils::toString(status));
+                return status;
+            }
+
+            cameraIdToSharedSessionConfigs[cameraId].push_back(
+                    SharedSessionConfig{surfaceType, width, height, physicalCameraId, streamUseCase,
+                                        timestampBase, mirrorMode, useReadoutTimestamp, format,
+                                        usage, dataSpace});
+        }
+    }
+
+    if (cameraIdToSharedSessionConfigs.empty()) {
+        ALOGE("%s: No elements with tag 'SharedCameraSessionConfiguration' in file", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    mCameraIdToSharedSessionConfigs = cameraIdToSharedSessionConfigs;
+    return ErrorCode::STATUS_OK;
+}
+
+// Copies the parsed shared color space into *colorSpace. Always returns STATUS_OK.
+// NOTE(review): mColorSpace is only assigned during parsing; calling this before a
+// successful parse returns an indeterminate value. Consider an initialization
+// guard like the one in getAvailableSharedSessionConfigs() — TODO confirm.
+ErrorCode SharedSessionConfigReader::getColorSpace(/* out */ int32_t* colorSpace) {
+    *colorSpace = mColorSpace;
+    return ErrorCode::STATUS_OK;
+}
+
+// Returns all available shared session configurations for the given cameraId.
+// Fails with ERROR_CONFIG_READER_UNINITIALIZED if no config has been parsed yet,
+// and ERROR_BAD_PARAMETER if cameraId is unknown.
+ErrorCode SharedSessionConfigReader::getAvailableSharedSessionConfigs(
+        const char* cameraId, /* out */ std::vector<SharedSessionConfig>* availableConfigurations) {
+    if (mCameraIdToSharedSessionConfigs.empty()) {
+        ALOGE("%s: mCameraIdToSharedSessionConfigs is empty. Call initialize() first.",
+              __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_READER_UNINITIALIZED;
+    }
+
+    // Single lookup instead of contains() followed by operator[]; this also
+    // avoids operator[] default-inserting an entry for an unknown cameraId.
+    auto entry = mCameraIdToSharedSessionConfigs.find(cameraId);
+    if (entry == mCameraIdToSharedSessionConfigs.end()) {
+        ALOGE("%s: cameraId: %s not found in mCameraIdToSharedSessionConfigs.", __FUNCTION__,
+              cameraId);
+        return ErrorCode::ERROR_BAD_PARAMETER;
+    }
+
+    *availableConfigurations = entry->second;
+    return ErrorCode::STATUS_OK;
+}
+
+}  // namespace android
diff --git a/services/camera/libcameraservice/config/SharedSessionConfigReader.h b/services/camera/libcameraservice/config/SharedSessionConfigReader.h
new file mode 100644
index 0000000..aa52236
--- /dev/null
+++ b/services/camera/libcameraservice/config/SharedSessionConfigReader.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGREADER_H_
+#define ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGREADER_H_
+
+#include "SharedSessionConfigUtils.h"
+
+#include <string>
+#include "tinyxml2.h"
+#include <vector>
+#include <unordered_map>
+
+using tinyxml2::XMLDocument;
+using tinyxml2::XMLElement;
+namespace android {
+
+class SharedSessionConfigReader {
+public:
+
+    // Struct for shared session configurations.
+    struct SharedSessionConfig {
+        // Output surface type (OutputConfiguration::SURFACE_TYPE_*).
+        int64_t surfaceType;
+        // Output width in pixels (> 0).
+        int64_t width;
+        // Output height in pixels (> 0).
+        int64_t height;
+        // Physical camera id; empty string when not specified in the config.
+        std::string physicalCameraId;
+        // Stream use case (ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_*).
+        int64_t streamUseCase;
+        // Timestamp base (OutputConfiguration::TIMESTAMP_BASE_*).
+        int64_t timestampBase;
+        // Mirror mode (OutputConfiguration::MIRROR_MODE_*).
+        int64_t mirrorMode;
+        // Whether readout timestamps are used for this output.
+        bool useReadoutTimestamp;
+        // Pixel format (HAL_PIXEL_FORMAT_*).
+        int64_t format;
+        // AHardwareBuffer usage flags (bitwise OR of AHARDWAREBUFFER_USAGE_*).
+        int64_t usage;
+        // Data space (HAL_DATASPACE_*).
+        int64_t dataSpace;
+    };
+
+    // Reads shared session config files and stores parsed results in mColorSpace and
+    // mCameraIdToSharedSessionConfigs.
+    ErrorCode parseSharedSessionConfig(const char* sharedSessionConfigFilePath);
+
+    // Reads shared session config files and stores parsed results in mColorSpace and
+    // mCameraIdToSharedSessionConfigs.
+    ErrorCode parseSharedSessionConfigFromXMLDocument(const XMLDocument& xmlDoc);
+
+    // Return color space of a camera device.
+    ErrorCode getColorSpace(int32_t* colorSpace);
+
+    // Return all available shared configs for a cameraId.
+    ErrorCode getAvailableSharedSessionConfigs(
+            const char* cameraId, std::vector<SharedSessionConfig>* availableConfigurations);
+
+private:
+
+    // shared color space of devices
+    // NOTE(review): not initialized here; holds an indeterminate value until a
+    // parse succeeds — consider a default member initializer. TODO confirm.
+    int32_t mColorSpace;
+
+    // stores parsed configs, mapped from cameraId to available session configs.
+    std::unordered_map<std::string, std::vector<SharedSessionConfig>>
+            mCameraIdToSharedSessionConfigs;
+
+    // Processes xml and populates mColorSpace and mCameraIdToSharedSessionConfigs.
+    // Called by the parseSharedSessionConfig* entry points. (NOTE(review): there is
+    // no initialize() method despite log messages referring to one — verify naming.)
+    ErrorCode readConfig(const XMLElement* rootElem);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGREADER_H_
diff --git a/services/camera/libcameraservice/config/SharedSessionConfigUtils.cpp b/services/camera/libcameraservice/config/SharedSessionConfigUtils.cpp
new file mode 100644
index 0000000..a9ccdc9
--- /dev/null
+++ b/services/camera/libcameraservice/config/SharedSessionConfigUtils.cpp
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SharedSessionConfigUtils"
+
+#include "SharedSessionConfigUtils.h"
+
+#include <inttypes.h>
+#include <sstream>
+#include <utils/Log.h>
+
+namespace android {
+
+// Translates an ErrorCode into its symbolic name for use in log messages.
+// Unknown values are logged and rendered as the empty string.
+const char* SharedSessionConfigUtils::toString(ErrorCode errorCode) {
+    if (errorCode == ErrorCode::STATUS_OK) {
+        return "STATUS_OK";
+    }
+    if (errorCode == ErrorCode::ERROR_READ_CONFIG_FILE) {
+        return "ERROR_READ_CONFIG_FILE";
+    }
+    if (errorCode == ErrorCode::ERROR_CONFIG_FILE_FORMAT) {
+        return "ERROR_CONFIG_FILE_FORMAT";
+    }
+    if (errorCode == ErrorCode::ERROR_CONFIG_READER_UNINITIALIZED) {
+        return "ERROR_CONFIG_READER_UNINITIALIZED";
+    }
+    if (errorCode == ErrorCode::ERROR_BAD_PARAMETER) {
+        return "ERROR_BAD_PARAMETER";
+    }
+
+    // Should be unreachable: every enumerator is handled above.
+    ALOGE("%s: Called toString on an unknown ErrorCode. This should never happen",
+          __FUNCTION__);
+    return "";
+}
+
+// Parses an optional color-space attribute string into *colorSpace.
+// Missing/empty input maps to UNSPECIFIED; values outside VALID_COLOR_SPACES
+// yield ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getColorSpaceFromStr(const char* colorSpaceStr,
+                                                         /* out */ int32_t* colorSpace) {
+    if (colorSpaceStr == nullptr || !strcmp(colorSpaceStr, "")) {
+        *colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+        return ErrorCode::STATUS_OK;
+    }
+
+    // NOTE(review): strtol yields 0 for non-numeric text; whether garbage input is
+    // rejected depends on 0's membership in VALID_COLOR_SPACES — confirm intended.
+    int32_t colorSpaceInt = (int32_t) std::strtol(colorSpaceStr, nullptr, 0);
+    if (VALID_COLOR_SPACES.find(colorSpaceInt) == VALID_COLOR_SPACES.end()) {
+        ALOGE("%s: colorSpace %" PRId32 " is invalid: ", __FUNCTION__, colorSpaceInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_COLOR_SPACES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *colorSpace = colorSpaceInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the mandatory <surfaceType> element into *surfaceType.
+// A missing/empty element or a value outside VALID_SURFACE_TYPES yields
+// ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getSurfaceTypeFromXml(const XMLElement* surfaceTypeXml,
+                                                          /* out */ int64_t* surfaceType) {
+    if (surfaceTypeXml == nullptr || surfaceTypeXml->GetText() == nullptr
+            || !strcmp(surfaceTypeXml->GetText(), "")) {
+        ALOGE("%s: surface type field must be populated", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    // Base 0 lets the config use decimal, hex (0x..), or octal notation.
+    int64_t surfaceTypeInt = std::strtol(surfaceTypeXml->GetText(), nullptr, 0);
+    if (VALID_SURFACE_TYPES.find(surfaceTypeInt) == VALID_SURFACE_TYPES.end()) {
+        ALOGE("%s: surfaceType %" PRId64 " is invalid: ", __FUNCTION__, surfaceTypeInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_SURFACE_TYPES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *surfaceType = surfaceTypeInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the mandatory <width> element into *width.
+// Fails with ERROR_CONFIG_FILE_FORMAT when the element is missing/empty or the
+// parsed value is not a positive integer.
+ErrorCode SharedSessionConfigUtils::getWidthFromXml(const XMLElement* widthXml,
+                                                    /* out */ int64_t* width) {
+    if (widthXml == nullptr || widthXml->GetText() == nullptr
+            || !strcmp(widthXml->GetText(), "")) {
+        ALOGE("%s: width field must be populated", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    const char* widthStr = widthXml->GetText();
+    *width = std::strtol(widthStr, nullptr, 0);
+    if (*width <= 0) {
+        // Bug fix: previously this only logged and still returned STATUS_OK,
+        // letting a non-positive width through. Reject it like the other
+        // validators do.
+        ALOGE("%s: width value is invalid", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the mandatory <height> element into *height.
+// Fails with ERROR_CONFIG_FILE_FORMAT when the element is missing/empty or the
+// parsed value is not a positive integer.
+ErrorCode SharedSessionConfigUtils::getHeightFromXml(const XMLElement* heightXml,
+                                                     /* out */ int64_t* height) {
+    if (heightXml == nullptr || heightXml->GetText() == nullptr
+            || !strcmp(heightXml->GetText(), "")) {
+        ALOGE("%s: height field must be populated", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    const char* heightStr = heightXml->GetText();
+    *height = std::strtol(heightStr, nullptr, 0);
+    if (*height <= 0) {
+        // Bug fix: previously this only logged and still returned STATUS_OK,
+        // letting a non-positive height through. Reject it like the other
+        // validators do.
+        ALOGE("%s: height value is invalid", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <physicalCameraId> element. An absent element (or absent
+// text) maps to the empty string; this never fails.
+ErrorCode SharedSessionConfigUtils::getPhysicalCameraIdFromXml(
+        const XMLElement* physicalCameraIdXml, /* out */ std::string* physicalCameraId) {
+    const char* idText =
+            (physicalCameraIdXml != nullptr) ? physicalCameraIdXml->GetText() : nullptr;
+    if (idText != nullptr) {
+        *physicalCameraId = idText;
+    } else {
+        *physicalCameraId = "";
+    }
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <streamUseCase> element into *streamUseCase.
+// Missing/empty input maps to STREAM_USE_CASES_DEFAULT; values outside
+// VALID_STREAM_USE_CASES yield ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getStreamUseCaseFromXml(const XMLElement* streamUseCaseXml,
+                                                            /* out */ int64_t* streamUseCase) {
+    if (streamUseCaseXml == nullptr || streamUseCaseXml->GetText() == nullptr
+            || !strcmp(streamUseCaseXml->GetText(), "")) {
+        *streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+        return ErrorCode::STATUS_OK;
+    }
+
+    int64_t streamUseCaseInt = std::strtol(streamUseCaseXml->GetText(), nullptr, 0);
+    if (VALID_STREAM_USE_CASES.find(streamUseCaseInt) == VALID_STREAM_USE_CASES.end()) {
+        ALOGE("%s: streamUseCase %" PRId64 " is invalid: ", __FUNCTION__, streamUseCaseInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_STREAM_USE_CASES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *streamUseCase = streamUseCaseInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <timestampBase> element into *timestampBase.
+// Missing/empty input maps to TIMESTAMP_BASE_DEFAULT; values outside
+// VALID_TIMESTAMP_BASES yield ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getTimestampBaseFromXml(const XMLElement* timestampBaseXml,
+                                                            /* out */ int64_t* timestampBase) {
+    if (timestampBaseXml == nullptr || timestampBaseXml->GetText() == nullptr
+            || !strcmp(timestampBaseXml->GetText(), "")) {
+        *timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT;
+        return ErrorCode::STATUS_OK;
+    }
+
+    int64_t timestampBaseInt = std::strtol(timestampBaseXml->GetText(), nullptr, 0);
+    if (VALID_TIMESTAMP_BASES.find(timestampBaseInt) == VALID_TIMESTAMP_BASES.end()) {
+        ALOGE("%s: timestampBase %" PRId64 " is invalid: ", __FUNCTION__, timestampBaseInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_TIMESTAMP_BASES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *timestampBase = timestampBaseInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <mirrorMode> element into *mirrorMode.
+// Missing/empty input maps to MIRROR_MODE_AUTO; values outside
+// VALID_MIRROR_MODES yield ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getMirrorModeFromXml(const XMLElement* mirrorModeXml,
+                                                         /* out */ int64_t* mirrorMode) {
+    if (mirrorModeXml == nullptr || mirrorModeXml->GetText() == nullptr
+            || !strcmp(mirrorModeXml->GetText(), "")) {
+        *mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
+        return ErrorCode::STATUS_OK;
+    }
+
+    int64_t mirrorModeInt = std::strtol(mirrorModeXml->GetText(), nullptr, 0);
+    if (VALID_MIRROR_MODES.find(mirrorModeInt) == VALID_MIRROR_MODES.end()) {
+        ALOGE("%s: mirrorMode %" PRId64 " is invalid: ", __FUNCTION__, mirrorModeInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_MIRROR_MODES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *mirrorMode = mirrorModeInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <useReadoutTimestamp> element: "1" -> true,
+// "0" or absent/empty -> false, any other text -> ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getUseReadoutTimestampFromXml(
+        const XMLElement* useReadoutTimestampXml, /* out */ bool* useReadoutTimestamp) {
+    // Only inspect the text when the element exists and is non-empty; note that
+    // strcmp(x, "") is non-zero (true) exactly when the text is non-empty.
+    if (useReadoutTimestampXml != nullptr && useReadoutTimestampXml->GetText() != nullptr
+            && strcmp(useReadoutTimestampXml->GetText(), "")) {
+        const char* useReadoutTimestampStr = useReadoutTimestampXml->GetText();
+        if (!strcmp(useReadoutTimestampStr, "1")) {
+            *useReadoutTimestamp = true;
+            return ErrorCode::STATUS_OK;
+        } else if (strcmp(useReadoutTimestampStr, "0")) {
+            // Text is neither "1" nor "0": malformed config.
+            ALOGE("%s: useReadoutTimestamp string %s is invalid: ", __FUNCTION__,
+                  useReadoutTimestampStr);
+            ALOGE("%s: Expected one of: {0, 1}", __FUNCTION__);
+            return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+        }
+    }
+
+    // Reached for absent/empty element or an explicit "0".
+    *useReadoutTimestamp = false;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the <format> element into *format. The element is only consulted for
+// SURFACE_TYPE_IMAGE_READER, where it is mandatory; every other surface type is
+// forced to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.
+ErrorCode SharedSessionConfigUtils::getFormatFromXml(const XMLElement* formatXml,
+                                                     /* out */ int64_t* format,
+                                                     int64_t surfaceType) {
+    if (surfaceType != OutputConfiguration::SURFACE_TYPE_IMAGE_READER) {
+        // if surface type is not image reader, format must default to impl defined enum.
+        *format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+        return ErrorCode::STATUS_OK;
+    }
+
+    if (formatXml == nullptr || formatXml->GetText() == nullptr
+            || !strcmp(formatXml->GetText(), "")) {
+        ALOGE("%s: format field must be populated", __FUNCTION__);
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    int64_t formatInt = std::strtol(formatXml->GetText(), nullptr, 0);
+    if (VALID_FORMATS.find(formatInt) == VALID_FORMATS.end()) {
+        ALOGE("%s: format %" PRId64 " is invalid: ", __FUNCTION__, formatInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_FORMATS).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *format = formatInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the <usage> element into *usage. Surface types other than
+// SURFACE_TYPE_IMAGE_READER get fixed usage flags regardless of the xml; for
+// image readers a '|'-separated list of flags is parsed and validated, with a
+// missing/empty element defaulting to AHARDWAREBUFFER_USAGE_CPU_READ_NEVER.
+ErrorCode SharedSessionConfigUtils::getUsageFromXml(const XMLElement* usageXml,
+                                                    /* out */ int64_t* usage,
+                                                    int64_t surfaceType) {
+    if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE) {
+        // if surface type is SURFACE_TYPE_SURFACE_TEXTURE, usage must default to
+        // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE.
+        *usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+        return ErrorCode::STATUS_OK;
+    }
+
+    if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
+        // if surface type is SURFACE_TYPE_SURFACE_VIEW, usage must default to
+        // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY.
+        *usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY;
+        return ErrorCode::STATUS_OK;
+    }
+
+    if (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER
+            || surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) {
+        // if surface type is SURFACE_TYPE_MEDIA_RECORDER or SURFACE_TYPE_MEDIA_CODEC, usage must
+        // default to AHARDWAREBUFFER_USAGE_VIDEO_ENCODE
+        *usage = AHARDWAREBUFFER_USAGE_VIDEO_ENCODE;
+        return ErrorCode::STATUS_OK;
+    }
+
+    if (usageXml == nullptr || usageXml->GetText() == nullptr
+            || !strcmp(usageXml->GetText(), "")) {
+        *usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER;
+        return ErrorCode::STATUS_OK;
+    }
+
+    // Bug fix: clear the accumulator before OR-ing flags in. The caller passes
+    // an uninitialized int64_t, so "*usage |= flag" below previously accumulated
+    // into indeterminate memory.
+    *usage = 0;
+
+    const char* usageStr = usageXml->GetText();
+    std::vector<std::string> usageFlags = splitString(usageStr, '|');
+
+    // Validate each flag against the allow-list before OR-ing it into *usage.
+    for (const std::string& usageFlagStr : usageFlags) {
+        int64_t usageFlag = std::strtol(usageFlagStr.c_str(), nullptr, 0);
+        if (VALID_USAGES.find(usageFlag) == VALID_USAGES.end()) {
+            ALOGE("%s: usage %" PRId64 " is invalid: ", __FUNCTION__, usageFlag);
+            ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_USAGES).c_str());
+            return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+        }
+
+        *usage |= usageFlag;
+    }
+
+    return ErrorCode::STATUS_OK;
+}
+
+// Parses the optional <dataSpace> element into *dataSpace.
+// Missing/empty input maps to HAL_DATASPACE_UNKNOWN; values outside
+// VALID_DATA_SPACES yield ERROR_CONFIG_FILE_FORMAT.
+ErrorCode SharedSessionConfigUtils::getDataSpaceFromXml(const XMLElement* dataSpaceXml,
+                                                        /* out */ int64_t* dataSpace) {
+    if (dataSpaceXml == nullptr || dataSpaceXml->GetText() == nullptr
+            || !strcmp(dataSpaceXml->GetText(), "")) {
+        *dataSpace = HAL_DATASPACE_UNKNOWN;
+        return ErrorCode::STATUS_OK;
+    }
+
+    int64_t dataSpaceInt = std::strtol(dataSpaceXml->GetText(), nullptr, 0);
+    if (VALID_DATA_SPACES.find(dataSpaceInt) == VALID_DATA_SPACES.end()) {
+        ALOGE("%s: dataSpace %" PRId64 " is invalid: ", __FUNCTION__, dataSpaceInt);
+        ALOGE("%s: Expected one of: %s", __FUNCTION__, setToString(VALID_DATA_SPACES).c_str());
+        return ErrorCode::ERROR_CONFIG_FILE_FORMAT;
+    }
+
+    *dataSpace = dataSpaceInt;
+    return ErrorCode::STATUS_OK;
+}
+
+// Splits inputString on the given delimiter, returning the pieces in order.
+// Used for parsing '|'-separated usage flag lists.
+std::vector<std::string> SharedSessionConfigUtils::splitString(std::string inputString,
+                                                               char delimiter) {
+    std::vector<std::string> pieces;
+    std::istringstream stream(inputString);
+
+    for (std::string piece; std::getline(stream, piece, delimiter);) {
+        pieces.push_back(piece);
+    }
+
+    return pieces;
+}
+
+// Renders the set as "{a, b, c}" for inclusion in log messages.
+std::string SharedSessionConfigUtils::setToString(const std::set<int64_t>& s) {
+    std::ostringstream out;
+    out << "{";
+
+    bool first = true;
+    for (int64_t value : s) {
+        if (!first) {
+            out << ", ";
+        }
+        out << value;
+        first = false;
+    }
+
+    out << "}";
+    return out.str();
+}
+
+}  // namespace android
diff --git a/services/camera/libcameraservice/config/SharedSessionConfigUtils.h b/services/camera/libcameraservice/config/SharedSessionConfigUtils.h
new file mode 100644
index 0000000..2efe1dd
--- /dev/null
+++ b/services/camera/libcameraservice/config/SharedSessionConfigUtils.h
@@ -0,0 +1,274 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGUTILS_H_
+#define ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGUTILS_H_
+
+#define SHARED_SESSION_FILE_PATH "system_ext/etc/"
+#define SHARED_SESSION_FILE_NAME "shared_session_config.xml"
+
+#include <android/hardware_buffer.h>
+#include <camera/camera2/OutputConfiguration.h>
+#include <system/camera_metadata.h>
+#include <system/graphics.h>
+
+#include <set>
+#include <string>
+#include "tinyxml2.h"
+#include <vector>
+
+using tinyxml2::XMLElement;
+namespace android {
+
+// Status/error codes returned by the SharedSessionConfig reader and utils.
+// uint8_t keeps the enum compact. NOTE(review): a plain enum (not enum class)
+// leaks enumerators into the namespace; callers already use the scoped
+// ErrorCode:: spelling, so an enum class would be a compatible tightening.
+enum ErrorCode : uint8_t {
+    // OK status.
+    STATUS_OK = 0,
+
+    // Error status. Cannot read the config file (config file missing or not
+    // accessible)
+    ERROR_READ_CONFIG_FILE = 1,
+
+    // Error status. Config file format doesn't match.
+    ERROR_CONFIG_FILE_FORMAT = 2,
+
+    // Error status. Config reader hasn't been initialized.
+    ERROR_CONFIG_READER_UNINITIALIZED = 3,
+
+    // Error status. Bad parameter.
+    ERROR_BAD_PARAMETER = 4,
+};
+
+inline const std::set<int64_t> VALID_COLOR_SPACES = {
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB,
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3,
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG,
+};
+
+inline const std::set<int64_t> VALID_SURFACE_TYPES = {
+        OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW,
+        OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE,
+        OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER,
+        OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC,
+        OutputConfiguration::SURFACE_TYPE_IMAGE_READER,
+};
+
+inline const std::set<int64_t> VALID_STREAM_USE_CASES = {
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW,
+        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START,
+};
+
+inline const std::set<int64_t> VALID_TIMESTAMP_BASES = {
+        OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+        OutputConfiguration::TIMESTAMP_BASE_SENSOR,
+        OutputConfiguration::TIMESTAMP_BASE_MONOTONIC,
+        OutputConfiguration::TIMESTAMP_BASE_REALTIME,
+        OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
+        OutputConfiguration::TIMESTAMP_BASE_MAX,
+};
+
+inline const std::set<int64_t> VALID_MIRROR_MODES = {
+        OutputConfiguration::MIRROR_MODE_AUTO,
+        OutputConfiguration::MIRROR_MODE_NONE,
+        OutputConfiguration::MIRROR_MODE_H,
+        OutputConfiguration::MIRROR_MODE_V,
+};
+
+inline const std::set<int64_t> VALID_FORMATS = {
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_RGBX_8888,
+        HAL_PIXEL_FORMAT_RGB_888,
+        HAL_PIXEL_FORMAT_RGB_565,
+        HAL_PIXEL_FORMAT_BGRA_8888,
+        HAL_PIXEL_FORMAT_YCBCR_422_SP,
+        HAL_PIXEL_FORMAT_YCRCB_420_SP,
+        HAL_PIXEL_FORMAT_YCBCR_422_I,
+        HAL_PIXEL_FORMAT_RGBA_FP16,
+        HAL_PIXEL_FORMAT_RAW16,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+        HAL_PIXEL_FORMAT_YCBCR_420_888,
+        HAL_PIXEL_FORMAT_RAW_OPAQUE,
+        HAL_PIXEL_FORMAT_RAW10,
+        HAL_PIXEL_FORMAT_RAW12,
+        HAL_PIXEL_FORMAT_RGBA_1010102,
+        HAL_PIXEL_FORMAT_Y8,
+        HAL_PIXEL_FORMAT_Y16,
+        HAL_PIXEL_FORMAT_YV12,
+        HAL_PIXEL_FORMAT_DEPTH_16,
+        HAL_PIXEL_FORMAT_DEPTH_24,
+        HAL_PIXEL_FORMAT_DEPTH_24_STENCIL_8,
+        HAL_PIXEL_FORMAT_DEPTH_32F,
+        HAL_PIXEL_FORMAT_DEPTH_32F_STENCIL_8,
+        HAL_PIXEL_FORMAT_STENCIL_8,
+        HAL_PIXEL_FORMAT_YCBCR_P010,
+        HAL_PIXEL_FORMAT_HSV_888,
+};
+
+inline const std::set<int64_t> VALID_USAGES = {
+        AHARDWAREBUFFER_USAGE_CPU_READ_NEVER,
+        AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+        AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
+        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER,
+        AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY,
+        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+        AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER,
+        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT,
+        AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY,
+        AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT,
+        AHARDWAREBUFFER_USAGE_VIDEO_ENCODE,
+        AHARDWAREBUFFER_USAGE_SENSOR_DIRECT_DATA,
+        AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER,
+        AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP,
+        AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE,
+        AHARDWAREBUFFER_USAGE_FRONT_BUFFER,
+        AHARDWAREBUFFER_USAGE_VENDOR_0,
+        AHARDWAREBUFFER_USAGE_VENDOR_1,
+        AHARDWAREBUFFER_USAGE_VENDOR_2,
+        AHARDWAREBUFFER_USAGE_VENDOR_3,
+        AHARDWAREBUFFER_USAGE_VENDOR_4,
+        AHARDWAREBUFFER_USAGE_VENDOR_5,
+        AHARDWAREBUFFER_USAGE_VENDOR_6,
+        AHARDWAREBUFFER_USAGE_VENDOR_7,
+        AHARDWAREBUFFER_USAGE_VENDOR_8,
+        AHARDWAREBUFFER_USAGE_VENDOR_9,
+        AHARDWAREBUFFER_USAGE_VENDOR_10,
+        AHARDWAREBUFFER_USAGE_VENDOR_11,
+        AHARDWAREBUFFER_USAGE_VENDOR_12,
+        AHARDWAREBUFFER_USAGE_VENDOR_13,
+        AHARDWAREBUFFER_USAGE_VENDOR_14,
+        AHARDWAREBUFFER_USAGE_VENDOR_15,
+        AHARDWAREBUFFER_USAGE_VENDOR_16,
+        AHARDWAREBUFFER_USAGE_VENDOR_17,
+        AHARDWAREBUFFER_USAGE_VENDOR_18,
+};
+
+inline const std::set<int64_t> VALID_DATA_SPACES = {
+        HAL_DATASPACE_UNKNOWN,
+        HAL_DATASPACE_ARBITRARY,
+        HAL_DATASPACE_STANDARD_UNSPECIFIED,
+        HAL_DATASPACE_STANDARD_BT709,
+        HAL_DATASPACE_STANDARD_BT601_625,
+        HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED,
+        HAL_DATASPACE_STANDARD_BT601_525,
+        HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED,
+        HAL_DATASPACE_STANDARD_BT2020,
+        HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE,
+        HAL_DATASPACE_STANDARD_BT470M,
+        HAL_DATASPACE_STANDARD_FILM,
+        HAL_DATASPACE_STANDARD_DCI_P3,
+        HAL_DATASPACE_STANDARD_ADOBE_RGB,
+        HAL_DATASPACE_TRANSFER_UNSPECIFIED,
+        HAL_DATASPACE_TRANSFER_LINEAR,
+        HAL_DATASPACE_TRANSFER_SRGB,
+        HAL_DATASPACE_TRANSFER_SMPTE_170M,
+        HAL_DATASPACE_TRANSFER_GAMMA2_2,
+        HAL_DATASPACE_TRANSFER_GAMMA2_6,
+        HAL_DATASPACE_TRANSFER_GAMMA2_8,
+        HAL_DATASPACE_TRANSFER_ST2084,
+        HAL_DATASPACE_TRANSFER_HLG,
+        HAL_DATASPACE_RANGE_UNSPECIFIED,
+        HAL_DATASPACE_RANGE_FULL,
+        HAL_DATASPACE_RANGE_LIMITED,
+        HAL_DATASPACE_RANGE_EXTENDED,
+        HAL_DATASPACE_SRGB_LINEAR,
+        HAL_DATASPACE_V0_SRGB_LINEAR,
+        HAL_DATASPACE_V0_SCRGB_LINEAR,
+        HAL_DATASPACE_SRGB,
+        HAL_DATASPACE_V0_SRGB,
+        HAL_DATASPACE_V0_SCRGB,
+        HAL_DATASPACE_JFIF,
+        HAL_DATASPACE_V0_JFIF,
+        HAL_DATASPACE_BT601_625,
+        HAL_DATASPACE_V0_BT601_625,
+        HAL_DATASPACE_BT601_525,
+        HAL_DATASPACE_V0_BT601_525,
+        HAL_DATASPACE_BT709,
+        HAL_DATASPACE_V0_BT709,
+        HAL_DATASPACE_DCI_P3_LINEAR,
+        HAL_DATASPACE_DCI_P3,
+        HAL_DATASPACE_DISPLAY_P3_LINEAR,
+        HAL_DATASPACE_DISPLAY_P3,
+        HAL_DATASPACE_ADOBE_RGB,
+        HAL_DATASPACE_BT2020_LINEAR,
+        HAL_DATASPACE_BT2020,
+        HAL_DATASPACE_BT2020_PQ,
+        HAL_DATASPACE_DEPTH,
+        HAL_DATASPACE_SENSOR,
+};
+
+// Stateless helper methods for parsing shared-session config xml elements and
+// validating parsed values against the VALID_* allow-lists above. All methods
+// are static; out-parameters are only written on STATUS_OK unless noted.
+class SharedSessionConfigUtils {
+public:
+
+    // toString function for ErrorCode enum.
+    static const char* toString(ErrorCode errorCode);
+
+    // Convert string representation of colorspace to its int value.
+    static ErrorCode getColorSpaceFromStr(const char* colorSpaceStr, int32_t* colorSpace);
+
+    // Convert string representation of surface type to its int value.
+    static ErrorCode getSurfaceTypeFromXml(const XMLElement* surfaceTypeXml, int64_t* surfaceType);
+
+    // Convert string representation of width to its int value.
+    static ErrorCode getWidthFromXml(const XMLElement* widthXml, int64_t* width);
+
+    // Convert string representation of height to its int value.
+    static ErrorCode getHeightFromXml(const XMLElement* heightXml, int64_t* height);
+
+    // Convert string representation of physical cameraId to its std::string value.
+    static ErrorCode getPhysicalCameraIdFromXml(const XMLElement* physicalCameraIdXml,
+                                                std::string* physicalCameraId);
+
+    // Convert string representation of stream use case to its int64 value.
+    static ErrorCode getStreamUseCaseFromXml(const XMLElement* streamUseCaseXml,
+                                             int64_t* streamUseCase);
+
+    // Convert string representation of timestamp base to its int value.
+    static ErrorCode getTimestampBaseFromXml(const XMLElement* timestampBaseXml,
+                                             int64_t* timestampBase);
+
+    // Convert string representation of mirror mode to its int value.
+    static ErrorCode getMirrorModeFromXml(const XMLElement* mirrorModeXml, int64_t* mirrorMode);
+
+    // Convert string representation of use readout timestamp to its bool value.
+    static ErrorCode getUseReadoutTimestampFromXml(const XMLElement* useReadoutTimestampXml,
+                                                   bool* useReadoutTimestamp);
+
+    // Convert string representation of format to its int value.
+    static ErrorCode getFormatFromXml(const XMLElement* formatXml, int64_t* format,
+                                      int64_t surfaceType);
+
+    // Convert string representation of usage to its int64 value.
+    static ErrorCode getUsageFromXml(const XMLElement* usageXml, int64_t* usage,
+                                     int64_t surfaceType);
+
+    // Convert string representation of data space to its int value.
+    static ErrorCode getDataSpaceFromXml(const XMLElement* dataSpaceXml, int64_t* dataSpace);
+
+    // Split inputString into delimiter-separated tokens, preserving order.
+    static std::vector<std::string> splitString(std::string inputString, char delimiter);
+
+    // Render a set as "{a, b, c}" for log messages.
+    static std::string setToString(const std::set<int64_t>& s);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_SERVERS_CAMERA_SHAREDSESSIONCONFIGUTILS_H_
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index fb8e5d0..4da892f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -89,10 +89,20 @@
 
 namespace android {
 
+namespace {
+
+bool shouldInjectFakeStream(const CameraMetadata& info) {
+    // Do not inject fake stream for a virtual camera (i.e., camera belonging to virtual devices),
+    // as it can handle zero streams properly.
+    return getDeviceId(info) == kDefaultDeviceId;
+}
+
+} // namespace
+
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string &id, bool overrideForPerfClass, int rotationOverride,
-        bool legacyClient):
+        bool isVendorClient, bool legacyClient):
         AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
@@ -100,6 +110,8 @@
         mOperatingMode(NO_MODE),
         mIsConstrainedHighSpeedConfiguration(false),
         mIsCompositeJpegRDisabled(false),
+        mIsCompositeHeicDisabled(false),
+        mIsCompositeHeicUltraHDRDisabled(false),
         mStatus(STATUS_UNINITIALIZED),
         mStatusWaiters(0),
         mUsePartialResult(false),
@@ -126,6 +138,9 @@
 {
     ATRACE_CALL();
     ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.c_str());
+    int callingUid = getCallingUid();
+    bool isCalledByNativeService = (callingUid == AID_MEDIA);
+    mIsNativeClient = isCalledByNativeService || isVendorClient;
 }
 
 Camera3Device::~Camera3Device()
@@ -258,7 +273,8 @@
 
     /** Start watchdog thread */
     mCameraServiceWatchdog = new CameraServiceWatchdog(
-            manager->getProviderPids(), mId, mCameraServiceProxyWrapper);
+            manager->getProviderPids(), mAttributionAndPermissionUtils->getCallingPid(),
+            mIsNativeClient, mId, mCameraServiceProxyWrapper);
     res = mCameraServiceWatchdog->run("CameraServiceWatchdog");
     if (res != OK) {
         SET_ERR_L("Unable to start camera service watchdog thread: %s (%d)",
@@ -2502,11 +2518,13 @@
 
     // Workaround for device HALv3.2 or older spec bug - zero streams requires
     // adding a fake stream instead.
-    // TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround.
-    if (mOutputStreams.size() == 0) {
-        addFakeStreamLocked();
-    } else {
-        tryRemoveFakeStreamLocked();
+    // TODO(b/17321404): Fix the HAL spec and remove this workaround.
+    if (shouldInjectFakeStream(mDeviceInfo)) {
+        if (mOutputStreams.size() == 0) {
+            addFakeStreamLocked();
+        } else {
+            tryRemoveFakeStreamLocked();
+        }
     }
 
     // Override stream use case based on "adb shell command"
@@ -2987,6 +3005,36 @@
     return retVal;
 }
 
+const sp<Camera3Device::CaptureRequest> Camera3Device::getOngoingRepeatingRequestLocked() {
+    ALOGV("%s", __FUNCTION__);
+
+    if (mRequestThread != NULL) {
+        return mRequestThread->getOngoingRepeatingRequest();
+    }
+
+    return nullptr;
+}
+
+status_t Camera3Device::updateOngoingRepeatingRequestLocked(const SurfaceMap& surfaceMap) {
+    ALOGV("%s", __FUNCTION__);
+
+    if (mRequestThread != NULL) {
+        return mRequestThread->updateOngoingRepeatingRequest(surfaceMap);
+    }
+
+    return INVALID_OPERATION;
+}
+
+int64_t Camera3Device::getRepeatingRequestLastFrameNumberLocked() {
+    ALOGV("%s", __FUNCTION__);
+
+    if (mRequestThread != NULL) {
+        return mRequestThread->getRepeatingRequestLastFrameNumber();
+    }
+
+    return hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
+}
+
 void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
         int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
         const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
@@ -3141,7 +3189,7 @@
         mRotationOverride(rotationOverride),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
-    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersion();
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -4251,6 +4299,60 @@
     return mLatestRequestInfo;
 }
 
+const sp<Camera3Device::CaptureRequest> Camera3Device::RequestThread::getOngoingRepeatingRequest() {
+    ATRACE_CALL();
+    Mutex::Autolock l(mRequestLock);
+
+    ALOGV("RequestThread::%s", __FUNCTION__);
+    if (mRepeatingRequests.empty()) {
+        return nullptr;
+    }
+
+    return *mRepeatingRequests.begin();
+}
+
+status_t Camera3Device::RequestThread::updateOngoingRepeatingRequest(const SurfaceMap& surfaceMap) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mRequestLock);
+    if (mRepeatingRequests.empty()) {
+        return INVALID_OPERATION;
+    }
+
+    sp<CaptureRequest> curRequest = *mRepeatingRequests.begin();
+    std::vector<int32_t> outputStreamIds;
+    Vector<sp<camera3::Camera3OutputStreamInterface>> outputStreams;
+    for (const auto& [key, value] : surfaceMap) {
+        outputStreamIds.push_back(key);
+    }
+    for (auto id : outputStreamIds) {
+        sp<Camera3Device> parent = mParent.promote();
+        if (parent == nullptr) {
+            ALOGE("%s: parent does not exist!", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+        sp<Camera3OutputStreamInterface> stream = parent->mOutputStreams.get(id);
+        if (stream == nullptr) {
+            CLOGE("Request references unknown stream %d",id);
+            return BAD_VALUE;
+        }
+        outputStreams.push(stream);
+    }
+    curRequest->mOutputStreams = outputStreams;
+    curRequest->mOutputSurfaces = surfaceMap;
+
+    ALOGV("RequestThread::%s", __FUNCTION__);
+    return OK;
+
+}
+
+int64_t Camera3Device::RequestThread::getRepeatingRequestLastFrameNumber() {
+    ATRACE_CALL();
+    Mutex::Autolock l(mRequestLock);
+
+    ALOGV("RequestThread::%s", __FUNCTION__);
+    return mRepeatingLastFrameNumber;
+}
+
 bool Camera3Device::RequestThread::isStreamPending(
         sp<Camera3StreamInterface>& stream) {
     ATRACE_CALL();
@@ -5830,4 +5932,4 @@
     return OK;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5d3c010..608161f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -38,6 +38,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "common/DepthPhotoProcessor.h"
+#include "common/FrameProcessorBase.h"
 #include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
 #include "device3/Camera3BufferManager.h"
@@ -92,7 +93,7 @@
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, int rotationOverride,
-            bool legacyClient = false);
+            bool isVendorClient, bool legacyClient = false);
 
     virtual ~Camera3Device();
     // Delete and optionally close native handles and clear the input vector afterward
@@ -122,6 +123,24 @@
     virtual status_t initialize(sp<CameraProviderManager> /*manager*/,
             const std::string& /*monitorTags*/) = 0;
 
+    static constexpr int32_t METADATA_QUEUE_SIZE = 1 << 20;
+
+    template <typename FMQType>
+    static size_t calculateFMQSize(const std::unique_ptr<FMQType> &fmq) {
+        if (fmq == nullptr) {
+            ALOGE("%s: result metadata queue hasn't been initialized", __FUNCTION__);
+            return METADATA_QUEUE_SIZE;
+        }
+        size_t quantumSize = fmq->getQuantumSize();
+        size_t quantumCount = fmq->getQuantumCount();
+        if ((quantumSize == 0) || (quantumCount == 0) ||
+                ((std::numeric_limits<size_t>::max() / quantumSize) < quantumCount)) {
+            ALOGE("%s: Error with FMQ quantum count / quantum size, quantum count %zu"
+                    "quantum count %zu", __FUNCTION__, quantumSize, quantumCount);
+            return METADATA_QUEUE_SIZE;
+        }
+        return fmq->getQuantumSize() * fmq->getQuantumCount();
+    }
     status_t disconnect() override;
     status_t dump(int fd, const Vector<String16> &args) override;
     status_t startWatchingTags(const std::string &tags) override;
@@ -130,6 +149,10 @@
     const CameraMetadata& info() const override;
     const CameraMetadata& infoPhysical(const std::string& physicalId) const override;
     bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
+    bool isCompositeHeicDisabled() const override { return mIsCompositeHeicDisabled; }
+    bool isCompositeHeicUltraHDRDisabled() const override {
+        return mIsCompositeHeicUltraHDRDisabled;
+    }
 
     // Capture and setStreamingRequest will configure streams if currently in
     // idle state
@@ -197,7 +220,7 @@
 
     virtual status_t beginConfigure() override {return OK;};
 
-    virtual status_t getSharedStreamId(const OutputConfiguration& /*config*/,
+    virtual status_t getSharedStreamId(const OutputStreamInfo& /*config*/,
             int* /*streamId*/) override {return INVALID_OPERATION;};
 
     virtual status_t addSharedSurfaces(int /*streamId*/,
@@ -208,6 +231,25 @@
     virtual status_t removeSharedSurfaces(int /*streamId*/,
             const std::vector<size_t>& /*surfaceIds*/) override {return INVALID_OPERATION;};
 
+    virtual status_t setSharedStreamingRequest(
+            const PhysicalCameraSettingsList& /*request*/, const SurfaceMap& /*surfaceMap*/,
+            int32_t* /*sharedReqID*/, int64_t* /*lastFrameNumber = NULL*/) override {
+        return INVALID_OPERATION;
+    };
+
+    virtual status_t clearSharedStreamingRequest(int64_t* /*lastFrameNumber = NULL*/) override {
+        return INVALID_OPERATION;
+    };
+
+    virtual status_t setSharedCaptureRequest(const PhysicalCameraSettingsList& /*request*/,
+            const SurfaceMap& /*surfaceMap*/, int32_t* /*sharedReqID*/,
+            int64_t* /*lastFrameNumber = NULL*/) override {return INVALID_OPERATION;};
+
+    virtual sp<camera2::FrameProcessorBase> getSharedFrameProcessor() override {return nullptr;};
+
+    virtual status_t startStreaming(const int32_t /*reqId*/, const SurfaceMap& /*surfaceMap*/,
+            int32_t* /*sharedReqID*/, int64_t* /*lastFrameNumber = NULL*/)
+            override {return INVALID_OPERATION;};
     status_t configureStreams(const CameraMetadata& sessionParams,
             int operatingMode =
             camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
@@ -226,7 +268,7 @@
     // Transitions to the idle state on success
     status_t waitUntilDrained() override;
 
-    status_t setNotifyCallback(wp<NotificationListener> listener) override;
+    virtual status_t setNotifyCallback(wp<NotificationListener> listener) override;
     bool     willNotify3A() override;
     status_t waitForNextFrame(nsecs_t timeout) override;
     status_t getNextResult(CaptureResult *frame) override;
@@ -596,6 +638,8 @@
     CameraMetadata             mDeviceInfo;
     bool                       mSupportNativeZoomRatio;
     bool                       mIsCompositeJpegRDisabled;
+    bool                       mIsCompositeHeicDisabled;
+    bool                       mIsCompositeHeicUltraHDRDisabled;
     std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
 
     CameraMetadata             mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -763,6 +807,22 @@
     };
 
     /**
+     * Get the first repeating request in the ongoing repeating request list.
+     */
+    const sp<CaptureRequest> getOngoingRepeatingRequestLocked();
+
+    /**
+     * Update the first repeating request in the ongoing repeating request list
+     * with the surface map provided.
+     */
+    status_t updateOngoingRepeatingRequestLocked(const SurfaceMap& surfaceMap);
+
+    /**
+     * Get the repeating request last frame number.
+     */
+    int64_t getRepeatingRequestLastFrameNumberLocked();
+
+    /**
      * Get the last request submitted to the hal by the request thread.
      *
      * Must be called with mLock held.
@@ -1067,6 +1127,20 @@
          **/
         void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
 
+        /**
+         * Get the first repeating request in the ongoing repeating request list.
+         */
+        const sp<CaptureRequest> getOngoingRepeatingRequest();
+
+        /**
+         * Update the first repeating request in the ongoing repeating request list
+         * with the surface map provided.
+         */
+        status_t updateOngoingRepeatingRequest(const SurfaceMap& surfaceMap);
+
+        // Get the repeating request last frame number.
+        int64_t getRepeatingRequestLastFrameNumber();
+
       protected:
 
         virtual bool threadLoop();
@@ -1574,6 +1648,10 @@
     // Flag to indicate that we shouldn't forward extension related metadata
     bool mSupportsExtensionKeys = false;
 
+    // If the client is a native client, either opened through vndk, or caling
+    // Pid is a platform service.
+    bool mIsNativeClient;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 999f563..0c77303 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -257,15 +257,8 @@
     mLastTimestamp = 0;
 
     if (mConsumer.get() == 0) {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        sp<BufferItemConsumer> bufferItemConsumer = new BufferItemConsumer(mUsage);
-        sp<IGraphicBufferProducer> producer =
-                bufferItemConsumer->getSurface()->getIGraphicBufferProducer();
-#else
-        sp<IGraphicBufferProducer> producer;
-        sp<IGraphicBufferConsumer> consumer;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        auto [bufferItemConsumer, surface] = BufferItemConsumer::create(mUsage);
+        sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
 
         int minUndequeuedBuffers = 0;
         res = producer->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
@@ -301,17 +294,15 @@
         mConsumer->setMaxAcquiredBufferCount(mTotalBufferCount);
 
 #if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
-        mSurface = mConsumer->getSurface();
+        mSurface = surface;
 #else
-        mProducer = mConsumer->getSurface()->getIGraphicBufferProducer();
+        mProducer = producer;
 #endif // WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
 
 #else
-        mConsumer = new BufferItemConsumer(consumer, mUsage,
-                                           mTotalBufferCount);
+        std::tie(mConsumer, surface) = BufferItemConsumer::create(mUsage, mTotalBufferCount);
+        mProducer = surface->getIGraphicBufferProducer();
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
-
-        mProducer = producer;
 #endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         mConsumer->setBufferFreedListener(this);
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 8f3249d..673b946 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -131,6 +131,18 @@
             dataSpace(_dataSpace), consumerUsage(_consumerUsage),
             sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
             streamUseCase(_streamUseCase), timestampBase(_timestampBase), colorSpace(_colorSpace) {}
+        bool operator == (const OutputStreamInfo& other) const {
+            return (width == other.width &&
+                    height == other.height &&
+                    format == other.format &&
+                    (other.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
+                    (dataSpace == other.dataSpace && consumerUsage == other.consumerUsage)) &&
+                    sensorPixelModesUsed == other.sensorPixelModesUsed &&
+                    dynamicRangeProfile == other.dynamicRangeProfile &&
+                    colorSpace == other.colorSpace &&
+                    streamUseCase == other.streamUseCase &&
+                    timestampBase == other.timestampBase);
+    }
 };
 
 // A holder containing a surface and its corresponding mirroring mode
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index a360abf..3e4470e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -95,18 +95,8 @@
     // the output's attachBuffer().
     mMaxConsumerBuffers++;
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mBufferItemConsumer = sp<BufferItemConsumer>::make(consumerUsage, mMaxConsumerBuffers);
-    mSurface = mBufferItemConsumer->getSurface();
-#else
-    // Create BufferQueue for input
-    sp<IGraphicBufferProducer> bqProducer;
-    sp<IGraphicBufferConsumer> bqConsumer;
-    BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
-
-    mBufferItemConsumer = new BufferItemConsumer(bqConsumer, consumerUsage, mMaxConsumerBuffers);
-    mSurface = new Surface(bqProducer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    std::tie(mBufferItemConsumer, mSurface) =
+            BufferItemConsumer::create(consumerUsage, mMaxConsumerBuffers);
 
     if (mBufferItemConsumer == nullptr) {
         return NO_MEMORY;
@@ -474,17 +464,18 @@
         mMutex.unlock();
         res = surface->attachBuffer(anb);
         mMutex.lock();
+        //During buffer attach 'mMutex' is not held which makes the removal of
+        //"surface" possible. Check whether this is the case and continue.
+        if (surface.get() == nullptr) {
+            res = OK;
+            continue;
+        }
         if (res != OK) {
             SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)", __FUNCTION__,
                     surface.get(), strerror(-res), res);
             // TODO: might need to detach/cleanup the already attached buffers before return?
             return res;
         }
-        //During buffer attach 'mMutex' is not held which makes the removal of
-        //"gbp" possible. Check whether this is the case and continue.
-        if (mHeldBuffers[surface] == nullptr) {
-            continue;
-        }
         mHeldBuffers[surface]->insert(gb);
         SP_LOGV("%s: Attached buffer %p on output %p.", __FUNCTION__, gb.get(), surface.get());
     }
@@ -772,4 +763,4 @@
 
 } // namespace android
 
-#endif  // USE_NEW_STREAM_SPLITTER
\ No newline at end of file
+#endif  // USE_NEW_STREAM_SPLITTER
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index e52e9a2..5db9550 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -174,9 +174,9 @@
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& id, bool overrideForPerfClass, int rotationOverride,
-        bool legacyClient) :
+        bool isVendorClient, bool legacyClient) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, rotationOverride, legacyClient) {
+                overrideForPerfClass, rotationOverride, isVendorClient, legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -215,6 +215,8 @@
     }
     mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId);
     mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId);
+    mIsCompositeHeicDisabled = manager->isCompositeHeicDisabled(mId);
+    mIsCompositeHeicUltraHDRDisabled = manager->isCompositeHeicUltraHDRDisabled(mId);
 
     std::vector<std::string> physicalCameraIds;
     bool isLogical = manager->isLogicalCamera(mId, &physicalCameraIds);
@@ -352,6 +354,10 @@
     return initializeCommonLocked(manager);
 }
 
+int32_t AidlCamera3Device::getCaptureResultFMQSize() {
+    return Camera3Device::calculateFMQSize<AidlResultMetadataQueue>(mResultMetadataQueue);
+}
+
 ::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::processCaptureResult(
             const std::vector<camera::device::CaptureResult>& results) {
     sp<AidlCamera3Device> p = mParent.promote();
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 474dfc7..1406114 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -43,7 +43,7 @@
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, int rotationOverride,
-            bool legacyClient = false);
+            bool isVendorClient, bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
 
@@ -74,6 +74,7 @@
 
     virtual status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags)
             override;
+    virtual int32_t getCaptureResultFMQSize() override;
 
     class AidlHalInterface : public Camera3Device::HalInterface {
      public:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
index 5bd8d8c..5b91a5e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
@@ -45,11 +45,13 @@
     } \
   } while (0)
 
+#include <gui/BufferItemConsumer.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <cstring>
 #include "../../common/aidl/AidlProviderInfo.h"
 #include "utils/SessionConfigurationUtils.h"
+
 #include "AidlCamera3SharedDevice.h"
 
 using namespace android::camera3;
@@ -57,26 +59,49 @@
 
 namespace android {
 
+class OpaqueConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+public:
+    OpaqueConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+    virtual void onFrameAvailable(const BufferItem&) {
+        sp<BufferItemConsumer> consumer = mConsumer.promote();
+        if (consumer == nullptr) {
+            return;
+        }
+        BufferItem item;
+        consumer->acquireBuffer(&item, 0);
+        consumer->releaseBuffer(item, Fence::NO_FENCE);
+    }
+    virtual void onFrameReplaced(const BufferItem&) {}
+    virtual void onFrameDequeued(const uint64_t) {}
+    virtual void onFrameCancelled(const uint64_t) {}
+    virtual void onFrameDetached(const uint64_t) {}
+
+    wp<BufferItemConsumer> mConsumer;
+};
+
 // Metadata android.info.availableSharedOutputConfigurations has list of shared output
 // configurations. Each output configuration has minimum of 11 entries of size long
 // followed by the physical camera id if present.
 // See android.info.availableSharedOutputConfigurations for details.
 static const int SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES = 11;
 std::map<std::string, sp<AidlCamera3SharedDevice>> AidlCamera3SharedDevice::sSharedDevices;
-std::map<std::string, std::unordered_set<int>> AidlCamera3SharedDevice::sClientsUid;
+std::map<std::string, std::unordered_set<int>> AidlCamera3SharedDevice::sClientsPid;
+Mutex AidlCamera3SharedDevice::sSharedClientsLock;
 sp<AidlCamera3SharedDevice> AidlCamera3SharedDevice::getInstance(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& id, bool overrideForPerfClass, int rotationOverride,
-        bool legacyClient) {
-    if (sClientsUid[id].empty()) {
+        bool isVendorClient, bool legacyClient) {
+    Mutex::Autolock l(sSharedClientsLock);
+    if (sClientsPid[id].empty()) {
         AidlCamera3SharedDevice* sharedDevice = new AidlCamera3SharedDevice(
                 cameraServiceProxyWrapper, attributionAndPermissionUtils, id, overrideForPerfClass,
-                rotationOverride, legacyClient);
+                rotationOverride, isVendorClient, legacyClient);
         sSharedDevices[id] = sharedDevice;
     }
     if (attributionAndPermissionUtils != nullptr) {
-        sClientsUid[id].insert(attributionAndPermissionUtils->getCallingUid());
+        sClientsPid[id].insert(attributionAndPermissionUtils->getCallingPid());
     }
     return sSharedDevices[id];
 }
@@ -85,22 +110,39 @@
         const std::string& monitorTags) {
     ATRACE_CALL();
     status_t res = OK;
-
+    Mutex::Autolock l(mSharedDeviceLock);
     if (mStatus == STATUS_UNINITIALIZED) {
         res = AidlCamera3Device::initialize(manager, monitorTags);
         if (res == OK) {
             mSharedOutputConfigurations = getSharedOutputConfiguration();
+            wp<NotificationListener> weakThis(this);
+            res = AidlCamera3Device::setNotifyCallback(weakThis);
+            if (res != OK) {
+                ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+                        __FUNCTION__, mId.c_str(), strerror(-res), res);
+                return res;
+            }
+            mFrameProcessor = new camera2::FrameProcessorBase(this);
+            std::string threadName = std::string("CDU-") + mId + "-FrameProc";
+            res = mFrameProcessor->run(threadName.c_str());
+            if (res != OK) {
+                ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                return res;
+            }
         }
     }
     return res;
 }
 
-status_t AidlCamera3SharedDevice::disconnectClient(int clientUid) {
-    if (sClientsUid[mId].erase(clientUid) == 0) {
+status_t AidlCamera3SharedDevice::disconnectClient(int clientPid) {
+    Mutex::Autolock l(mSharedDeviceLock);
+    if (sClientsPid[mId].erase(clientPid) == 0) {
         ALOGW("%s: Camera %s: Client %d is not connected to shared device", __FUNCTION__,
-                mId.c_str(), clientUid);
+                mId.c_str(), clientPid);
     }
-    if (sClientsUid[mId].empty()) {
+
+    if (sClientsPid[mId].empty()) {
         return Camera3Device::disconnect();
     }
     return OK;
@@ -108,11 +150,11 @@
 
 std::vector<OutputConfiguration> AidlCamera3SharedDevice::getSharedOutputConfiguration() {
     std::vector<OutputConfiguration> sharedConfigs;
-    uint8_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+    int32_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     camera_metadata_entry sharedSessionColorSpace = mDeviceInfo.find(
             ANDROID_SHARED_SESSION_COLOR_SPACE);
     if (sharedSessionColorSpace.count > 0) {
-        colorspace = *sharedSessionColorSpace.data.u8;
+        colorspace = *sharedSessionColorSpace.data.i32;
     }
     camera_metadata_entry sharedSessionConfigs = mDeviceInfo.find(
             ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS);
@@ -170,6 +212,7 @@
 }
 
 status_t AidlCamera3SharedDevice::beginConfigure() {
+    Mutex::Autolock l(mSharedDeviceLock);
     status_t res;
     int i = 0;
 
@@ -177,13 +220,19 @@
         return OK;
     }
 
+    mSharedSurfaces.clear();
+    mOpaqueConsumers.clear();
+    mSharedSurfaceIds.clear();
+    mStreamInfoMap.clear();
+
     for (auto config : mSharedOutputConfigurations) {
         std::vector<SurfaceHolder> consumers;
-        android_dataspace dataSpace;
+        android_dataspace dataspace = (android_dataspace)config.getDataspace();
+
         if (config.getColorSpace()
                 != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED
                 && config.getFormat() != HAL_PIXEL_FORMAT_BLOB) {
-            if (!dataSpaceFromColorSpace(&dataSpace, config.getColorSpace())) {
+            if (!dataSpaceFromColorSpace(&dataspace, config.getColorSpace())) {
                 std::string msg = fmt::sprintf("Camera %s: color space %d not supported, "
                     " failed to convert to data space", mId.c_str(), config.getColorSpace());
                 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -199,39 +248,47 @@
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return INVALID_OPERATION;
         }
-        sp<IGraphicBufferProducer> producer;
-        sp<IGraphicBufferConsumer> consumer;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mSharedSurfaces[i] = new Surface(producer);
+
+        auto [consumer, surface] = BufferItemConsumer::create(AHARDWAREBUFFER_USAGE_CAMERA_READ);
+        mOpaqueConsumers.push_back(consumer);
+        mSharedSurfaces.push_back(surface);
+
+        sp<OpaqueConsumerListener> consumerListener = sp<OpaqueConsumerListener>::make(
+                mOpaqueConsumers[i]);
+        mOpaqueConsumers[i]->setFrameAvailableListener(consumerListener);
         consumers.push_back({mSharedSurfaces[i], config.getMirrorMode()});
-        mSharedStreams[i] = new Camera3SharedOutputStream(mNextStreamId, consumers,
+        sp<Camera3SharedOutputStream> newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 config.getWidth(),config.getHeight(), config.getFormat(), config.getUsage(),
-                dataSpace, static_cast<camera_stream_rotation_t>(config.getRotation()),
+                dataspace, static_cast<camera_stream_rotation_t>(config.getRotation()),
                 mTimestampOffset, config.getPhysicalCameraId(), overriddenSensorPixelModes,
                 getTransportType(), config.getSurfaceSetID(), mUseHalBufManager,
                 config.getDynamicRangeProfile(), config.getStreamUseCase(),
                 mDeviceTimeBaseIsRealtime, config.getTimestampBase(),
                 config.getColorSpace(), config.useReadoutTimestamp());
-        int id = mSharedStreams[i]->getSurfaceId(consumers[0].mSurface);
+        int id = newStream->getSurfaceId(consumers[0].mSurface);
         if (id < 0) {
             SET_ERR_L("Invalid surface id");
             return BAD_VALUE;
         }
-        mSharedSurfaceIds[i] = id;
-        mSharedStreams[i]->setStatusTracker(mStatusTracker);
-        mSharedStreams[i]->setBufferManager(mBufferManager);
-        mSharedStreams[i]->setImageDumpMask(mImageDumpMask);
-        res = mOutputStreams.add(mNextStreamId, mSharedStreams[i]);
+        mSharedSurfaceIds.push_back(id);
+        newStream->setStatusTracker(mStatusTracker);
+        newStream->setBufferManager(mBufferManager);
+        newStream->setImageDumpMask(mImageDumpMask);
+        res = mOutputStreams.add(mNextStreamId, newStream);
         if (res < 0) {
             SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
             return res;
         }
         mSessionStatsBuilder.addStream(mNextStreamId);
-        mConfiguredOutputs.add(mNextStreamId++, config);
+        OutputStreamInfo streamInfo(config.getWidth(),config.getHeight(), config.getFormat(),
+                dataspace, config.getUsage(), overriddenSensorPixelModes,
+                config.getDynamicRangeProfile(), config.getStreamUseCase(),
+                config.getTimestampBase(), config.getColorSpace());
+        mStreamInfoMap[mNextStreamId++] = streamInfo;
         i++;
     }
     CameraMetadata sessionParams;
-    res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_SHARED_MODE);
+    res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_NORMAL_MODE);
     if (res != OK) {
         std::string msg = fmt::sprintf("Camera %s: Error configuring streams: %s (%d)",
                 mId.c_str(), strerror(-res), res);
@@ -241,15 +298,17 @@
     return OK;
 }
 
-status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputConfiguration &config,
+status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputStreamInfo &config,
         int *streamId) {
+    Mutex::Autolock l(mSharedDeviceLock);
     if (streamId ==  nullptr) {
         return BAD_VALUE;
     }
-    for (size_t i = 0 ; i < mConfiguredOutputs.size(); i++){
-        OutputConfiguration sharedConfig = mConfiguredOutputs.valueAt(i);
-        if (config.sharedConfigEqual(sharedConfig)) {
-            *streamId = mConfiguredOutputs.keyAt(i);
+
+    for (const auto& streamInfo : mStreamInfoMap) {
+        OutputStreamInfo info = streamInfo.second;
+        if (info == config) {
+            *streamId = streamInfo.first;
             return OK;
         }
     }
@@ -259,6 +318,7 @@
 status_t AidlCamera3SharedDevice::addSharedSurfaces(int streamId,
         const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
         const std::vector<SurfaceHolder> &surfaces,  std::vector<int> *surfaceIds) {
+    Mutex::Autolock l(mSharedDeviceLock);
     KeyedVector<sp<Surface>, size_t> outputMap;
     std::vector<size_t> removedSurfaceIds;
     status_t res;
@@ -274,7 +334,6 @@
               streamId, res, strerror(-res));
         return res;
     }
-
     for (size_t i = 0 ; i < outputMap.size(); i++){
         if (surfaceIds != nullptr) {
             surfaceIds->push_back(outputMap.valueAt(i));
@@ -285,6 +344,7 @@
 
 status_t AidlCamera3SharedDevice::removeSharedSurfaces(int streamId,
         const std::vector<size_t> &removedSurfaceIds) {
+    Mutex::Autolock l(mSharedDeviceLock);
     KeyedVector<sp<Surface>, size_t> outputMap;
     std::vector<SurfaceHolder> surfaces;
     std::vector<OutputStreamInfo> outputInfo;
@@ -303,4 +363,259 @@
     }
     return OK;
 }
+
+SurfaceMap AidlCamera3SharedDevice::mergeSurfaceMaps(const SurfaceMap& map1,
+        const SurfaceMap& map2) {
+    SurfaceMap mergedMap = map1;
+
+    for (const auto& [key, value] : map2) {
+        // If the key exists in map1, append the values
+        if (mergedMap.count(key) > 0) {
+            mergedMap[key].insert(mergedMap[key].end(), value.begin(), value.end());
+        } else {
+            // Otherwise, insert the key-value pair from map2
+            mergedMap[key] = value;
+        }
+    }
+    return mergedMap;
+}
+
+SurfaceMap AidlCamera3SharedDevice::removeClientSurfaceMap(const SurfaceMap& map1,
+        const SurfaceMap& map2) {
+    SurfaceMap resultMap = map1;
+
+    for (const auto& [key, value2] : map2) {
+        auto it1 = resultMap.find(key);
+        if (it1 != resultMap.end()) {
+            // Key exists in both maps, remove matching values
+            std::vector<size_t>& value1 = it1->second;
+            for (size_t val2 : value2) {
+                value1.erase(std::remove(value1.begin(), value1.end(), val2), value1.end());
+            }
+
+            // If the vector is empty after removing, remove the key
+            if (value1.empty()) {
+                resultMap.erase(it1);
+            }
+        }
+    }
+    return resultMap;
+}
+
+status_t AidlCamera3SharedDevice::setSharedStreamingRequest(
+        const CameraDeviceBase::PhysicalCameraSettingsList &clientSettings,
+        const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+        int64_t *lastFrameNumber) {
+    if ((sharedReqID == nullptr) || (lastFrameNumber == nullptr)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock l(mSharedDeviceLock);
+    auto requestIdEntry = clientSettings.begin()->metadata.find(ANDROID_REQUEST_ID);
+    if (requestIdEntry.count == 0) {
+        CLOGE("RequestID does not exist in metadata");
+        return BAD_VALUE;
+    }
+    int clientRequestId = requestIdEntry.data.i32[0];
+    CameraDeviceBase::PhysicalCameraSettingsList newSettings = clientSettings;
+    SurfaceMap newSurfaceMap = surfaceMap;
+    List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+    std::list<SurfaceMap> surfaceMaps;
+    int32_t requestID = mRequestIdCounter;
+    const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+
+    if (curRequest != nullptr) {
+        // If there is ongoing streaming going by secondary clients, then
+        // merge their surface map in the new repeating request.
+        newSurfaceMap = mergeSurfaceMaps(surfaceMap, curRequest->mOutputSurfaces);
+    }
+
+    std::vector<int32_t> outputStreamIds;
+    for (const auto& [key, value] : newSurfaceMap) {
+        outputStreamIds.push_back(key);
+    }
+    surfaceMaps.push_back(newSurfaceMap);
+    newSettings.begin()->metadata.update(ANDROID_REQUEST_ID, &requestID, /*size*/1);
+    mRequestIdCounter++;
+    newSettings.begin()->metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+            &outputStreamIds[0], outputStreamIds.size());
+    settingsList.push_back(newSettings);
+    status_t  err = setStreamingRequestList(settingsList, surfaceMaps, lastFrameNumber);
+    if (err != OK) {
+        CLOGE("Cannot start shared streaming request");
+        return err;
+    }
+    mStreamingRequestId = requestID;
+    int clientPid = mAttributionAndPermissionUtils->getCallingPid();
+    mClientRequestIds[clientPid] = clientRequestId;
+    mClientSurfaces[clientPid] = surfaceMap;
+    *sharedReqID = mStreamingRequestId;
+
+    return err;
+}
+
+status_t AidlCamera3SharedDevice::clearSharedStreamingRequest(int64_t *lastFrameNumber) {
+    Mutex::Autolock l(mSharedDeviceLock);
+    int clientPid = mAttributionAndPermissionUtils->getCallingPid();
+    const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+    if (curRequest == nullptr) {
+        CLOGE("No streaming ongoing");
+        return INVALID_OPERATION;
+    }
+
+    SurfaceMap newSurfaceMap;
+    newSurfaceMap = removeClientSurfaceMap(curRequest->mOutputSurfaces, mClientSurfaces[clientPid]);
+    mClientRequestIds.erase(clientPid);
+    mClientSurfaces.erase(clientPid);
+    if (newSurfaceMap.empty()) {
+        status_t err = clearStreamingRequest(lastFrameNumber);
+        if (err != OK) {
+            CLOGE("Error clearing streaming request");
+        }
+        return err;
+    }
+    *lastFrameNumber = getRepeatingRequestLastFrameNumberLocked();
+    return updateOngoingRepeatingRequestLocked(newSurfaceMap);
+}
+
+status_t AidlCamera3SharedDevice::setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+        const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber) {
+    Mutex::Autolock l(mSharedDeviceLock);
+    if (sharedReqID == nullptr) {
+        return BAD_VALUE;
+    }
+    CameraDeviceBase::PhysicalCameraSettingsList newRequest = request;
+    int newReqID = mRequestIdCounter;
+    List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+    std::list<SurfaceMap> surfaceMaps;
+    surfaceMaps.push_back(surfaceMap);
+    newRequest.begin()->metadata.update(ANDROID_REQUEST_ID, &newReqID, /*size*/1);
+    settingsList.push_back(newRequest);
+    mRequestIdCounter++;
+    status_t err = captureList(settingsList, surfaceMaps, lastFrameNumber);
+    if (err != OK) {
+        CLOGE("Cannot start shared capture request");
+        return err;
+    }
+    *sharedReqID = newReqID;
+
+    return err;
+}
+
+status_t AidlCamera3SharedDevice::startStreaming(const int32_t reqId, const SurfaceMap& surfaceMap,
+        int32_t* sharedReqID, int64_t* lastFrameNumber) {
+    ATRACE_CALL();
+
+    if ((sharedReqID == nullptr) || (lastFrameNumber ==  nullptr)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock l(mSharedDeviceLock);
+    const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+    if (curRequest != nullptr) {
+        // If there is already repeating request ongoing, attach the surfaces to
+        // the request.
+        SurfaceMap newSurfaceMap = mergeSurfaceMaps(surfaceMap, curRequest->mOutputSurfaces);
+        updateOngoingRepeatingRequestLocked(newSurfaceMap);
+        *lastFrameNumber = getRepeatingRequestLastFrameNumberLocked();
+    } else {
+        // If there is no ongoing repeating request, then send a default
+        // request with template preview.
+        std::vector<int32_t> outputStreamIds;
+        for (const auto& [key, value] : surfaceMap) {
+            outputStreamIds.push_back(key);
+        }
+
+        CameraMetadata previewTemplate;
+        status_t err = createDefaultRequest(CAMERA_TEMPLATE_PREVIEW, &previewTemplate);
+        if (err != OK) {
+            ALOGE("%s: Failed to create default PREVIEW request: %s (%d)",
+                    __FUNCTION__, strerror(-err), err);
+            return err;
+        }
+        int32_t requestID = mRequestIdCounter;
+        previewTemplate.update(ANDROID_REQUEST_ID, &requestID, /*size*/1);
+        mRequestIdCounter++;
+        previewTemplate.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
+                outputStreamIds.size());
+        CameraDeviceBase::PhysicalCameraSettingsList previewSettings;
+        previewSettings.push_back({mId, previewTemplate});
+
+        List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+        std::list<SurfaceMap> surfaceMaps;
+        settingsList.push_back(previewSettings);
+        surfaceMaps.push_back(surfaceMap);
+        err = setStreamingRequestList(settingsList, surfaceMaps, lastFrameNumber);
+        if (err != OK) {
+            CLOGE("Cannot start shared streaming request");
+            return err;
+        }
+        mStreamingRequestId = requestID;
+    }
+
+    int clientPid = mAttributionAndPermissionUtils->getCallingPid();
+    mClientRequestIds[clientPid] = reqId;
+    mClientSurfaces[clientPid] = surfaceMap;
+    *sharedReqID = mStreamingRequestId;
+    return OK;
+}
+
+status_t AidlCamera3SharedDevice::setNotifyCallback(wp<NotificationListener> listener) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mSharedDeviceLock);
+
+    if (listener == NULL) {
+        return BAD_VALUE;
+    }
+    mClientListeners[mAttributionAndPermissionUtils->getCallingPid()] = listener;
+    return OK;
+}
+
+void AidlCamera3SharedDevice::notifyError(
+        int32_t errorCode,
+        const CaptureResultExtras& resultExtras) {
+    for (auto clientListener : mClientListeners) {
+        sp<NotificationListener> listener = clientListener.second.promote();
+        if (listener != NULL) {
+            listener->notifyError(errorCode, resultExtras);
+        }
+    }
+}
+
+status_t AidlCamera3SharedDevice::notifyActive(float maxPreviewFps) {
+    Mutex::Autolock l(mSharedDeviceActiveLock);
+    for (auto activeClient : mClientRequestIds) {
+        sp<NotificationListener> listener =  mClientListeners[activeClient.first].promote();
+        if (listener != NULL) {
+            listener->notifyActive(maxPreviewFps);
+        }
+    }
+
+    return OK;
+}
+
+void  AidlCamera3SharedDevice::notifyIdle(int64_t requestCount, int64_t resultErrorCount,
+                                     bool deviceError,
+                                     std::pair<int32_t, int32_t> mostRequestedFpsRange,
+                                     const std::vector<hardware::CameraStreamStats>& stats) {
+    Mutex::Autolock l(mSharedDeviceActiveLock);
+    for (auto clientListener : mClientListeners) {
+        sp<NotificationListener> listener =  clientListener.second.promote();
+        if (listener != NULL) {
+            listener->notifyIdle(requestCount, resultErrorCount, deviceError, mostRequestedFpsRange,
+                    stats);
+        }
+    }
+}
+
+void  AidlCamera3SharedDevice::notifyShutter(const CaptureResultExtras& resultExtras,
+        nsecs_t timestamp) {
+    for (auto clientListener : mClientListeners) {
+        sp<NotificationListener> listener =  clientListener.second.promote();
+        if (listener != NULL) {
+            listener->notifyShutter(resultExtras, timestamp);
+        }
+    }
+}
+
 }
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
index b2ee2d6..1030d01 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
@@ -18,6 +18,7 @@
 #define ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
 
 #include <camera/camera2/OutputConfiguration.h>
+#include "common/FrameProcessorBase.h"
 #include "../Camera3SharedOutputStream.h"
 #include "AidlCamera3Device.h"
 namespace android {
@@ -27,40 +28,84 @@
  */
 using ::android::camera3::Camera3SharedOutputStream;
 class AidlCamera3SharedDevice :
-        public AidlCamera3Device {
+        public AidlCamera3Device,
+        public NotificationListener {
   public:
     static sp<AidlCamera3SharedDevice> getInstance(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, int rotationOverride,
-            bool legacyClient = false);
+            bool isVendorClient, bool legacyClient = false);
     status_t initialize(sp<CameraProviderManager> manager,
             const std::string& monitorTags) override;
-    status_t disconnectClient(int clientUid) override;
+    status_t disconnectClient(int clientPid) override;
     status_t beginConfigure() override;
-    status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) override;
+    status_t getSharedStreamId(const OutputStreamInfo &config, int *streamId) override;
     status_t addSharedSurfaces(int streamId,
             const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
             const std::vector<SurfaceHolder>& surfaces,
             std::vector<int> *surfaceIds = nullptr) override;
     status_t removeSharedSurfaces(int streamId,
             const std::vector<size_t> &surfaceIds) override;
+    status_t setSharedStreamingRequest(const PhysicalCameraSettingsList &request,
+            const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber = NULL)
+            override;
+    status_t clearSharedStreamingRequest(int64_t *lastFrameNumber = NULL) override;
+    status_t setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+            const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber = NULL)
+            override;
+    sp<camera2::FrameProcessorBase> getSharedFrameProcessor() override {return mFrameProcessor;};
+    status_t startStreaming(const int32_t reqId, const SurfaceMap &surfaceMap,
+            int32_t *sharedReqID, int64_t *lastFrameNumber = NULL);
+
+    status_t setNotifyCallback(wp<NotificationListener> listener) override;
+    virtual void notifyError(int32_t errorCode,
+                             const CaptureResultExtras &resultExtras) override;
+    virtual status_t notifyActive(float maxPreviewFps) override;
+    virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+            std::pair<int32_t, int32_t> mostRequestedFpsRange,
+            const std::vector<hardware::CameraStreamStats>& streamStats) override;
+    virtual void notifyShutter(const CaptureResultExtras &resultExtras,
+            nsecs_t timestamp) override;
+    virtual void notifyRequestQueueEmpty() {};
+    // Prepare api not supported for shared session
+    virtual void notifyPrepared(int /*streamId*/) {};
+    // Required only for API1
+    virtual void notifyAutoFocus(uint8_t /*newState*/, int /*triggerId*/) {};
+    virtual void notifyAutoExposure(uint8_t /*newState*/, int /*triggerId*/) {};
+    virtual void notifyAutoWhitebalance(uint8_t /*newState*/,
+            int /*triggerId*/) {};
+    virtual void notifyRepeatingRequestError(long /*lastFrameNumber*/) {};
   private:
     static std::map<std::string, sp<AidlCamera3SharedDevice>> sSharedDevices;
-    static std::map<std::string, std::unordered_set<int>> sClientsUid;
+    static std::map<std::string, std::unordered_set<int>> sClientsPid;
+    static Mutex sSharedClientsLock;
     AidlCamera3SharedDevice(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, int rotationOverride,
-            bool legacyClient)
+            bool isVendorClient, bool legacyClient)
         : AidlCamera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                  overrideForPerfClass, rotationOverride, legacyClient) {}
+                  overrideForPerfClass, rotationOverride, isVendorClient, legacyClient),
+        mStreamingRequestId(REQUEST_ID_NONE),
+        mRequestIdCounter(0) {}
     std::vector<OutputConfiguration> getSharedOutputConfiguration();
     std::vector<OutputConfiguration> mSharedOutputConfigurations;
     std::vector<int> mSharedSurfaceIds;
     std::vector<sp<Surface>> mSharedSurfaces;
-    std::vector<sp<Camera3SharedOutputStream>> mSharedStreams;
-    KeyedVector<int32_t, OutputConfiguration> mConfiguredOutputs;
+    std::vector<sp<BufferItemConsumer>> mOpaqueConsumers;
+    std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
+    // Streaming request ID
+    int32_t mStreamingRequestId;
+    static const int32_t REQUEST_ID_NONE = -1;
+    int32_t mRequestIdCounter;
+    std::unordered_map<int, int32_t> mClientRequestIds;
+    std::unordered_map<int, SurfaceMap> mClientSurfaces;
+    std::unordered_map<int, wp<NotificationListener>> mClientListeners;
+    SurfaceMap mergeSurfaceMaps(const SurfaceMap& map1, const SurfaceMap& map2);
+    SurfaceMap removeClientSurfaceMap(const SurfaceMap& map1, const SurfaceMap& map2);
+    Mutex mSharedDeviceLock;
+    sp<camera2::FrameProcessorBase> mFrameProcessor;
 }; // class AidlCamera3SharedDevice
 }; // namespace android
 #endif
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
index 41be9a4..31c9b89 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
@@ -80,19 +80,28 @@
         }
     }
 
-    // Create BufferQueue for input
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
     // Allocate 1 extra buffer to handle the case where all buffers are detached
     // from input, and attached to the outputs. In this case, the input queue's
     // dequeueBuffer can still allocate 1 extra buffer before being blocked by
     // the output's attachBuffer().
     mMaxConsumerBuffers++;
-    mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
-    if (mBufferItemConsumer == nullptr) {
-        return NO_MEMORY;
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+    res = mConsumer->setConsumerName(toString8(mConsumerName));
+    if (res != OK) {
+        SP_LOGE("%s: Failed to set consumer name: %s(%d)", __FUNCTION__, strerror(-res), res);
+        return res;
     }
-    mConsumer->setConsumerName(toString8(mConsumerName));
+    res = mConsumer->setConsumerUsageBits(consumerUsage);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to set consumer usage bits: %s(%d)", __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to set consumer max acquired buffer count: %s(%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
 
     *consumer = new Surface(mProducer);
     if (*consumer == nullptr) {
@@ -474,6 +483,12 @@
         mMutex.unlock();
         res = gbp->attachBuffer(&slot, gb);
         mMutex.lock();
+        // During buffer attach 'mMutex' is not held which makes the removal of
+        //"gbp" possible. Check whether this is the case and continue.
+        if (gbp.get() == nullptr) {
+            res = OK;
+            continue;
+        }
         if (res != OK) {
             SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)", __FUNCTION__,
                     gbp.get(), strerror(-res), res);
@@ -485,11 +500,6 @@
                     __FUNCTION__, slot, BufferQueue::NUM_BUFFER_SLOTS);
             return BAD_VALUE;
         }
-        // During buffer attach 'mMutex' is not held which makes the removal of
-        //"gbp" possible. Check whether this is the case and continue.
-        if (mOutputSlots[gbp] == nullptr) {
-            continue;
-        }
         auto& outputSlots = *mOutputSlots[gbp];
         if (static_cast<size_t>(slot + 1) > outputSlots.size()) {
             outputSlots.resize(slot + 1);
@@ -526,7 +536,8 @@
     uint64_t bufferId;
     if (bufferItem.mGraphicBuffer != nullptr) {
         mInputSlots[bufferItem.mSlot] = bufferItem;
-    } else if (bufferItem.mAcquireCalled) {
+    } else if (bufferItem.mAcquireCalled
+            && (mInputSlots[bufferItem.mSlot].mGraphicBuffer != nullptr)) {
         bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
         mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
     } else {
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
index 61b43a8..2907c6a 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
@@ -21,7 +21,6 @@
 
 #include <camera/CameraMetadata.h>
 
-#include <gui/BufferItemConsumer.h>
 #include <gui/IConsumerListener.h>
 #include <gui/Surface.h>
 
@@ -47,7 +46,7 @@
 // BufferQueue, where each buffer queued to the input is available to be
 // acquired by each of the outputs, and is able to be dequeued by the input
 // again only once all of the outputs have released it.
-class DeprecatedCamera3StreamSplitter : public BnConsumerListener {
+class DeprecatedCamera3StreamSplitter : public IConsumerListener {
   public:
     // Constructor
     DeprecatedCamera3StreamSplitter(bool useHalBufManager = false);
@@ -248,7 +247,6 @@
 
     sp<IGraphicBufferProducer> mProducer;
     sp<IGraphicBufferConsumer> mConsumer;
-    sp<BufferItemConsumer> mBufferItemConsumer;
     sp<Surface> mSurface;
 
     // Map graphic buffer ids -> buffer items
@@ -291,7 +289,7 @@
     std::atomic<status_t> mOnFrameAvailableRes{0};
 
     // Currently acquired input buffers
-    size_t mAcquiredInputBuffers;
+    size_t mAcquiredInputBuffers = 0;
 
     std::string mConsumerName;
 
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 6986d3c..2ebaefb 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -306,6 +306,10 @@
     return initializeCommonLocked(manager);
 }
 
+int32_t HidlCamera3Device::getCaptureResultFMQSize() {
+    return Camera3Device::calculateFMQSize<ResultMetadataQueue>(mResultMetadataQueue);
+}
+
 hardware::Return<void> HidlCamera3Device::requestStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
         requestStreamBuffers_cb _hidl_cb) {
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index bcc4d80..87dde8a 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -35,9 +35,9 @@
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& id, bool overrideForPerfClass, int rotationOverride,
-        bool legacyClient = false) :
+        bool isVendorClient, bool legacyClient = false) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, rotationOverride, legacyClient) { }
+                overrideForPerfClass, rotationOverride, isVendorClient, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -63,6 +63,8 @@
 
     status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags) override;
 
+    virtual int32_t getCaptureResultFMQSize() override;
+
     /**
      * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
      */
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index ec8da1a..b5f9a85 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -14,367 +14,10 @@
  * limitations under the License.
  */
 
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RingBufferConsumer"
-#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+#include <gui/Flags.h>
 
-#include <com_android_graphics_libgui_flags.h>
-#include <inttypes.h>
-
-#include <utils/Log.h>
-
-#include <camera/StringUtils.h>
-#include <com_android_graphics_libgui_flags.h>
-#include <gui/RingBufferConsumer.h>
-
-#define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.c_str(), ##__VA_ARGS__)
-#define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.c_str(), ##__VA_ARGS__)
-#define BI_LOGI(x, ...) ALOGI("[%s] " x, mName.c_str(), ##__VA_ARGS__)
-#define BI_LOGW(x, ...) ALOGW("[%s] " x, mName.c_str(), ##__VA_ARGS__)
-#define BI_LOGE(x, ...) ALOGE("[%s] " x, mName.c_str(), ##__VA_ARGS__)
-
-#undef assert
-#define assert(x) ALOG_ASSERT((x), #x)
-
-typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
-
-namespace android {
-
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
-    : ConsumerBase(), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include "flagged_files/RingBufferConsumer.inc"
 #else
-RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
-                                       uint64_t consumerUsage, int bufferCount)
-    : ConsumerBase(consumer), mBufferCount(bufferCount), mLatestTimestamp(0) {
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    mConsumer->setConsumerUsageBits(consumerUsage);
-    mConsumer->setMaxAcquiredBufferCount(bufferCount);
-
-    assert(bufferCount > 0);
-}
-
-RingBufferConsumer::~RingBufferConsumer() {
-}
-
-void RingBufferConsumer::setName(const std::string& name) {
-    Mutex::Autolock _l(mMutex);
-    mName = toString8(name);
-    mConsumer->setConsumerName(mName);
-}
-
-sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
-        const RingBufferComparator& filter,
-        bool waitForFence) {
-
-    sp<PinnedBufferItem> pinnedBuffer;
-
-    {
-        List<RingBufferItem>::iterator it, end, accIt;
-        BufferInfo acc, cur;
-        BufferInfo* accPtr = NULL;
-
-        Mutex::Autolock _l(mMutex);
-
-        for (it = mBufferItemList.begin(), end = mBufferItemList.end();
-             it != end;
-             ++it) {
-
-            const RingBufferItem& item = *it;
-
-            cur.mCrop = item.mCrop;
-            cur.mTransform = item.mTransform;
-            cur.mScalingMode = item.mScalingMode;
-            cur.mTimestamp = item.mTimestamp;
-            cur.mFrameNumber = item.mFrameNumber;
-            cur.mPinned = item.mPinCount > 0;
-
-            int ret = filter.compare(accPtr, &cur);
-
-            if (ret == 0) {
-                accPtr = NULL;
-            } else if (ret > 0) {
-                acc = cur;
-                accPtr = &acc;
-                accIt = it;
-            } // else acc = acc
-        }
-
-        if (!accPtr) {
-            return NULL;
-        }
-
-        pinnedBuffer = new PinnedBufferItem(this, *accIt);
-        pinBufferLocked(pinnedBuffer->getBufferItem());
-
-    } // end scope of mMutex autolock
-
-    if (waitForFence) {
-        status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(
-                "RingBufferConsumer::pinSelectedBuffer");
-        if (err != OK) {
-            BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)",
-                    strerror(-err), err);
-        }
-    }
-
-    return pinnedBuffer;
-}
-
-status_t RingBufferConsumer::clear() {
-
-    status_t err;
-    Mutex::Autolock _l(mMutex);
-
-    BI_LOGV("%s", __FUNCTION__);
-
-    // Avoid annoying log warnings by returning early
-    if (mBufferItemList.size() == 0) {
-        return OK;
-    }
-
-    do {
-        size_t pinnedFrames = 0;
-        err = releaseOldestBufferLocked(&pinnedFrames);
-
-        if (err == NO_BUFFER_AVAILABLE) {
-            assert(pinnedFrames == mBufferItemList.size());
-            break;
-        }
-
-        if (err == NOT_ENOUGH_DATA) {
-            // Fine. Empty buffer item list.
-            break;
-        }
-
-        if (err != OK) {
-            BI_LOGE("Clear failed, could not release buffer");
-            return err;
-        }
-
-    } while(true);
-
-    return OK;
-}
-
-nsecs_t RingBufferConsumer::getLatestTimestamp() {
-    Mutex::Autolock _l(mMutex);
-    if (mBufferItemList.size() == 0) {
-        return 0;
-    }
-    return mLatestTimestamp;
-}
-
-void RingBufferConsumer::pinBufferLocked(const BufferItem& item) {
-    List<RingBufferItem>::iterator it, end;
-
-    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
-         it != end;
-         ++it) {
-
-        RingBufferItem& find = *it;
-        if (item.mGraphicBuffer == find.mGraphicBuffer) {
-            find.mPinCount++;
-            break;
-        }
-    }
-
-    if (it == end) {
-        BI_LOGE("Failed to pin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
-                 item.mTimestamp, item.mFrameNumber);
-    } else {
-        BI_LOGV("Pinned buffer (frame %" PRIu64 ", timestamp %" PRId64 ")",
-                item.mFrameNumber, item.mTimestamp);
-    }
-}
-
-status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
-    status_t err = OK;
-
-    List<RingBufferItem>::iterator it, end, accIt;
-
-    it = mBufferItemList.begin();
-    end = mBufferItemList.end();
-    accIt = end;
-
-    if (it == end) {
-        /**
-         * This is fine. We really care about being able to acquire a buffer
-         * successfully after this function completes, not about it releasing
-         * some buffer.
-         */
-        BI_LOGV("%s: No buffers yet acquired, can't release anything",
-              __FUNCTION__);
-        return NOT_ENOUGH_DATA;
-    }
-
-    for (; it != end; ++it) {
-        RingBufferItem& find = *it;
-
-        if (find.mPinCount > 0) {
-            if (pinnedFrames != NULL) {
-                ++(*pinnedFrames);
-            }
-            // Filter out pinned frame when searching for buffer to release
-            continue;
-        }
-
-        if (find.mTimestamp < accIt->mTimestamp || accIt == end) {
-            accIt = it;
-        }
-    }
-
-    if (accIt != end) {
-        RingBufferItem& item = *accIt;
-
-        // In case the object was never pinned, pass the acquire fence
-        // back to the release fence. If the fence was already waited on,
-        // it'll just be a no-op to wait on it again.
-
-        // item.mGraphicBuffer was populated with the proper graphic-buffer
-        // at acquire even if it was previously acquired
-        err = addReleaseFenceLocked(item.mSlot,
-                item.mGraphicBuffer, item.mFence);
-
-        if (err != OK) {
-            BI_LOGE("Failed to add release fence to buffer "
-                    "(timestamp %" PRId64 ", framenumber %" PRIu64,
-                    item.mTimestamp, item.mFrameNumber);
-            return err;
-        }
-
-        BI_LOGV("Attempting to release buffer timestamp %" PRId64 ", frame %" PRIu64,
-                item.mTimestamp, item.mFrameNumber);
-
-        // item.mGraphicBuffer was populated with the proper graphic-buffer
-        // at acquire even if it was previously acquired
-        err = releaseBufferLocked(item.mSlot, item.mGraphicBuffer);
-        if (err != OK) {
-            BI_LOGE("Failed to release buffer: %s (%d)",
-                    strerror(-err), err);
-            return err;
-        }
-
-        BI_LOGV("Buffer timestamp %" PRId64 ", frame %" PRIu64 " evicted",
-                item.mTimestamp, item.mFrameNumber);
-
-        mBufferItemList.erase(accIt);
-    } else {
-        BI_LOGW("All buffers pinned, could not find any to release");
-        return NO_BUFFER_AVAILABLE;
-
-    }
-
-    return OK;
-}
-
-void RingBufferConsumer::onFrameAvailable(const BufferItem& item) {
-    status_t err;
-
-    {
-        Mutex::Autolock _l(mMutex);
-
-        /**
-         * Release oldest frame
-         */
-        if (mBufferItemList.size() >= (size_t)mBufferCount) {
-            err = releaseOldestBufferLocked(/*pinnedFrames*/NULL);
-            assert(err != NOT_ENOUGH_DATA);
-
-            // TODO: implement the case for NO_BUFFER_AVAILABLE
-            assert(err != NO_BUFFER_AVAILABLE);
-            if (err != OK) {
-                return;
-            }
-            // TODO: in unpinBuffer rerun this routine if we had buffers
-            // we could've locked but didn't because there was no space
-        }
-
-        RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(),
-                                                       RingBufferItem());
-
-        /**
-         * Acquire new frame
-         */
-        err = acquireBufferLocked(&item, 0);
-        if (err != OK) {
-            if (err != NO_BUFFER_AVAILABLE) {
-                BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err);
-            }
-
-            mBufferItemList.erase(--mBufferItemList.end());
-            return;
-        }
-
-        BI_LOGV("New buffer acquired (timestamp %" PRId64 "), "
-                "buffer items %zu out of %d",
-                item.mTimestamp,
-                mBufferItemList.size(), mBufferCount);
-
-        if (item.mTimestamp < mLatestTimestamp) {
-            BI_LOGE("Timestamp  decreases from %" PRId64 " to %" PRId64,
-                    mLatestTimestamp, item.mTimestamp);
-        }
-
-        mLatestTimestamp = item.mTimestamp;
-
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_RING_BUFFER)
-        item.mGraphicBuffer = mSlots[item.mSlot].mGraphicBuffer;
-#endif
-    } // end of mMutex lock
-
-    ConsumerBase::onFrameAvailable(item);
-}
-
-void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
-    Mutex::Autolock _l(mMutex);
-
-    List<RingBufferItem>::iterator it, end, accIt;
-
-    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
-         it != end;
-         ++it) {
-
-        RingBufferItem& find = *it;
-        if (item.mGraphicBuffer == find.mGraphicBuffer) {
-            status_t res = addReleaseFenceLocked(item.mSlot,
-                    item.mGraphicBuffer, item.mFence);
-
-            if (res != OK) {
-                BI_LOGE("Failed to add release fence to buffer "
-                        "(timestamp %" PRId64 ", framenumber %" PRIu64,
-                        item.mTimestamp, item.mFrameNumber);
-                return;
-            }
-
-            find.mPinCount--;
-            break;
-        }
-    }
-
-    if (it == end) {
-        // This should never happen. If it happens, we have a bug.
-        BI_LOGE("Failed to unpin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
-                 item.mTimestamp, item.mFrameNumber);
-    } else {
-        BI_LOGV("Unpinned buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
-                 item.mTimestamp, item.mFrameNumber);
-    }
-}
-
-status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
-    Mutex::Autolock _l(mMutex);
-    return mConsumer->setDefaultBufferSize(w, h);
-}
-
-status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
-    Mutex::Autolock _l(mMutex);
-    return mConsumer->setDefaultBufferFormat(defaultFormat);
-}
-
-status_t RingBufferConsumer::setConsumerUsage(uint64_t usage) {
-    Mutex::Autolock _l(mMutex);
-    return mConsumer->setConsumerUsageBits(usage);
-}
-
-} // namespace android
+#include "flagged_files/DeprecatedRingBufferConsumer.inc"
+#endif
\ No newline at end of file
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 9fdc996..3161533 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -21,6 +21,10 @@
 #include <gui/BufferItem.h>
 #include <gui/BufferQueue.h>
 #include <gui/ConsumerBase.h>
+#include <gui/Flags.h> // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/BufferItemConsumer.h>
+#endif
 
 #include <utils/List.h>
 
@@ -46,8 +50,13 @@
  *  - If all the buffers get filled or pinned then there will be no empty
  *    buffers left, so the producer will block on dequeue.
  */
-class RingBufferConsumer : public ConsumerBase,
+class RingBufferConsumer
+#if not WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                         : public ConsumerBase,
                            public ConsumerBase::FrameAvailableListener
+#else
+                         : public BufferItemConsumer::FrameAvailableListener
+#endif
 {
   public:
     typedef ConsumerBase::FrameAvailableListener FrameAvailableListener;
@@ -161,11 +170,21 @@
 
     // Return 0 if RingBuffer is empty, otherwise return timestamp of latest buffer.
     nsecs_t getLatestTimestamp();
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    sp<Surface> getSurface() const;
+#endif
 
   private:
 
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    Mutex mMutex;
+
+    sp<Surface> mSurface;
+    sp<BufferItemConsumer> mBufferItemConsumer;
+#endif
+
     // Override ConsumerBase::onFrameAvailable
-    virtual void onFrameAvailable(const BufferItem& item);
+    virtual void onFrameAvailable(const BufferItem& item) override;
 
     void pinBufferLocked(const BufferItem& item);
     void unpinBuffer(const BufferItem& item);
diff --git a/services/camera/libcameraservice/gui/flagged_files/DeprecatedRingBufferConsumer.inc b/services/camera/libcameraservice/gui/flagged_files/DeprecatedRingBufferConsumer.inc
new file mode 100644
index 0000000..8c6afde
--- /dev/null
+++ b/services/camera/libcameraservice/gui/flagged_files/DeprecatedRingBufferConsumer.inc
@@ -0,0 +1,380 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RingBufferConsumer"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <com_android_graphics_libgui_flags.h>
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/RingBufferConsumer.h>
+#include <gui/Flags.h>
+
+#define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.c_str(), ##__VA_ARGS__)
+#define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.c_str(), ##__VA_ARGS__)
+#define BI_LOGI(x, ...) ALOGI("[%s] " x, mName.c_str(), ##__VA_ARGS__)
+#define BI_LOGW(x, ...) ALOGW("[%s] " x, mName.c_str(), ##__VA_ARGS__)
+#define BI_LOGE(x, ...) ALOGE("[%s] " x, mName.c_str(), ##__VA_ARGS__)
+
+#undef assert
+#define assert(x) ALOG_ASSERT((x), #x)
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
+namespace android {
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
+    :
+    ConsumerBase(), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#else
+RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
+                                       uint64_t consumerUsage, int bufferCount)
+    :
+    ConsumerBase(consumer), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mConsumer->setConsumerUsageBits(consumerUsage);
+    mConsumer->setMaxAcquiredBufferCount(bufferCount);
+    assert(bufferCount > 0);
+}
+
+RingBufferConsumer::~RingBufferConsumer() {
+}
+
+void RingBufferConsumer::setName(const std::string& name) {
+    Mutex::Autolock _l(mMutex);
+    mName = toString8(name);
+    mConsumer->setConsumerName(mName);
+}
+
+sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
+        const RingBufferComparator& filter,
+        bool waitForFence) {
+
+    sp<PinnedBufferItem> pinnedBuffer;
+
+    {
+        List<RingBufferItem>::iterator it, end, accIt;
+        BufferInfo acc, cur;
+        BufferInfo* accPtr = NULL;
+
+        Mutex::Autolock _l(mMutex);
+
+        for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+             it != end;
+             ++it) {
+
+            const RingBufferItem& item = *it;
+
+            cur.mCrop = item.mCrop;
+            cur.mTransform = item.mTransform;
+            cur.mScalingMode = item.mScalingMode;
+            cur.mTimestamp = item.mTimestamp;
+            cur.mFrameNumber = item.mFrameNumber;
+            cur.mPinned = item.mPinCount > 0;
+
+            int ret = filter.compare(accPtr, &cur);
+
+            if (ret == 0) {
+                accPtr = NULL;
+            } else if (ret > 0) {
+                acc = cur;
+                accPtr = &acc;
+                accIt = it;
+            } // else acc = acc
+        }
+
+        if (!accPtr) {
+            return NULL;
+        }
+
+        pinnedBuffer = new PinnedBufferItem(this, *accIt);
+        pinBufferLocked(pinnedBuffer->getBufferItem());
+
+    } // end scope of mMutex autolock
+
+    if (waitForFence) {
+        status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(
+                "RingBufferConsumer::pinSelectedBuffer");
+        if (err != OK) {
+            BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+
+    return pinnedBuffer;
+}
+
+status_t RingBufferConsumer::clear() {
+
+    status_t err;
+    Mutex::Autolock _l(mMutex);
+
+    BI_LOGV("%s", __FUNCTION__);
+
+    // Avoid annoying log warnings by returning early
+    if (mBufferItemList.size() == 0) {
+        return OK;
+    }
+
+    do {
+        size_t pinnedFrames = 0;
+        err = releaseOldestBufferLocked(&pinnedFrames);
+
+        if (err == NO_BUFFER_AVAILABLE) {
+            assert(pinnedFrames == mBufferItemList.size());
+            break;
+        }
+
+        if (err == NOT_ENOUGH_DATA) {
+            // Fine. Empty buffer item list.
+            break;
+        }
+
+        if (err != OK) {
+            BI_LOGE("Clear failed, could not release buffer");
+            return err;
+        }
+
+    } while(true);
+
+    return OK;
+}
+
+nsecs_t RingBufferConsumer::getLatestTimestamp() {
+    Mutex::Autolock _l(mMutex);
+    if (mBufferItemList.size() == 0) {
+        return 0;
+    }
+    return mLatestTimestamp;
+}
+
+void RingBufferConsumer::pinBufferLocked(const BufferItem& item) {
+    List<RingBufferItem>::iterator it, end;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            find.mPinCount++;
+            break;
+        }
+    }
+
+    if (it == end) {
+        BI_LOGE("Failed to pin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    } else {
+        BI_LOGV("Pinned buffer (frame %" PRIu64 ", timestamp %" PRId64 ")",
+                item.mFrameNumber, item.mTimestamp);
+    }
+}
+
+status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
+    status_t err = OK;
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    it = mBufferItemList.begin();
+    end = mBufferItemList.end();
+    accIt = end;
+
+    if (it == end) {
+        /**
+         * This is fine. We really care about being able to acquire a buffer
+         * successfully after this function completes, not about it releasing
+         * some buffer.
+         */
+        BI_LOGV("%s: No buffers yet acquired, can't release anything",
+              __FUNCTION__);
+        return NOT_ENOUGH_DATA;
+    }
+
+    for (; it != end; ++it) {
+        RingBufferItem& find = *it;
+
+        if (find.mPinCount > 0) {
+            if (pinnedFrames != NULL) {
+                ++(*pinnedFrames);
+            }
+            // Filter out pinned frame when searching for buffer to release
+            continue;
+        }
+
+        if (find.mTimestamp < accIt->mTimestamp || accIt == end) {
+            accIt = it;
+        }
+    }
+
+    if (accIt != end) {
+        RingBufferItem& item = *accIt;
+
+        // In case the object was never pinned, pass the acquire fence
+        // back to the release fence. If the fence was already waited on,
+        // it'll just be a no-op to wait on it again.
+
+        // item.mGraphicBuffer was populated with the proper graphic-buffer
+        // at acquire even if it was previously acquired
+        err = addReleaseFenceLocked(item.mSlot,
+                item.mGraphicBuffer, item.mFence);
+
+        if (err != OK) {
+            BI_LOGE("Failed to add release fence to buffer "
+                    "(timestamp %" PRId64 ", framenumber %" PRIu64,
+                    item.mTimestamp, item.mFrameNumber);
+            return err;
+        }
+
+        BI_LOGV("Attempting to release buffer timestamp %" PRId64 ", frame %" PRIu64,
+                item.mTimestamp, item.mFrameNumber);
+
+        // item.mGraphicBuffer was populated with the proper graphic-buffer
+        // at acquire even if it was previously acquired
+        err = releaseBufferLocked(item.mSlot, item.mGraphicBuffer);
+        if (err != OK) {
+            BI_LOGE("Failed to release buffer: %s (%d)",
+                    strerror(-err), err);
+            return err;
+        }
+
+        BI_LOGV("Buffer timestamp %" PRId64 ", frame %" PRIu64 " evicted",
+                item.mTimestamp, item.mFrameNumber);
+
+        mBufferItemList.erase(accIt);
+    } else {
+        BI_LOGW("All buffers pinned, could not find any to release");
+        return NO_BUFFER_AVAILABLE;
+
+    }
+
+    return OK;
+}
+
+void RingBufferConsumer::onFrameAvailable(const BufferItem& item) {
+    status_t err;
+
+    {
+        Mutex::Autolock _l(mMutex);
+
+        /**
+         * Release oldest frame
+         */
+        if (mBufferItemList.size() >= (size_t)mBufferCount) {
+            err = releaseOldestBufferLocked(/*pinnedFrames*/NULL);
+            assert(err != NOT_ENOUGH_DATA);
+
+            // TODO: implement the case for NO_BUFFER_AVAILABLE
+            assert(err != NO_BUFFER_AVAILABLE);
+            if (err != OK) {
+                return;
+            }
+            // TODO: in unpinBuffer rerun this routine if we had buffers
+            // we could've locked but didn't because there was no space
+        }
+
+        RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(),
+                                                       RingBufferItem());
+
+        /**
+         * Acquire new frame
+         */
+        err = acquireBufferLocked(&item, 0);
+        if (err != OK) {
+            if (err != NO_BUFFER_AVAILABLE) {
+                BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err);
+            }
+
+            mBufferItemList.erase(--mBufferItemList.end());
+            return;
+        }
+
+        BI_LOGV("New buffer acquired (timestamp %" PRId64 "), "
+                "buffer items %zu out of %d",
+                item.mTimestamp,
+                mBufferItemList.size(), mBufferCount);
+
+        if (item.mTimestamp < mLatestTimestamp) {
+            BI_LOGE("Timestamp  decreases from %" PRId64 " to %" PRId64,
+                    mLatestTimestamp, item.mTimestamp);
+        }
+
+        mLatestTimestamp = item.mTimestamp;
+
+        item.mGraphicBuffer = mSlots[item.mSlot].mGraphicBuffer;
+    } // end of mMutex lock
+
+    ConsumerBase::onFrameAvailable(item);
+}
+
+void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
+    Mutex::Autolock _l(mMutex);
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            status_t res = addReleaseFenceLocked(item.mSlot,
+                    item.mGraphicBuffer, item.mFence);
+
+            if (res != OK) {
+                BI_LOGE("Failed to add release fence to buffer "
+                        "(timestamp %" PRId64 ", framenumber %" PRIu64,
+                        item.mTimestamp, item.mFrameNumber);
+                return;
+            }
+
+            find.mPinCount--;
+            break;
+        }
+    }
+
+    if (it == end) {
+        // This should never happen. If it happens, we have a bug.
+        BI_LOGE("Failed to unpin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    } else {
+        BI_LOGV("Unpinned buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    }
+}
+
+status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
+    Mutex::Autolock _l(mMutex);
+    return mConsumer->setDefaultBufferSize(w, h);
+}
+
+status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
+    Mutex::Autolock _l(mMutex);
+    return mConsumer->setDefaultBufferFormat(defaultFormat);
+}
+
+status_t RingBufferConsumer::setConsumerUsage(uint64_t usage) {
+    Mutex::Autolock _l(mMutex);
+    return mConsumer->setConsumerUsageBits(usage);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/camera/libcameraservice/gui/flagged_files/RingBufferConsumer.inc b/services/camera/libcameraservice/gui/flagged_files/RingBufferConsumer.inc
new file mode 100644
index 0000000..f30669d
--- /dev/null
+++ b/services/camera/libcameraservice/gui/flagged_files/RingBufferConsumer.inc
@@ -0,0 +1,358 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RingBufferConsumer"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <inttypes.h>
+#include <tuple>
+
+#include <utils/Log.h>
+
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/Flags.h>
+#include <gui/RingBufferConsumer.h>
+#include <gui/Surface.h>
+
+#define BI_LOGV(x, ...) ALOGV(" " x, ##__VA_ARGS__)
+#define BI_LOGD(x, ...) ALOGD(" " x, ##__VA_ARGS__)
+#define BI_LOGI(x, ...) ALOGI(" " x, ##__VA_ARGS__)
+#define BI_LOGW(x, ...) ALOGW(" " x, ##__VA_ARGS__)
+#define BI_LOGE(x, ...) ALOGE(" " x, ##__VA_ARGS__)
+
+#undef assert
+#define assert(x) ALOG_ASSERT((x), #x)
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
+namespace android {
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
+    : mBufferCount(bufferCount), mLatestTimestamp(0) {
+#else
+RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
+                                       uint64_t consumerUsage, int bufferCount)
+    : mBufferCount(bufferCount), mLatestTimestamp(0) {
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    std::tie(mBufferItemConsumer, mSurface) =
+            BufferItemConsumer::create(consumerUsage, bufferCount);
+    assert(bufferCount > 0);
+}
+
+RingBufferConsumer::~RingBufferConsumer() {
+}
+
+void RingBufferConsumer::setName(const std::string& name) {
+    String8 name8(name.c_str());
+    mBufferItemConsumer->setName(name8);
+}
+
+sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
+        const RingBufferComparator& filter,
+        bool waitForFence) {
+
+    sp<PinnedBufferItem> pinnedBuffer;
+
+    {
+        List<RingBufferItem>::iterator it, end, accIt;
+        BufferInfo acc, cur;
+        BufferInfo* accPtr = NULL;
+
+        Mutex::Autolock _l(mMutex);
+
+        for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+             it != end;
+             ++it) {
+
+            const RingBufferItem& item = *it;
+
+            cur.mCrop = item.mCrop;
+            cur.mTransform = item.mTransform;
+            cur.mScalingMode = item.mScalingMode;
+            cur.mTimestamp = item.mTimestamp;
+            cur.mFrameNumber = item.mFrameNumber;
+            cur.mPinned = item.mPinCount > 0;
+
+            int ret = filter.compare(accPtr, &cur);
+
+            if (ret == 0) {
+                accPtr = NULL;
+            } else if (ret > 0) {
+                acc = cur;
+                accPtr = &acc;
+                accIt = it;
+            } // else acc = acc
+        }
+
+        if (!accPtr) {
+            return NULL;
+        }
+
+        pinnedBuffer = new PinnedBufferItem(this, *accIt);
+        pinBufferLocked(pinnedBuffer->getBufferItem());
+
+    } // end scope of mMutex autolock
+
+    if (waitForFence) {
+        status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(
+                "RingBufferConsumer::pinSelectedBuffer");
+        if (err != OK) {
+            BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+
+    return pinnedBuffer;
+}
+
+status_t RingBufferConsumer::clear() {
+
+    status_t err;
+    Mutex::Autolock _l(mMutex);
+
+    BI_LOGV("%s", __FUNCTION__);
+
+    // Avoid annoying log warnings by returning early
+    if (mBufferItemList.size() == 0) {
+        return OK;
+    }
+
+    do {
+        size_t pinnedFrames = 0;
+        err = releaseOldestBufferLocked(&pinnedFrames);
+
+        if (err == NO_BUFFER_AVAILABLE) {
+            assert(pinnedFrames == mBufferItemList.size());
+            break;
+        }
+
+        if (err == NOT_ENOUGH_DATA) {
+            // Fine. Empty buffer item list.
+            break;
+        }
+
+        if (err != OK) {
+            BI_LOGE("Clear failed, could not release buffer");
+            return err;
+        }
+
+    } while(true);
+
+    return OK;
+}
+
+nsecs_t RingBufferConsumer::getLatestTimestamp() {
+    Mutex::Autolock _l(mMutex);
+    if (mBufferItemList.size() == 0) {
+        return 0;
+    }
+    return mLatestTimestamp;
+}
+
+sp<Surface> RingBufferConsumer::getSurface() const {
+    return mSurface;
+}
+
+void RingBufferConsumer::pinBufferLocked(const BufferItem& item) {
+    List<RingBufferItem>::iterator it, end;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            find.mPinCount++;
+            break;
+        }
+    }
+
+    if (it == end) {
+        BI_LOGE("Failed to pin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    } else {
+        BI_LOGV("Pinned buffer (frame %" PRIu64 ", timestamp %" PRId64 ")",
+                item.mFrameNumber, item.mTimestamp);
+    }
+}
+
+status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
+    status_t err = OK;
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    it = mBufferItemList.begin();
+    end = mBufferItemList.end();
+    accIt = end;
+
+    if (it == end) {
+        /**
+         * This is fine. We really care about being able to acquire a buffer
+         * successfully after this function completes, not about it releasing
+         * some buffer.
+         */
+        BI_LOGV("%s: No buffers yet acquired, can't release anything",
+              __FUNCTION__);
+        return NOT_ENOUGH_DATA;
+    }
+
+    for (; it != end; ++it) {
+        RingBufferItem& find = *it;
+
+        if (find.mPinCount > 0) {
+            if (pinnedFrames != NULL) {
+                ++(*pinnedFrames);
+            }
+            // Filter out pinned frame when searching for buffer to release
+            continue;
+        }
+
+        if (find.mTimestamp < accIt->mTimestamp || accIt == end) {
+            accIt = it;
+        }
+    }
+
+    if (accIt != end) {
+        RingBufferItem& item = *accIt;
+
+        BI_LOGV("Attempting to release buffer timestamp %" PRId64 ", frame %" PRIu64,
+                item.mTimestamp, item.mFrameNumber);
+
+        err = mBufferItemConsumer->releaseBuffer(item, item.mFence);
+        if (err != OK) {
+            BI_LOGE("Failed to release buffer: %s (%d)",
+                    strerror(-err), err);
+            return err;
+        }
+
+        BI_LOGV("Buffer timestamp %" PRId64 ", frame %" PRIu64 " evicted",
+                item.mTimestamp, item.mFrameNumber);
+
+        mBufferItemList.erase(accIt);
+    } else {
+        BI_LOGW("All buffers pinned, could not find any to release");
+        return NO_BUFFER_AVAILABLE;
+
+    }
+
+    return OK;
+}
+
+void RingBufferConsumer::onFrameAvailable(const BufferItem& ) {
+    status_t err;
+    Mutex::Autolock _l(mMutex);
+
+    /**
+     * Release oldest frame
+     */
+    if (mBufferItemList.size() >= (size_t)mBufferCount) {
+        err = releaseOldestBufferLocked(/*pinnedFrames*/NULL);
+        assert(err != NOT_ENOUGH_DATA);
+
+        // TODO: implement the case for NO_BUFFER_AVAILABLE
+        assert(err != NO_BUFFER_AVAILABLE);
+        if (err != OK) {
+            return;
+        }
+        // TODO: in unpinBuffer rerun this routine if we had buffers
+        // we could've locked but didn't because there was no space
+    }
+
+    RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(),
+                                                    RingBufferItem());
+
+    /**
+     * Acquire new frame
+     */
+    err = mBufferItemConsumer->acquireBuffer(&item, 0);
+    if (err != OK) {
+        if (err != NO_BUFFER_AVAILABLE) {
+            BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err);
+        }
+
+        mBufferItemList.erase(--mBufferItemList.end());
+        return;
+    }
+
+    BI_LOGV("New buffer acquired (timestamp %" PRId64 "), "
+            "buffer items %zu out of %d",
+            item.mTimestamp,
+            mBufferItemList.size(), mBufferCount);
+
+    if (item.mTimestamp < mLatestTimestamp) {
+        BI_LOGE("Timestamp  decreases from %" PRId64 " to %" PRId64,
+                mLatestTimestamp, item.mTimestamp);
+    }
+
+    mLatestTimestamp = item.mTimestamp;
+}
+
+void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
+    Mutex::Autolock _l(mMutex);
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            status_t res = mBufferItemConsumer->addReleaseFence(item.mGraphicBuffer,
+                item.mFence);
+
+            if (res != OK) {
+                BI_LOGE("Failed to add release fence to buffer "
+                        "(timestamp %" PRId64 ", framenumber %" PRIu64,
+                        item.mTimestamp, item.mFrameNumber);
+                return;
+            }
+
+            find.mPinCount--;
+            break;
+        }
+    }
+
+    if (it == end) {
+        // This should never happen. If it happens, we have a bug.
+        BI_LOGE("Failed to unpin buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    } else {
+        BI_LOGV("Unpinned buffer (timestamp %" PRId64 ", framenumber %" PRIu64 ")",
+                 item.mTimestamp, item.mFrameNumber);
+    }
+}
+
+status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferItemConsumer->setDefaultBufferSize(w, h);
+}
+
+status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferItemConsumer->setDefaultBufferFormat(defaultFormat);
+}
+
+status_t RingBufferConsumer::setConsumerUsage(uint64_t usage) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferItemConsumer->setConsumerUsageBits(usage);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index d607d10..018a45c 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -59,7 +59,7 @@
     const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
   : mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersion();
 }
 
 bool HidlCameraDeviceUser::initDevice() {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 9e66236..8998e2a 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -61,7 +61,7 @@
 }
 
 HidlCameraService::HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) {
-    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersion();
 }
 
 Return<void>
@@ -254,15 +254,6 @@
         status = B2HStatus(serviceRet);
         return status;
     }
-    cameraStatusAndIds->erase(std::remove_if(cameraStatusAndIds->begin(), cameraStatusAndIds->end(),
-            [this](const hardware::CameraStatus& s) {
-                bool supportsHAL3 = false;
-                binder::Status sRet =
-                            mAidlICameraService->supportsCameraApi(s.cameraId,
-                                    hardware::ICameraService::API_VERSION_2, &supportsHAL3);
-                return !sRet.isOk() || !supportsHAL3;
-            }), cameraStatusAndIds->end());
-
     return HStatus::NO_ERROR;
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 3858410..219aa24 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -41,22 +41,7 @@
         "mediautils_headers",
     ],
     shared_libs: [
-        "framework-permission-aidl-cpp",
-        "libbinder",
-        "libbase",
-        "libutils",
-        "libcutils",
-        "libcameraservice",
-        "libcamera_client",
-        "liblog",
-        "libui",
-        "libgui",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V3-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
@@ -64,12 +49,25 @@
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
+        "android.hardware.camera.provider-V3-ndk",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "libactivitymanager_aidl",
         "libaudioclient",
         "libaudioflinger",
         "libaudiohal",
         "libaudioprocessing",
+        "libbase",
+        "libbinder",
+        "libcamera_client",
+        "libcameraservice",
+        "libcutils",
+        "libgui",
+        "liblog",
         "libmediaplayerservice",
         "libmediautils",
         "libnbaio",
@@ -77,10 +75,15 @@
         "libpermission",
         "libpowermanager",
         "libsensorprivacy",
+        "libui",
+        "libutils",
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
-    static_libs: ["libbinder_random_parcel"],
+    static_libs: [
+        "libaudiomockhal",
+        "libbinder_random_parcel",
+    ],
     fuzz_config: {
         cc: [
             "android-camera-fwk-eng@google.com",
@@ -111,6 +114,10 @@
     ],
     defaults: [
         "camera_service_fuzzer_defaults",
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudioflinger_dependencies",
     ],
 }
 
@@ -121,8 +128,8 @@
     ],
     defaults: [
         "camera_service_fuzzer_defaults",
-        "service_fuzzer_defaults",
         "fuzzer_disable_leaks",
+        "service_fuzzer_defaults",
     ],
     fuzz_config: {
         triage_assignee: "waghpawan@google.com",
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 8c7d39e..214832f 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -26,6 +26,7 @@
 #include <ISchedulingPolicyService.h>
 #include <MediaPlayerService.h>
 #include <android-base/logging.h>
+#include <android/binder_manager.h>
 #include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -36,11 +37,13 @@
 #include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
 #include <com_android_graphics_libgui_flags.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
 #include <device3/Camera3StreamInterface.h>
+#include <effect-mock/FactoryMock.h>
 #include <fakeservicemanager/FakeServiceManager.h>
 #include <fuzzbinder/random_binder.h>
 #include <gui/BufferItemConsumer.h>
-#include <gui/Flags.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
@@ -65,8 +68,6 @@
 const size_t kMaxArgs = 5;
 const int32_t kCamType[] = {hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
                             hardware::ICameraService::CAMERA_TYPE_ALL};
-const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
-                                 android::CameraService::API_VERSION_2};
 const uint8_t kSensorPixelModes[] = {ANDROID_SENSOR_PIXEL_MODE_DEFAULT,
         ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION};
 const int32_t kRequestTemplates[] = {
@@ -113,6 +114,7 @@
 const size_t kNumShellCmd = size(kShellCmd);
 static std::once_flag gSmOnce;
 sp<CameraService> gCameraService;
+sp<FakeServiceManager> gFsm;
 
 void addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
                 FuzzedDataProvider* fdp) {
@@ -430,8 +432,6 @@
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
     std::string cameraIdStr = std::to_string(cameraId);
     bool isSupported = false;
-    mCameraService->supportsCameraApi(
-        cameraIdStr, kCameraApiVersion[mFuzzedDataProvider->ConsumeBool()], &isSupported);
     mCameraService->isHiddenPhysicalCamera(cameraIdStr, &isSupported);
 
     std::string parameters;
@@ -835,8 +835,7 @@
             continue;
         }
         device->beginConfigure();
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+        auto [opaqueConsumer, surface] = BufferItemConsumer::create(
                 GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 8, /*controlledByApp*/ true);
         opaqueConsumer->setName(String8("Roger"));
 
@@ -844,34 +843,14 @@
         opaqueConsumer->setDefaultBufferSize(640, 480);
         opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
 
-        sp<Surface> surface = opaqueConsumer->getSurface();
-
         std::string noPhysicalId;
         size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
-        sp<IGraphicBufferProducer> igbp = surface->getIGraphicBufferProducer();
+        ParcelableSurfaceType pSurface = flagtools::surfaceToParcelableSurfaceType(surface);
         OutputConfiguration output(
-                igbp, kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
-                noPhysicalId);
-#else
-        sp<IGraphicBufferProducer> gbProducer;
-        sp<IGraphicBufferConsumer> gbConsumer;
-        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
-        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
-                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/8, /*controlledByApp*/true);
-        opaqueConsumer->setName(String8("Roger"));
-
-        // Set to VGA dimension for default, as that is guaranteed to be present
-        gbConsumer->setDefaultBufferSize(640, 480);
-        gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
-
-        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
-
-        std::string noPhysicalId;
-        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
-        OutputConfiguration output(gbProducer,
+                pSurface,
                 kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
                 noPhysicalId);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
         int streamId;
         device->createStream(output, &streamId);
         CameraMetadata sessionParams;
@@ -902,35 +881,57 @@
     }
 }
 
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+    /* Create a FakeServiceManager instance and add required services */
+    gFsm = sp<FakeServiceManager>::make();
+    setDefaultServiceManager(gFsm);
+
+    auto configService = ndk::SharedRefBase::make<ConfigMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IConfig/default"));
+
+    auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+    CHECK_EQ(NO_ERROR,
+             AServiceManager_addService(factoryService.get()->asBinder().get(),
+                                        "android.hardware.audio.effect.IFactory/default"));
+
+    auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IModule/default"));
+
+    // Disable creating thread pool for fuzzer instance of audio flinger
+    AudioSystem::disableThreadPool();
+
+    return 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     if (size < 1) {
         return 0;
     }
     setuid(AID_CAMERASERVER);
     std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
 
+    for (const char* service :
+         {"sensor_privacy", "permission", "media.camera.proxy", "batterystats", "media.metrics",
+          "media.extractor", "drm.drmManager", "permission_checker"}) {
+        addService(String16(service), gFsm, fp.get());
+    }
+
     std::call_once(gSmOnce, [&] {
-        /* Create a FakeServiceManager instance and add required services */
-        sp<FakeServiceManager> fsm = sp<FakeServiceManager>::make();
-        setDefaultServiceManager(fsm);
-        for (const char* service :
-             {"sensor_privacy", "permission", "media.camera.proxy", "batterystats", "media.metrics",
-              "media.extractor", "drm.drmManager", "permission_checker"}) {
-            addService(String16(service), fsm, fp.get());
-        }
         const auto audioFlinger = sp<AudioFlinger>::make();
         const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
         CHECK_EQ(NO_ERROR,
-                 fsm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
-                                 IInterface::asBinder(afAdapter), false /* allowIsolated */,
-                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+                 gFsm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                  IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                  IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
         sp<FuzzerActivityManager> am = new FuzzerActivityManager();
-        CHECK_EQ(NO_ERROR, fsm->addService(String16("activity"), IInterface::asBinder(am)));
+        CHECK_EQ(NO_ERROR, gFsm->addService(String16("activity"), IInterface::asBinder(am)));
         sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
-        CHECK_EQ(NO_ERROR, fsm->addService(String16("sensor_privacy"),
-                                           IInterface::asBinder(sensorPrivacyManager)));
+        CHECK_EQ(NO_ERROR, gFsm->addService(String16("sensor_privacy"),
+                                            IInterface::asBinder(sensorPrivacyManager)));
         sp<FuzzAppOpsService> appops = new FuzzAppOpsService();
-        CHECK_EQ(NO_ERROR, fsm->addService(String16("appops"), IInterface::asBinder(appops)));
+        CHECK_EQ(NO_ERROR, gFsm->addService(String16("appops"), IInterface::asBinder(appops)));
         MediaPlayerService::instantiate();
         gCameraService = new CameraService();
     });
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 837bf6d..c528f1e 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -98,7 +98,6 @@
         "libcamera_client",
         "libgui",
         "libui",
-        "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
@@ -124,6 +123,7 @@
         "Camera3StreamSplitterTest.cpp",
         "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
+        "SharedSessionConfigUtilsTest.cpp",
     ],
 
 }
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
index 5e32482..05959ec 100644
--- a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -22,7 +22,10 @@
 #include <com_android_internal_camera_flags.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferConsumer.h>
+#include <gui/Flags.h> // remove with WB_PLATFORM_API_IMPROVEMENTS
+#if not COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
 #include <gui/IGraphicBufferProducer.h>
+#endif
 #include <gui/Surface.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
@@ -55,16 +58,7 @@
 int64_t kDynamicRangeProfile = 0;
 
 std::tuple<sp<BufferItemConsumer>, sp<Surface>> createConsumerAndSurface() {
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-    sp<BufferItemConsumer> consumer = sp<BufferItemConsumer>::make(kConsumerUsage);
-    return {consumer, consumer->getSurface()};
-#else
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-
-    return {sp<BufferItemConsumer>::make(consumer, kConsumerUsage), sp<Surface>::make(producer)};
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    return BufferItemConsumer::create(kConsumerUsage);
 }
 
 class Camera3StreamSplitterTest : public testing::Test {
@@ -162,7 +156,11 @@
     sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
     EXPECT_EQ(OK, inputSurface->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
     // TODO: Do this with the surface itself once the API is available.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
+    EXPECT_EQ(OK, inputSurface->allowAllocation(false));
+#else
     EXPECT_EQ(OK, inputSurface->getIGraphicBufferProducer()->allowAllocation(false));
+#endif
 
     //
     // Create a buffer to use:
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 2f035e7..849537d 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -172,6 +172,10 @@
         return mCameraServiceProxy->isCameraDisabled(userId, ret);
     }
 
+    virtual binder::Status notifyWatchdog(int pid, bool isNative) override {
+        return mCameraServiceProxy->notifyWatchdog(pid, isNative);
+    }
+
     void setCameraDisabled(bool cameraDisabled) {
         mCameraDisabled = cameraDisabled;
     }
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 56cacef..ec41459 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -20,7 +20,6 @@
 #include "../common/CameraProviderManager.h"
 #include <aidl/android/hardware/camera/device/BnCameraDevice.h>
 #include <aidl/android/hardware/camera/provider/BnCameraProvider.h>
-#include <android_companion_virtualdevice_flags.h>
 #include <android/binder_auto_utils.h>
 #include <android/binder_ibinder.h>
 #include <android/binder_interface_utils.h>
@@ -53,7 +52,6 @@
 using ::testing::ElementsAre;
 
 namespace flags = com::android::internal::camera::flags;
-namespace vd_flags = android::companion::virtualdevice::flags;
 
 /**
  * Basic test implementation of a camera ver. 3.2 device interface
@@ -881,8 +879,8 @@
     ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
         << "Unavailable physical camera Ids not set properly.";
 }
-TEST_WITH_FLAGS(CameraProviderManagerTest, AidlVirtualCameraProviderDiscovered,
-                REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
+
+TEST(CameraProviderManagerTest, AidlVirtualCameraProviderDiscovered) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
     TestAidlInteractionProxy aidlServiceProxy;
@@ -909,8 +907,7 @@
     EXPECT_THAT(cameraIds, ElementsAre("123"));
 }
 
-TEST_WITH_FLAGS(CameraProviderManagerTest, AidlVirtualCameraProviderDiscoveredOnInit,
-                REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
+TEST(CameraProviderManagerTest, AidlVirtualCameraProviderDiscoveredOnInit) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
     TestAidlInteractionProxy aidlServiceProxy;
diff --git a/services/camera/libcameraservice/tests/SharedSessionConfigUtilsTest.cpp b/services/camera/libcameraservice/tests/SharedSessionConfigUtilsTest.cpp
new file mode 100644
index 0000000..b763e23
--- /dev/null
+++ b/services/camera/libcameraservice/tests/SharedSessionConfigUtilsTest.cpp
@@ -0,0 +1,672 @@
+/*
+* Copyright 2024 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#define LOG_TAG "SharedSessionConfigUtilsTest"
+
+#include <android/hardware_buffer.h>
+#include <camera/camera2/OutputConfiguration.h>
+#include <system/camera_metadata.h>
+#include <system/graphics.h>
+
+#include <gtest/gtest.h>
+#include "../config/SharedSessionConfigUtils.h"
+#include <tinyxml2.h>
+
+using namespace android;
+using namespace tinyxml2;
+
+// Helper function to create an XML element with text
+XMLElement* CreateXMLElement(XMLDocument& doc, const char* elementName, const char* text) {
+    XMLElement* elem = doc.NewElement(elementName);
+    if (text != nullptr) {
+        elem->SetText(text);
+    }
+    doc.InsertEndChild(elem);
+    return elem;
+}
+
+// Test for SharedSessionConfigUtils::toString
+TEST(SharedSessionConfigUtilsTest, ToStringTest) {
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(ErrorCode::STATUS_OK), "STATUS_OK");
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(ErrorCode::ERROR_READ_CONFIG_FILE),
+                 "ERROR_READ_CONFIG_FILE");
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(ErrorCode::ERROR_CONFIG_FILE_FORMAT),
+                 "ERROR_CONFIG_FILE_FORMAT");
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(
+                         ErrorCode::ERROR_CONFIG_READER_UNINITIALIZED),
+                 "ERROR_CONFIG_READER_UNINITIALIZED");
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(ErrorCode::ERROR_BAD_PARAMETER),
+                 "ERROR_BAD_PARAMETER");
+
+    // Test default case (unknown ErrorCode)
+    EXPECT_STREQ(SharedSessionConfigUtils::toString(static_cast<ErrorCode>(999)), "");
+}
+
+// Test for SharedSessionConfigUtils::getColorSpaceFromStr
+TEST(SharedSessionConfigUtilsTest, GetColorSpaceFromStrTest) {
+    int32_t colorSpace;
+    // Test with nullptr
+    EXPECT_EQ(SharedSessionConfigUtils::getColorSpaceFromStr(nullptr, &colorSpace),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(colorSpace, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+
+    // Test with empty string
+    EXPECT_EQ(SharedSessionConfigUtils::getColorSpaceFromStr("", &colorSpace),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(colorSpace, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+
+    // Test with valid strings
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+             ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED},
+            {std::to_string(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB),
+             ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB},
+            {std::to_string(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3),
+             ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3},
+            {std::to_string(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG),
+             ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG}
+    };
+
+    for (const auto& testCase : testCases) {
+        EXPECT_EQ(SharedSessionConfigUtils::getColorSpaceFromStr(testCase.input.c_str(),
+                                                                 &colorSpace),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(colorSpace, testCase.expected);
+    }
+
+    // Test with invalid string
+    EXPECT_EQ(SharedSessionConfigUtils::getColorSpaceFromStr("-99", &colorSpace),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getSurfaceTypeFromXml
+TEST(SharedSessionConfigUtilsTest, GetSurfaceTypeFromXmlTest) {
+    int64_t surfaceType;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getSurfaceTypeFromXml(nullptr, &surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with empty text
+    XMLDocument doc;
+    XMLElement* emptyElem = CreateXMLElement(doc, "surfaceType", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getSurfaceTypeFromXml(emptyElem, &surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with valid surface types
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW),
+             OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW},
+            {std::to_string(OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE),
+             OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE},
+            {std::to_string(OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER),
+             OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER},
+            {std::to_string(OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC),
+             OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC},
+            {std::to_string(OutputConfiguration::SURFACE_TYPE_IMAGE_READER),
+             OutputConfiguration::SURFACE_TYPE_IMAGE_READER}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "surfaceType", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getSurfaceTypeFromXml(elem, &surfaceType),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(surfaceType, testCase.expected);
+    }
+
+    // Test with invalid surface type
+    XMLElement* invalidElem = CreateXMLElement(doc, "surfaceType", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getSurfaceTypeFromXml(invalidElem, &surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getWidthFromXml
+TEST(SharedSessionConfigUtilsTest, GetWidthFromXmlTest) {
+    int64_t width;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getWidthFromXml(nullptr, &width),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "width", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getWidthFromXml(emptyElem, &width),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with valid width
+    XMLElement* validElem = CreateXMLElement(doc, "width", "1920");
+    EXPECT_EQ(SharedSessionConfigUtils::getWidthFromXml(validElem, &width),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(width, 1920);
+
+    // Test with invalid width (negative)
+    XMLElement* invalidWidthElem = CreateXMLElement(doc, "width", "-100");
+    EXPECT_EQ(SharedSessionConfigUtils::getWidthFromXml(invalidWidthElem, &width),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(width, -100); // The method logs an error but still returns STATUS_OK
+
+    // Test with non-numeric width
+    XMLElement* nonNumericElem = CreateXMLElement(doc, "width", "abc");
+    EXPECT_EQ(SharedSessionConfigUtils::getWidthFromXml(nonNumericElem, &width),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(width, 0); // std::atoi returns 0 for non-numeric strings
+}
+
+// Test for SharedSessionConfigUtils::getHeightFromXml
+TEST(SharedSessionConfigUtilsTest, GetHeightFromXmlTest) {
+    int64_t height;
+
+    XMLDocument doc;
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getHeightFromXml(nullptr, &height),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "height", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getHeightFromXml(emptyElem, &height),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with valid height
+    XMLElement* validElem = CreateXMLElement(doc, "height", "1080");
+    EXPECT_EQ(SharedSessionConfigUtils::getHeightFromXml(validElem, &height), ErrorCode::STATUS_OK);
+    EXPECT_EQ(height, 1080);
+
+    // Test with invalid height (zero)
+    XMLElement* invalidHeightElem = CreateXMLElement(doc, "height", "0");
+    EXPECT_EQ(SharedSessionConfigUtils::getHeightFromXml(invalidHeightElem, &height),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(height, 0); // The method logs an error but still returns STATUS_OK
+
+    // Test with non-numeric height
+    XMLElement* nonNumericElem = CreateXMLElement(doc, "height", "xyz");
+    EXPECT_EQ(SharedSessionConfigUtils::getHeightFromXml(nonNumericElem, &height),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(height, 0); // std::atoi returns 0 for non-numeric strings
+}
+
+// Test for SharedSessionConfigUtils::getPhysicalCameraIdFromXml
+TEST(SharedSessionConfigUtilsTest, GetPhysicalCameraIdFromXmlTest) {
+    std::string physicalCameraId;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getPhysicalCameraIdFromXml(nullptr, &physicalCameraId),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(physicalCameraId, "");
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "physicalCameraId", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getPhysicalCameraIdFromXml(emptyElem, &physicalCameraId),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(physicalCameraId, "");
+
+    // Test with valid physical camera ID
+    XMLElement* validElem = CreateXMLElement(doc, "physicalCameraId", "physical_camera_1");
+    EXPECT_EQ(SharedSessionConfigUtils::getPhysicalCameraIdFromXml(validElem, &physicalCameraId),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(physicalCameraId, "physical_camera_1");
+}
+
+// Test for SharedSessionConfigUtils::getStreamUseCaseFromXml
+TEST(SharedSessionConfigUtilsTest, GetStreamUseCaseFromXmlTest) {
+    int64_t streamUseCase;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getStreamUseCaseFromXml(nullptr, &streamUseCase),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(streamUseCase, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "streamUseCase", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getStreamUseCaseFromXml(emptyElem, &streamUseCase),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(streamUseCase, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+
+    // Test with valid stream use cases
+    struct {
+        std::string input;
+        int64_t expected;
+    } testCases[] = {
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW},
+            {std::to_string(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START),
+             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "streamUseCase", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getStreamUseCaseFromXml(elem, &streamUseCase),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(streamUseCase, testCase.expected);
+    }
+
+    // Test with invalid stream use case
+    XMLElement* invalidElem = CreateXMLElement(doc, "streamUseCase", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getStreamUseCaseFromXml(invalidElem, &streamUseCase),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getTimestampBaseFromXml
+TEST(SharedSessionConfigUtilsTest, GetTimestampBaseFromXmlTest) {
+    int64_t timestampBase;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getTimestampBaseFromXml(nullptr, &timestampBase),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(timestampBase, OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "timestampBase", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getTimestampBaseFromXml(emptyElem, &timestampBase),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(timestampBase, OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+
+    // Test with valid timestamp bases
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
+             OutputConfiguration::TIMESTAMP_BASE_DEFAULT},
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_SENSOR),
+             OutputConfiguration::TIMESTAMP_BASE_SENSOR},
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_MONOTONIC),
+             OutputConfiguration::TIMESTAMP_BASE_MONOTONIC},
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_REALTIME),
+             OutputConfiguration::TIMESTAMP_BASE_REALTIME},
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED),
+             OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED},
+            {std::to_string(OutputConfiguration::TIMESTAMP_BASE_MAX),
+             OutputConfiguration::TIMESTAMP_BASE_MAX}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "timestampBase", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getTimestampBaseFromXml(elem, &timestampBase),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(timestampBase, testCase.expected);
+    }
+
+    // Test with invalid timestamp base
+    XMLElement* invalidElem = CreateXMLElement(doc, "timestampBase", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getTimestampBaseFromXml(invalidElem, &timestampBase),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getMirrorModeFromXml
+TEST(SharedSessionConfigUtilsTest, GetMirrorModeFromXmlTest) {
+    int64_t mirrorMode;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getMirrorModeFromXml(nullptr, &mirrorMode),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(mirrorMode, OutputConfiguration::MIRROR_MODE_AUTO);
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "mirrorMode", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getMirrorModeFromXml(emptyElem, &mirrorMode),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(mirrorMode, OutputConfiguration::MIRROR_MODE_AUTO);
+
+    // Test with valid mirror modes
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(OutputConfiguration::MIRROR_MODE_AUTO),
+             OutputConfiguration::MIRROR_MODE_AUTO},
+            {std::to_string(OutputConfiguration::MIRROR_MODE_NONE),
+             OutputConfiguration::MIRROR_MODE_NONE},
+            {std::to_string(OutputConfiguration::MIRROR_MODE_H),
+             OutputConfiguration::MIRROR_MODE_H},
+            {std::to_string(OutputConfiguration::MIRROR_MODE_V),
+             OutputConfiguration::MIRROR_MODE_V}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "mirrorMode", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getMirrorModeFromXml(elem, &mirrorMode),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(mirrorMode, testCase.expected);
+    }
+
+    // Test with invalid mirror mode
+    XMLElement* invalidElem = CreateXMLElement(doc, "mirrorMode", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getMirrorModeFromXml(invalidElem, &mirrorMode),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getUseReadoutTimestampFromXml
+TEST(SharedSessionConfigUtilsTest, GetUseReadoutTimestampFromXmlTest) {
+    bool useReadoutTimestamp;
+
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getUseReadoutTimestampFromXml(nullptr,
+                                                                      &useReadoutTimestamp),
+              ErrorCode::STATUS_OK);
+    EXPECT_FALSE(useReadoutTimestamp);
+
+    XMLDocument doc;
+    // Test with empty text (should default to false)
+    XMLElement* emptyElem = CreateXMLElement(doc, "useReadoutTimestamp", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getUseReadoutTimestampFromXml(emptyElem,
+                                                                      &useReadoutTimestamp),
+              ErrorCode::STATUS_OK);
+    EXPECT_FALSE(useReadoutTimestamp);
+
+    // Test with truthy value "1" (parsed as true)
+    XMLElement* trueElem = CreateXMLElement(doc, "useReadoutTimestamp", "1");
+    EXPECT_EQ(SharedSessionConfigUtils::getUseReadoutTimestampFromXml(trueElem,
+                                                                      &useReadoutTimestamp),
+              ErrorCode::STATUS_OK);
+    EXPECT_TRUE(useReadoutTimestamp);
+
+    // Test with falsy value "0" (parsed as false)
+    XMLElement* falseElem = CreateXMLElement(doc, "useReadoutTimestamp", "0");
+    EXPECT_EQ(SharedSessionConfigUtils::getUseReadoutTimestampFromXml(falseElem,
+                                                                      &useReadoutTimestamp),
+              ErrorCode::STATUS_OK);
+    EXPECT_FALSE(useReadoutTimestamp);
+
+    // Test with invalid string
+    XMLElement* invalidElem = CreateXMLElement(doc, "useReadoutTimestamp", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getUseReadoutTimestampFromXml(invalidElem,
+                                                                      &useReadoutTimestamp),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getFormatFromXml
+TEST(SharedSessionConfigUtilsTest, GetFormatFromXmlTest) {
+    int64_t format;
+
+    int64_t surfaceType = OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE;
+    // Test with nullptr XML element with surfaceType != IMAGE_READER
+    EXPECT_EQ(SharedSessionConfigUtils::getFormatFromXml(nullptr, &format, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(format, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+    surfaceType = OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
+    // Test with nullptr XML element with surfaceType == IMAGE_READER
+    EXPECT_EQ(SharedSessionConfigUtils::getFormatFromXml(nullptr, &format, surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "format", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getFormatFromXml(emptyElem, &format, surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // Test with valid formats
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(HAL_PIXEL_FORMAT_RGBA_8888), HAL_PIXEL_FORMAT_RGBA_8888},
+            {std::to_string(HAL_PIXEL_FORMAT_RGBX_8888), HAL_PIXEL_FORMAT_RGBX_8888},
+            {std::to_string(HAL_PIXEL_FORMAT_RGB_888), HAL_PIXEL_FORMAT_RGB_888},
+            {std::to_string(HAL_PIXEL_FORMAT_RGB_565), HAL_PIXEL_FORMAT_RGB_565},
+            {std::to_string(HAL_PIXEL_FORMAT_BGRA_8888), HAL_PIXEL_FORMAT_BGRA_8888},
+            {std::to_string(HAL_PIXEL_FORMAT_YCBCR_422_SP), HAL_PIXEL_FORMAT_YCBCR_422_SP},
+            {std::to_string(HAL_PIXEL_FORMAT_YCRCB_420_SP), HAL_PIXEL_FORMAT_YCRCB_420_SP},
+            {std::to_string(HAL_PIXEL_FORMAT_YCBCR_422_I), HAL_PIXEL_FORMAT_YCBCR_422_I},
+            {std::to_string(HAL_PIXEL_FORMAT_RGBA_FP16), HAL_PIXEL_FORMAT_RGBA_FP16},
+            {std::to_string(HAL_PIXEL_FORMAT_RAW16), HAL_PIXEL_FORMAT_RAW16},
+            {std::to_string(HAL_PIXEL_FORMAT_BLOB), HAL_PIXEL_FORMAT_BLOB},
+            {std::to_string(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED},
+            {std::to_string(HAL_PIXEL_FORMAT_YCBCR_420_888), HAL_PIXEL_FORMAT_YCBCR_420_888},
+            {std::to_string(HAL_PIXEL_FORMAT_RAW_OPAQUE), HAL_PIXEL_FORMAT_RAW_OPAQUE},
+            {std::to_string(HAL_PIXEL_FORMAT_RAW10), HAL_PIXEL_FORMAT_RAW10},
+            {std::to_string(HAL_PIXEL_FORMAT_RAW12), HAL_PIXEL_FORMAT_RAW12},
+            {std::to_string(HAL_PIXEL_FORMAT_RGBA_1010102), HAL_PIXEL_FORMAT_RGBA_1010102},
+            {std::to_string(HAL_PIXEL_FORMAT_Y8), HAL_PIXEL_FORMAT_Y8},
+            {std::to_string(HAL_PIXEL_FORMAT_Y16), HAL_PIXEL_FORMAT_Y16},
+            {std::to_string(HAL_PIXEL_FORMAT_YV12), HAL_PIXEL_FORMAT_YV12},
+            {std::to_string(HAL_PIXEL_FORMAT_DEPTH_16), HAL_PIXEL_FORMAT_DEPTH_16},
+            {std::to_string(HAL_PIXEL_FORMAT_DEPTH_24), HAL_PIXEL_FORMAT_DEPTH_24},
+            {std::to_string(HAL_PIXEL_FORMAT_DEPTH_24_STENCIL_8),
+             HAL_PIXEL_FORMAT_DEPTH_24_STENCIL_8},
+            {std::to_string(HAL_PIXEL_FORMAT_DEPTH_32F), HAL_PIXEL_FORMAT_DEPTH_32F},
+            {std::to_string(HAL_PIXEL_FORMAT_DEPTH_32F_STENCIL_8),
+             HAL_PIXEL_FORMAT_DEPTH_32F_STENCIL_8},
+            {std::to_string(HAL_PIXEL_FORMAT_STENCIL_8), HAL_PIXEL_FORMAT_STENCIL_8},
+            {std::to_string(HAL_PIXEL_FORMAT_YCBCR_P010), HAL_PIXEL_FORMAT_YCBCR_P010},
+            {std::to_string(HAL_PIXEL_FORMAT_HSV_888), HAL_PIXEL_FORMAT_HSV_888}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "format", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getFormatFromXml(elem, &format, surfaceType),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(format, testCase.expected);
+    }
+
+    // Test with invalid format
+    XMLElement* invalidElem = CreateXMLElement(doc, "format", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getFormatFromXml(invalidElem, &format, surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getUsageFromXml
+TEST(SharedSessionConfigUtilsTest, GetUsageFromXmlTest) {
+    int64_t usage = 0;
+
+    int64_t surfaceType = OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE;
+    // Test with nullptr XML element with surfaceType == SURFACE_TYPE_SURFACE_TEXTURE
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(nullptr, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE));
+
+    // clear usage value
+    usage = 0;
+    surfaceType = OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW;
+    // Test with nullptr XML element with surfaceType == SURFACE_TYPE_SURFACE_VIEW
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(nullptr, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
+                                          | AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY));
+
+    // clear usage value
+    usage = 0;
+    surfaceType = OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER;
+    // Test with nullptr XML element with surfaceType == SURFACE_TYPE_MEDIA_RECORDER
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(nullptr, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_VIDEO_ENCODE));
+
+    // clear usage value
+    usage = 0;
+    surfaceType = OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC;
+    // Test with nullptr XML element with surfaceType == SURFACE_TYPE_MEDIA_CODEC
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(nullptr, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_VIDEO_ENCODE));
+
+    // clear usage value
+    usage = 0;
+    surfaceType = OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
+    // Test with nullptr XML element with surfaceType == IMAGE_READER
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(nullptr, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_CPU_READ_NEVER));
+
+
+    // clear usage value
+    usage = 0;
+    XMLDocument doc;
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "usage", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(emptyElem, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_CPU_READ_NEVER));
+
+    // clear usage value
+    usage = 0;
+    // Test with valid single usage
+    XMLElement* singleUsageElem = CreateXMLElement(doc, "usage",
+                                                   std::to_string(
+                                                           AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN)
+                                                           .c_str());
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(singleUsageElem, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN));
+
+    // clear usage value
+    usage = 0;
+    // Test with valid multiple usages
+    XMLElement* multipleUsagesElem =
+            CreateXMLElement(doc, "usage",
+                             (std::to_string(AHARDWAREBUFFER_USAGE_CPU_READ_NEVER)
+                                     + "|" + std::to_string(AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER)
+                                     + "|" + std::to_string(AHARDWAREBUFFER_USAGE_VIDEO_ENCODE))
+                                     .c_str());
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(multipleUsagesElem, &usage, surfaceType),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(usage, static_cast<int64_t>(AHARDWAREBUFFER_USAGE_CPU_READ_NEVER
+                                          | AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER
+                                          | AHARDWAREBUFFER_USAGE_VIDEO_ENCODE));
+
+    // clear usage value
+    usage = 0;
+    // Test with invalid usage
+    XMLElement* invalidUsageElem = CreateXMLElement(doc, "usage", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(invalidUsageElem, &usage, surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+
+    // clear usage value
+    usage = 0;
+    // Test with a mix of valid and invalid usages
+    XMLElement* mixedUsageElem =
+            CreateXMLElement(doc, "usage",
+                             (std::to_string(AHARDWAREBUFFER_USAGE_CPU_READ_NEVER) + "|-99")
+                                     .c_str());
+    EXPECT_EQ(SharedSessionConfigUtils::getUsageFromXml(mixedUsageElem, &usage, surfaceType),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
+
+// Test for SharedSessionConfigUtils::getDataSpaceFromXml
+TEST(SharedSessionConfigUtilsTest, GetDataSpaceFromXmlTest) {
+    int64_t dataSpace;
+
+    XMLDocument doc;
+    // Test with nullptr XML element
+    EXPECT_EQ(SharedSessionConfigUtils::getDataSpaceFromXml(nullptr, &dataSpace),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(dataSpace, HAL_DATASPACE_UNKNOWN);
+
+    // Test with empty text
+    XMLElement* emptyElem = CreateXMLElement(doc, "dataSpace", "");
+    EXPECT_EQ(SharedSessionConfigUtils::getDataSpaceFromXml(emptyElem, &dataSpace),
+              ErrorCode::STATUS_OK);
+    EXPECT_EQ(dataSpace, HAL_DATASPACE_UNKNOWN);
+
+    // Test with valid data spaces
+    struct {
+        std::string input;
+        int expected;
+    } testCases[] = {
+            {std::to_string(HAL_DATASPACE_UNKNOWN), HAL_DATASPACE_UNKNOWN},
+            {std::to_string(HAL_DATASPACE_ARBITRARY), HAL_DATASPACE_ARBITRARY},
+            {std::to_string(HAL_DATASPACE_STANDARD_UNSPECIFIED),
+             HAL_DATASPACE_STANDARD_UNSPECIFIED},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT709), HAL_DATASPACE_STANDARD_BT709},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT601_625), HAL_DATASPACE_STANDARD_BT601_625},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED),
+             HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT601_525), HAL_DATASPACE_STANDARD_BT601_525},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED),
+             HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT2020), HAL_DATASPACE_STANDARD_BT2020},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE),
+             HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE},
+            {std::to_string(HAL_DATASPACE_STANDARD_BT470M), HAL_DATASPACE_STANDARD_BT470M},
+            {std::to_string(HAL_DATASPACE_STANDARD_FILM), HAL_DATASPACE_STANDARD_FILM},
+            {std::to_string(HAL_DATASPACE_STANDARD_DCI_P3), HAL_DATASPACE_STANDARD_DCI_P3},
+            {std::to_string(HAL_DATASPACE_STANDARD_ADOBE_RGB), HAL_DATASPACE_STANDARD_ADOBE_RGB},
+            {std::to_string(HAL_DATASPACE_TRANSFER_UNSPECIFIED),
+             HAL_DATASPACE_TRANSFER_UNSPECIFIED},
+            {std::to_string(HAL_DATASPACE_TRANSFER_LINEAR), HAL_DATASPACE_TRANSFER_LINEAR},
+            {std::to_string(HAL_DATASPACE_TRANSFER_SRGB), HAL_DATASPACE_TRANSFER_SRGB},
+            {std::to_string(HAL_DATASPACE_TRANSFER_SMPTE_170M), HAL_DATASPACE_TRANSFER_SMPTE_170M},
+            {std::to_string(HAL_DATASPACE_TRANSFER_GAMMA2_2), HAL_DATASPACE_TRANSFER_GAMMA2_2},
+            {std::to_string(HAL_DATASPACE_TRANSFER_GAMMA2_6), HAL_DATASPACE_TRANSFER_GAMMA2_6},
+            {std::to_string(HAL_DATASPACE_TRANSFER_GAMMA2_8), HAL_DATASPACE_TRANSFER_GAMMA2_8},
+            {std::to_string(HAL_DATASPACE_TRANSFER_ST2084), HAL_DATASPACE_TRANSFER_ST2084},
+            {std::to_string(HAL_DATASPACE_TRANSFER_HLG), HAL_DATASPACE_TRANSFER_HLG},
+            {std::to_string(HAL_DATASPACE_RANGE_UNSPECIFIED), HAL_DATASPACE_RANGE_UNSPECIFIED},
+            {std::to_string(HAL_DATASPACE_RANGE_FULL), HAL_DATASPACE_RANGE_FULL},
+            {std::to_string(HAL_DATASPACE_RANGE_LIMITED), HAL_DATASPACE_RANGE_LIMITED},
+            {std::to_string(HAL_DATASPACE_RANGE_EXTENDED), HAL_DATASPACE_RANGE_EXTENDED},
+            {std::to_string(HAL_DATASPACE_SRGB_LINEAR), HAL_DATASPACE_SRGB_LINEAR},
+            {std::to_string(HAL_DATASPACE_V0_SRGB_LINEAR), HAL_DATASPACE_V0_SRGB_LINEAR},
+            {std::to_string(HAL_DATASPACE_V0_SCRGB_LINEAR), HAL_DATASPACE_V0_SCRGB_LINEAR},
+            {std::to_string(HAL_DATASPACE_SRGB), HAL_DATASPACE_SRGB},
+            {std::to_string(HAL_DATASPACE_V0_SRGB), HAL_DATASPACE_V0_SRGB},
+            {std::to_string(HAL_DATASPACE_V0_SCRGB), HAL_DATASPACE_V0_SCRGB},
+            {std::to_string(HAL_DATASPACE_JFIF), HAL_DATASPACE_JFIF},
+            {std::to_string(HAL_DATASPACE_V0_JFIF), HAL_DATASPACE_V0_JFIF},
+            {std::to_string(HAL_DATASPACE_BT601_625), HAL_DATASPACE_BT601_625},
+            {std::to_string(HAL_DATASPACE_V0_BT601_625), HAL_DATASPACE_V0_BT601_625},
+            {std::to_string(HAL_DATASPACE_BT601_525), HAL_DATASPACE_BT601_525},
+            {std::to_string(HAL_DATASPACE_V0_BT601_525), HAL_DATASPACE_V0_BT601_525},
+            {std::to_string(HAL_DATASPACE_BT709), HAL_DATASPACE_BT709},
+            {std::to_string(HAL_DATASPACE_V0_BT709), HAL_DATASPACE_V0_BT709},
+            {std::to_string(HAL_DATASPACE_DCI_P3_LINEAR), HAL_DATASPACE_DCI_P3_LINEAR},
+            {std::to_string(HAL_DATASPACE_DCI_P3), HAL_DATASPACE_DCI_P3},
+            {std::to_string(HAL_DATASPACE_DISPLAY_P3_LINEAR), HAL_DATASPACE_DISPLAY_P3_LINEAR},
+            {std::to_string(HAL_DATASPACE_DISPLAY_P3), HAL_DATASPACE_DISPLAY_P3},
+            {std::to_string(HAL_DATASPACE_ADOBE_RGB), HAL_DATASPACE_ADOBE_RGB},
+            {std::to_string(HAL_DATASPACE_BT2020_LINEAR), HAL_DATASPACE_BT2020_LINEAR},
+            {std::to_string(HAL_DATASPACE_BT2020), HAL_DATASPACE_BT2020},
+            {std::to_string(HAL_DATASPACE_BT2020_PQ), HAL_DATASPACE_BT2020_PQ},
+            {std::to_string(HAL_DATASPACE_DEPTH), HAL_DATASPACE_DEPTH},
+            {std::to_string(HAL_DATASPACE_SENSOR), HAL_DATASPACE_SENSOR}
+    };
+
+    for (const auto& testCase : testCases) {
+        XMLElement* elem = CreateXMLElement(doc, "dataSpace", testCase.input.c_str());
+        EXPECT_EQ(SharedSessionConfigUtils::getDataSpaceFromXml(elem, &dataSpace),
+                  ErrorCode::STATUS_OK);
+        EXPECT_EQ(dataSpace, testCase.expected);
+    }
+
+    // Test with invalid data space
+    XMLElement* invalidElem = CreateXMLElement(doc, "dataSpace", "-99");
+    EXPECT_EQ(SharedSessionConfigUtils::getDataSpaceFromXml(invalidElem, &dataSpace),
+              ErrorCode::ERROR_CONFIG_FILE_FORMAT);
+}
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 8f93ee0..22b9a75 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -551,4 +551,18 @@
     }
 }
 
+void CameraServiceProxyWrapper::notifyWatchdog(pid_t clientPid, bool isNativePid) {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) {
+        ALOGW("%s: ICameraServiceProxy is null!", __FUNCTION__);
+        return;
+    }
+
+    auto status = proxyBinder->notifyWatchdog(clientPid, isNativePid);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed calling notifyWatchdog: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+}
+
 }  // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index ad8b1cd..418bb4c 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -154,6 +154,9 @@
 
     // Update the stored extension stats to the latest values
     std::string updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
+
+    // Notify CameraServiceProxy that the watchdog is about to trigger for this client.
+    void notifyWatchdog(pid_t clientPid, bool isNativePid);
 };
 
 } // android
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 0f0dc4c..d5c74f8 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -418,6 +418,12 @@
     std::vector<int32_t> getAllOwners() const;
 
     /**
+     * Return the ClientDescriptor for a client which has opened the camera in
+     * shared mode corresponding to the given pid.
+     */
+    std::shared_ptr<ClientDescriptor<KEY, VALUE>> getSharedClient(int pid) const;
+
+    /**
      * Return the ClientDescriptor corresponding to the given key, or an empty shared pointer
      * if none exists.
      */
@@ -685,6 +691,20 @@
 }
 
 template<class KEY, class VALUE, class LISTENER>
+std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::getSharedClient(
+        int pid) const {
+    Mutex::Autolock lock(mLock);
+    if (flags::camera_multi_client()) {
+        for (const auto& i : mClients) {
+            if ((i->getOwnerId() == pid) && (i->getSharedMode())) {
+                return i;
+            }
+        }
+    }
+    return std::shared_ptr<ClientDescriptor<KEY, VALUE>>(nullptr);
+}
+
+template<class KEY, class VALUE, class LISTENER>
 void ClientManager<KEY, VALUE, LISTENER>::updatePriorities(
         const std::map<int32_t,ClientPriority>& ownerPriorityList) {
     Mutex::Autolock lock(mLock);
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f41cb85..97d2179 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -444,15 +444,15 @@
     return false;
 }
 
-binder::Status createSurfaceFromGbp(
+binder::Status createConfiguredSurface(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        sp<Surface>& out_surface, const sp<SurfaceType>& surface,
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
         int32_t colorSpace, bool respectSurfaceSize) {
     // bufferProducer must be non-null
-    if (gbp == nullptr) {
+    if ( flagtools::isSurfaceTypeValid(surface) == false ) {
         std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
         ALOGW("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
@@ -463,7 +463,7 @@
     bool useAsync = false;
     uint64_t consumerUsage = 0;
     status_t err;
-    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
+    if ((err = surface->getConsumerUsage(&consumerUsage)) != OK) {
         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                 logicalCameraId.c_str(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -483,8 +483,9 @@
     bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
             (consumerUsage & allowedFlags) != 0;
 
-    surface = new Surface(gbp, useAsync);
-    ANativeWindow *anw = surface.get();
+    out_surface = new Surface(flagtools::surfaceTypeToIGBP(surface), useAsync);
+
+    ANativeWindow *anw = out_surface.get();
 
     int width, height, format;
     android_dataspace dataSpace;
@@ -695,16 +696,18 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
-binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+binder::Status mapStream(
+        const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        bool isCompositeHeicDisabled, bool isCompositeHeicUltraHDRDisabled,
         const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
-        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        size_t* streamIdx /*out*/, const std::string& physicalId, int32_t groupId,
         const std::string& logicalCameraId,
-        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
-        bool *earlyExit /*out*/) {
+        aidl::android::hardware::camera::device::StreamConfiguration& streamConfiguration /*out*/,
+        bool* earlyExit /*out*/) {
     bool isDepthCompositeStream =
             camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
-    bool isHeicCompositeStream =
-            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStreamInfo(
+            streamInfo, isCompositeHeicDisabled, isCompositeHeicUltraHDRDisabled);
     bool isJpegRCompositeStream =
             camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
             !isCompositeJpegRDisabled;
@@ -756,16 +759,14 @@
     return binder::Status::ok();
 }
 
-binder::Status
-convertToHALStreamCombination(
-        const SessionConfiguration& sessionConfiguration,
-        const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
-        bool isCompositeJpegRDisabled,
-        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
-        bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
-        bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
-        bool *earlyExit) {
+binder::Status convertToHALStreamCombination(
+        const SessionConfiguration& sessionConfiguration, const std::string& logicalCameraId,
+        const CameraMetadata& deviceInfo, bool isCompositeJpegRDisabled,
+        bool isCompositeHeicDisabled, bool isCompositeHeicUltraHDRDisabled,
+        metadataGetter getMetadata, const std::vector<std::string>& physicalCameraIds,
+        aidl::android::hardware::camera::device::StreamConfiguration& streamConfiguration,
+        bool overrideForPerfClass, metadata_vendor_id_t vendorTagId, bool checkSessionParams,
+        const std::vector<int32_t>& additionalKeys, bool* earlyExit) {
     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     auto operatingMode = sessionConfiguration.getOperatingMode();
     binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
@@ -906,9 +907,10 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
-                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
-                    logicalCameraId, streamConfiguration, earlyExit);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, isCompositeHeicDisabled,
+                                    isCompositeHeicUltraHDRDisabled, deviceInfo,
+                                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId,
+                                    groupId, logicalCameraId, streamConfiguration, earlyExit);
             if (*earlyExit || !status.isOk()) {
                 return status;
             }
@@ -923,23 +925,20 @@
         for (auto& surface_type : surfaces) {
             sp<Surface> surface;
             int mirrorMode = it.getMirrorMode(surface_type);
-            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface,
-                                       surface_type
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                                       .graphicBufferProducer
-#endif
-                                       , logicalCameraId,
-                                       metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                                       streamUseCase, timestampBase, mirrorMode, colorSpace,
-                                       /*respectSurfaceSize*/ true);
+            res = createConfiguredSurface(streamInfo, isStreamInfoValid, surface,
+                                    flagtools::convertParcelableSurfaceTypeToSurface(surface_type),
+                                    logicalCameraId,  metadataChosen, sensorPixelModesUsed,
+                                    dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
+                                    colorSpace, /*respectSurfaceSize*/ true);
 
             if (!res.isOk()) return res;
 
             if (!isStreamInfoValid) {
-                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
-                                        static_cast<camera_stream_rotation_t>(it.getRotation()),
-                                        &streamIdx, physicalCameraId, groupId, logicalCameraId,
-                                        streamConfiguration, earlyExit);
+                auto status = mapStream(
+                        streamInfo, isCompositeJpegRDisabled, isCompositeHeicDisabled,
+                        isCompositeHeicUltraHDRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t>(it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
                 if (*earlyExit || !status.isOk()) {
                     return status;
                 }
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 3c0f109..3852933 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -104,11 +104,11 @@
 // check if format is not custom format
 bool isPublicFormat(int32_t format);
 
-// Create a Surface from an IGraphicBufferProducer. Returns error if
-// IGraphicBufferProducer's property doesn't match with streamInfo
-binder::Status createSurfaceFromGbp(
+// Recreates a Surface from another Surface setting the controlledByApp correctly. Returns error if
+// previous Surface property doesn't match with streamInfo
+binder::Status createConfiguredSurface(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        sp<Surface>& out_surface, const sp<SurfaceType>& surface,
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
@@ -152,7 +152,8 @@
 convertToHALStreamCombination(
     const SessionConfiguration& sessionConfiguration,
     const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
-    bool isCompositeJpegRDisabled, metadataGetter getMetadata,
+    bool isCompositeJpegRDisabled, bool isCompositeHeicDisabled,
+    bool isCompositeHeicUltraHDRDisabled, metadataGetter getMetadata,
     const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 9986a84..99ebaf0 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,10 +111,11 @@
         hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, metadata_vendor_id_t vendorTagId, bool *earlyExit) {
     aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
-    auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
-            false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
-            aidlStreamConfiguration, overrideForPerfClass, vendorTagId,
-            /*checkSessionParams*/false, /*additionalKeys*/{}, earlyExit);
+    auto ret = convertToHALStreamCombination(
+            sessionConfiguration, logicalCameraId, deviceInfo, false /*isCompositeJpegRDisabled*/,
+            false /*isCompositeHeicDisabled*/, false /*isCompositeHeicUltraHDRDisabled*/,
+            getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
+            vendorTagId, /*checkSessionParams*/ false, /*additionalKeys*/ {}, earlyExit);
     if (!ret.isOk()) {
         return ret;
     }
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index 5f61de5..2fbdd8d 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -23,8 +23,13 @@
 #include <utils/Log.h>
 #include <vendorsupport/api_level.h>
 
+#include <camera/CameraUtils.h>
+
 namespace android {
 
+/**
+ * Returns defaultVersion if the property is not found.
+ */
 int getVNDKVersionFromProp(int defaultVersion) {
     int vendorApiLevel = AVendorSupport_getVendorApiLevel();
     if (vendorApiLevel == 0) {
@@ -38,6 +43,20 @@
     return AVendorSupport_getSdkApiLevelOf(vendorApiLevel);
 }
 
+int getVNDKVersion() {
+    static int kVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+    return kVndkVersion;
+}
+
+int32_t getDeviceId(const CameraMetadata& cameraInfo) {
+    if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
+        return kDefaultDeviceId;
+    }
+
+    const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
+    return deviceIdEntry.data.i32[0];
+}
+
 RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid)
     : mTid(tid), mPreviousPolicy(sched_getscheduler(tid)) {
     auto res = sched_getparam(mTid, &mPreviousParams);
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index 0eb5e2c..7ca3e93 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -21,6 +21,8 @@
 #include <unistd.h>
 #include <type_traits>
 
+#include <camera/CameraMetadata.h>
+
 namespace android {
 
 /**
@@ -34,13 +36,18 @@
 }
 
 /**
- * As of Android V, ro.board.api_level returns the year and month of release (ex. 202404)
- * instead of release SDK version. This function maps year/month format back to release
- * SDK version.
+ * Helper function for getting the current VNDK version.
  *
- * Returns defaultVersion if the property is not found.
+ * If the current VNDK version cannot be determined, this function returns
+ * __ANDROID_API_FUTURE__.
  */
-int getVNDKVersionFromProp(int defaultVersion);
+int getVNDKVersion();
+
+/**
+ * Returns the deviceId for the given camera metadata. For any virtual camera, this is the id
+ * of the virtual device owning the camera. For any real camera, this is kDefaultDeviceId.
+ */
+int32_t getDeviceId(const CameraMetadata& cameraInfo);
 
 /**
  * An instance of this class will raise the scheduling policy of a given
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
index 22dd806..312da00 100644
--- a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -17,22 +17,14 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "VirtualDeviceCameraIdMapper"
 
-#include <android_companion_virtualdevice_flags.h>
 #include <camera/CameraUtils.h>
 
 #include "VirtualDeviceCameraIdMapper.h"
 
 namespace android {
 
-namespace vd_flags = android::companion::virtualdevice::flags;
-
 void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
         int32_t deviceId, const std::string& mappedCameraId) {
-    if (!vd_flags::camera_device_awareness()) {
-        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
-        return;
-    }
-
     if (deviceId == kDefaultDeviceId) {
         ALOGV("%s: Not adding entry for a camera of the default device", __func__);
         return;
@@ -47,11 +39,6 @@
 }
 
 void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
-    if (!vd_flags::camera_device_awareness()) {
-        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
-        return;
-    }
-
     auto deviceIdAndMappedCameraIdPair = getDeviceIdAndMappedCameraIdPair(cameraId);
 
     std::scoped_lock lock(mLock);
@@ -67,12 +54,6 @@
         return mappedCameraId;
     }
 
-    if (!vd_flags::camera_device_awareness()) {
-        ALOGV("%s: Device-aware camera feature is not enabled, returning the camera id as "
-              "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
-        return mappedCameraId;
-    }
-
     std::scoped_lock lock(mLock);
     auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
             {deviceId, mappedCameraId});
@@ -86,11 +67,6 @@
 
 std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
         const std::string& cameraId) const {
-    if (!vd_flags::camera_device_awareness()) {
-        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
-        return std::make_pair(kDefaultDeviceId, cameraId);
-    }
-
     std::scoped_lock lock(mLock);
     auto iterator = mCameraIdToDeviceIdMappedCameraIdPairMap.find(cameraId);
     if (iterator != mCameraIdToDeviceIdMappedCameraIdPairMap.end()) {
@@ -102,10 +78,6 @@
 }
 
 int VirtualDeviceCameraIdMapper::getNumberOfCameras(int32_t deviceId) const {
-    if (!vd_flags::camera_device_awareness()) {
-        return 0;
-    }
-
     int numOfCameras = 0;
     std::scoped_lock lock(mLock);
     for (const auto& [deviceIdMappedCameraIdPair, _]
@@ -119,11 +91,6 @@
 
 std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
         int api1CameraId, int32_t deviceId) const {
-    if (!vd_flags::camera_device_awareness()) {
-        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
-        return std::nullopt;
-    }
-
     int matchingCameraIndex = 0;
     std::scoped_lock lock(mLock);
     for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
@@ -140,4 +107,4 @@
     return std::nullopt;
 }
 
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index dd64daa..c76bb1b 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -1,5 +1,5 @@
 package {
-    default_team: "trendy_team_xr_framework",
+    default_team: "trendy_team_virtual_device_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -7,29 +7,29 @@
 cc_defaults {
     name: "libvirtualcamera_defaults",
     shared_libs: [
+        "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "libbinder",
-        "libbinder_ndk",
-        "libcamera_metadata",
-        "libexif",
-        "liblog",
-        "libfmq",
-        "libgui",
-        "libjpeg",
-        "libnativewindow",
-        "libbase",
-        "libcutils",
-        "libui",
-        "libutils",
         "libEGL",
         "libGLESv2",
         "libGLESv3",
-        "android.companion.virtualdevice.flags-aconfig-cc",
+        "libbase",
+        "libbinder",
+        "libbinder_ndk",
+        "libcamera_metadata",
+        "libcutils",
+        "libexif",
+        "libfmq",
+        "libgui",
+        "libjpeg",
+        "liblog",
+        "libnativewindow",
+        "libui",
+        "libutils",
     ],
     static_libs: [
-        "android.hardware.camera.common@1.0-helper",
         "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.common@1.0-helper",
         "android.hardware.camera.device-V2-ndk",
         "android.hardware.camera.metadata-V2-ndk",
         "android.hardware.camera.provider-V2-ndk",
@@ -43,20 +43,21 @@
         "-Wformat",
         "-Wthread-safety",
     ],
+    cpp_std: "c++20",
 }
 
 cc_library_static {
     name: "libvirtualcamera_utils",
     srcs: [
-        "util/JpegUtil.cc",
-        "util/MetadataUtil.cc",
-        "util/Util.cc",
         "util/EglDisplayContext.cc",
         "util/EglFramebuffer.cc",
         "util/EglProgram.cc",
         "util/EglSurfaceTexture.cc",
         "util/EglUtil.cc",
+        "util/JpegUtil.cc",
+        "util/MetadataUtil.cc",
         "util/Permissions.cc",
+        "util/Util.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/OWNERS b/services/camera/virtualcamera/OWNERS
index db34336..22c2eb7 100644
--- a/services/camera/virtualcamera/OWNERS
+++ b/services/camera/virtualcamera/OWNERS
@@ -1,4 +1,3 @@
 # Bug component: 1171888
 include platform/frameworks/base:/services/companion/java/com/android/server/companion/virtual/OWNERS
 caen@google.com
-jsebechlebsky@google.com
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 4c243e3..3c75763 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -16,6 +16,7 @@
 
 // #define LOG_NDEBUG 0
 #define LOG_TAG "VirtualCameraRenderThread"
+
 #include "VirtualCameraRenderThread.h"
 
 #include <android_companion_virtualdevice_flags.h>
@@ -103,6 +104,8 @@
 // The number of nanosecond to wait for the first frame to be drawn on the input surface
 static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
 
+static constexpr double kOneSecondInNanos = 1e9;
+
 NotifyMsg createShutterNotifyMsg(int frameNumber,
                                  std::chrono::nanoseconds timestamp) {
   NotifyMsg msg;
@@ -136,15 +139,13 @@
 
 std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
     EGLDisplay eglDisplay, const uint width, const int height) {
-  const AHardwareBuffer_Desc desc{
-      .width = static_cast<uint32_t>(width),
-      .height = static_cast<uint32_t>(height),
-      .layers = 1,
-      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
-      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
-               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
-      .rfu0 = 0,
-      .rfu1 = 0};
+  const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
+                                  .height = static_cast<uint32_t>(height),
+                                  .layers = 1,
+                                  .format = kHardwareBufferFormat,
+                                  .usage = kHardwareBufferUsage,
+                                  .rfu0 = 0,
+                                  .rfu1 = 0};
 
   AHardwareBuffer* hwBufferPtr;
   int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
@@ -217,10 +218,17 @@
     const RequestSettings& requestSettings) {
   if (requestSettings.fpsRange.has_value()) {
     return std::chrono::nanoseconds(static_cast<uint64_t>(
-        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
+        kOneSecondInNanos / std::max(1, requestSettings.fpsRange->minFps)));
   }
   return std::chrono::nanoseconds(
-      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
+      static_cast<uint64_t>(kOneSecondInNanos / VirtualCameraDevice::kMinFps));
+}
+
+// Translate a frame duration into a fps value with triple decimal precision
+double nanosToFps(std::chrono::nanoseconds frameDuration) {
+  const double oneSecondInNanos = 1e9;
+  const double fpsNanos = oneSecondInNanos / frameDuration.count();
+  return fpsNanos;
 }
 
 }  // namespace
@@ -283,6 +291,7 @@
 
 void VirtualCameraRenderThread::requestTextureUpdate() {
   std::lock_guard<std::mutex> lock(mLock);
+  ALOGV("%s", __func__);
   // If queue is not empty, we don't need to set the mTextureUpdateRequested
   // flag, since the texture will be updated during ProcessCaptureRequestTask
   // processing anyway.
@@ -396,16 +405,20 @@
 
 void VirtualCameraRenderThread::processTask(
     const ProcessCaptureRequestTask& request) {
-  std::chrono::nanoseconds timestamp =
+  ALOGV("%s request frame number %d", __func__, request.getFrameNumber());
+  std::chrono::nanoseconds deviceTime =
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           std::chrono::steady_clock::now().time_since_epoch());
   const std::chrono::nanoseconds lastAcquisitionTimestamp(
-      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
+      mLastAcquisitionTimestampNanoseconds.exchange(deviceTime.count(),
                                                     std::memory_order_relaxed));
 
   if (request.getRequestSettings().fpsRange) {
+    ALOGV("%s request fps {%d,%d}", __func__,
+          request.getRequestSettings().fpsRange->minFps,
+          request.getRequestSettings().fpsRange->maxFps);
     int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
-    timestamp = throttleRendering(maxFps, lastAcquisitionTimestamp, timestamp);
+    deviceTime = throttleRendering(maxFps, lastAcquisitionTimestamp, deviceTime);
   }
 
   // Calculate the maximal amount of time we can afford to wait for next frame.
@@ -416,7 +429,7 @@
       isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
                         : kMaxWaitFirstFrame;
   const std::chrono::nanoseconds elapsedDuration =
-      isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
+      isFirstFrameDrawn ? deviceTime - lastAcquisitionTimestamp : 0ns;
 
   if (elapsedDuration < maxFrameDuration) {
     // We can afford to wait for next frame.
@@ -424,7 +437,7 @@
     // below returns immediatelly.
     bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                             elapsedDuration);
-    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+    deviceTime = std::chrono::duration_cast<std::chrono::nanoseconds>(
         std::chrono::steady_clock::now().time_since_epoch());
     if (!gotNewFrame) {
       if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
@@ -442,14 +455,15 @@
           "%s: No new frame received on input surface after waiting for "
           "%" PRIu64 "ns, repeating last frame.",
           __func__,
-          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
+          static_cast<uint64_t>(
+              (deviceTime - lastAcquisitionTimestamp).count()));
     }
-    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+    mLastAcquisitionTimestampNanoseconds.store(deviceTime.count(),
                                                std::memory_order_relaxed);
   }
   // Acquire new (most recent) image from the Surface.
   mEglSurfaceTexture->updateTexture();
-  std::chrono::nanoseconds captureTimestamp = timestamp;
+  std::chrono::nanoseconds captureTimestamp = deviceTime;
 
   if (flags::camera_timestamp_from_surface()) {
     std::chrono::nanoseconds surfaceTimestamp =
@@ -457,8 +471,11 @@
     if (surfaceTimestamp.count() > 0) {
       captureTimestamp = surfaceTimestamp;
     }
-    ALOGV("%s captureTimestamp:%lld timestamp:%lld", __func__,
-          captureTimestamp.count(), timestamp.count());
+    ALOGV(
+        "%s surfaceTimestamp:%lld deviceTime:%lld captureTimestamp:%lld "
+        "(nanos)",
+        __func__, surfaceTimestamp.count(), deviceTime.count(),
+        captureTimestamp.count());
   }
 
   std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
@@ -488,11 +505,12 @@
     // We're too fast for the configured maxFps, let's wait a bit.
     const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
     ALOGV("Current frame duration would  be %" PRIu64
-          " ns corresponding to, "
+          " ns corresponding to %.3f Fps, "
           "sleeping for %" PRIu64
           " ns before updating texture to match maxFps %d",
           static_cast<uint64_t>(frameDuration.count()),
-          static_cast<uint64_t>(sleepTime.count()), maxFps);
+          nanosToFps(frameDuration), static_cast<uint64_t>(sleepTime.count()),
+          maxFps);
 
     std::this_thread::sleep_for(sleepTime);
     timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 67225c9..18bebde 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -38,6 +38,7 @@
 #include "android/binder_interface_utils.h"
 #include "android/binder_libbinder.h"
 #include "android/binder_status.h"
+#include "android/hardware_buffer.h"
 #include "binder/Status.h"
 #include "fmt/format.h"
 #include "util/EglDisplayContext.h"
@@ -213,6 +214,27 @@
   return ndk::ScopedAStatus::ok();
 }
 
+ndk::ScopedAStatus verifyHardwareBufferSupport() {
+  static constexpr AHardwareBuffer_Desc desc{
+      .width = static_cast<uint32_t>(kVgaWidth),
+      .height = static_cast<uint32_t>(kVgaHeight),
+      .layers = 1,
+      .format = kHardwareBufferFormat,
+      .usage = kHardwareBufferUsage,
+      .rfu0 = 0,
+      .rfu1 = 0};
+  if (AHardwareBuffer_isSupported(&desc)) {
+    return ndk::ScopedAStatus::ok();
+  }
+  ALOGE("%s: Hardware buffer allocation is unsupported for required formats",
+        __func__);
+  return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+      EX_UNSUPPORTED_OPERATION,
+      fmt::format("Cannot create virtual camera, because hardware buffer "
+                  "allocation is unsupported")
+          .c_str());
+}
+
 std::string createCameraId(const int32_t deviceId) {
   return kCameraIdPrefix + std::to_string(deviceId) + "_" +
          std::to_string(sNextIdNumericalPortion++);
@@ -255,12 +277,18 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
-  if (mVerifyEglExtensions) {
+  if (mCheckHardwareRequirements) {
     auto status = verifyRequiredEglExtensions();
     if (!status.isOk()) {
       *_aidl_return = false;
       return status;
     }
+
+    status = verifyHardwareBufferSupport();
+    if (!status.isOk()) {
+      *_aidl_return = false;
+      return status;
+    }
   }
 
   auto status = validateConfiguration(configuration);
@@ -492,7 +520,8 @@
       kDefaultDeviceId, &ret);
   if (!ret) {
     dprintf(err, "Failed to create test camera (error %d)\n", ret);
-    return ret;
+    mTestCameraToken.set(nullptr);
+    return EOPNOTSUPP;
   }
 
   dprintf(out, "Successfully registered test camera %s\n",
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index defa75b..3b4b06d 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -71,10 +71,10 @@
   binder_status_t handleShellCommand(int in, int out, int err, const char** args,
                                      uint32_t numArgs) override;
 
-  // Do not verify presence on required EGL extensions when registering virtual
-  // camera. Only to be used by unit tests.
-  void disableEglVerificationForTest() {
-    mVerifyEglExtensions = false;
+  // Do not check hardware requirements when registering virtual camera.
+  // Only to be used by unit tests.
+  void disableHardwareRequirementsCheck() {
+    mCheckHardwareRequirements = false;
   }
 
   // Default virtual device id (the host device id)
@@ -97,7 +97,7 @@
       EXCLUDES(mLock);
 
   std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
-  bool mVerifyEglExtensions = true;
+  bool mCheckHardwareRequirements = true;
   const PermissionsProxy& mPermissionProxy;
 
   std::mutex mLock;
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index a5921af..a01889a 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -94,9 +94,6 @@
 
 namespace {
 
-using metadata_ptr =
-    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;
-
 using namespace std::chrono_literals;
 
 // Size of request/result metadata fast message queue.
@@ -106,8 +103,7 @@
 // Maximum number of buffers to use per single stream.
 constexpr size_t kMaxStreamBuffers = 2;
 
-// Thumbnail size (0,0) correspods to disabling thumbnail.
-const Resolution kDefaultJpegThumbnailSize(0, 0);
+constexpr int kInvalidStreamId = -1;
 
 camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
     const RequestTemplate type) {
@@ -291,7 +287,7 @@
       .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
 }
 
-}  // namespace
+};  // namespace
 
 VirtualCameraSession::VirtualCameraSession(
     std::shared_ptr<VirtualCameraDevice> cameraDevice,
@@ -299,7 +295,8 @@
     std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
     : mCameraDevice(cameraDevice),
       mCameraDeviceCallback(cameraDeviceCallback),
-      mVirtualCameraClientCallback(virtualCameraClientCallback) {
+      mVirtualCameraClientCallback(virtualCameraClientCallback),
+      mCurrentInputStreamId(kInvalidStreamId) {
   mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
       kMetadataMsgQueueSize, false /* non blocking */);
   if (!mRequestMetadataQueue->isValid()) {
@@ -318,13 +315,15 @@
   {
     std::lock_guard<std::mutex> lock(mLock);
 
-    if (mVirtualCameraClientCallback != nullptr) {
-      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
-    }
-
     if (mRenderThread != nullptr) {
+      mRenderThread->flush();
       mRenderThread->stop();
       mRenderThread = nullptr;
+
+      if (mVirtualCameraClientCallback != nullptr) {
+        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
+      }
+      mCurrentInputStreamId = kInvalidStreamId;
     }
   }
 
@@ -357,7 +356,11 @@
   halStreams.resize(in_requestedConfiguration.streams.size());
 
   if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
-    ALOGE("%s: Requested stream configuration is not supported", __func__);
+    ALOGE(
+        "%s: Requested stream configuration is not supported, closing existing "
+        "session",
+        __func__);
+    close();
     return cameraStatus(Status::ILLEGAL_ARGUMENT);
   }
 
@@ -466,13 +469,11 @@
       // Don't support VIDEO_SNAPSHOT, MANUAL, ZSL templates
       return ndk::ScopedAStatus::fromServiceSpecificError(
           static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
-      ;
     default:
       ALOGE("%s: unknown request template type %d", __FUNCTION__,
             static_cast<int>(in_type));
       return ndk::ScopedAStatus::fromServiceSpecificError(
           static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
-      ;
   }
 }
 
@@ -518,7 +519,7 @@
 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
     const std::vector<CaptureRequest>& in_requests,
     const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
-  ALOGV("%s", __func__);
+  ALOGV("%s: request count: %zu", __func__, in_requests.size());
 
   if (!in_cachesToRemove.empty()) {
     mSessionContext.removeBufferCaches(in_cachesToRemove);
@@ -575,7 +576,7 @@
 
 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
     const CaptureRequest& request) {
-  ALOGV("%s: request: %s", __func__, request.toString().c_str());
+  ALOGV("%s: CaptureRequest { frameNumber:%d }", __func__, request.frameNumber);
 
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
   RequestSettings requestSettings;
diff --git a/services/camera/virtualcamera/aidl/Android.bp b/services/camera/virtualcamera/aidl/Android.bp
index b3fe3ad..b3c0bce 100644
--- a/services/camera/virtualcamera/aidl/Android.bp
+++ b/services/camera/virtualcamera/aidl/Android.bp
@@ -1,5 +1,5 @@
 package {
-    default_team: "trendy_team_xr_framework",
+    default_team: "trendy_team_virtual_device_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
index 5fa8852..fc72e22 100644
--- a/services/camera/virtualcamera/flags/Android.bp
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -1,5 +1,5 @@
 package {
-    default_team: "trendy_team_xr_framework",
+    default_team: "trendy_team_virtual_device_framework",
 }
 
 soong_config_module_type {
diff --git a/services/camera/virtualcamera/fuzzer/Android.bp b/services/camera/virtualcamera/fuzzer/Android.bp
index 6a72167..6b8d9cb 100644
--- a/services/camera/virtualcamera/fuzzer/Android.bp
+++ b/services/camera/virtualcamera/fuzzer/Android.bp
@@ -16,7 +16,7 @@
  *
  *****************************************************************************/
 package {
-    default_team: "trendy_team_xr_framework",
+    default_team: "trendy_team_virtual_device_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/fuzzer/virtual_camera_fuzzer.cc b/services/camera/virtualcamera/fuzzer/virtual_camera_fuzzer.cc
index ebd5e73..df3f0f1 100644
--- a/services/camera/virtualcamera/fuzzer/virtual_camera_fuzzer.cc
+++ b/services/camera/virtualcamera/fuzzer/virtual_camera_fuzzer.cc
@@ -28,6 +28,8 @@
 using ndk::SharedRefBase;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  // TODO(b/183141167): need to rewrite 'dump' to avoid SIGPIPE.
+  signal(SIGPIPE, SIG_IGN);
   std::shared_ptr<VirtualCameraProvider> defaultProvider =
       SharedRefBase::make<VirtualCameraProvider>();
 
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index 543cc10..f67f2b3 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -1,5 +1,5 @@
 package {
-    default_team: "trendy_team_xr_framework",
+    default_team: "trendy_team_virtual_device_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 719f64d..72e03de 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -133,7 +133,7 @@
     mCameraProvider->setCallback(mMockCameraProviderCallback);
     mCameraService = ndk::SharedRefBase::make<VirtualCameraService>(
         mCameraProvider, mMockPermissionsProxy);
-    mCameraService->disableEglVerificationForTest();
+    mCameraService->disableHardwareRequirementsCheck();
 
     ON_CALL(mMockPermissionsProxy, checkCallingPermission)
         .WillByDefault(Return(true));
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index a9eb413..1494643 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -220,6 +220,16 @@
 }
 
 TEST_F(VirtualCameraSessionTest, CloseTriggersClientTerminateCallback) {
+  // First, configure a stream.
+  PixelFormat format = PixelFormat::YCBCR_420_888;
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {
+      createStream(kStreamId, kVgaWidth, kVgaHeight, format)};
+  std::vector<HalStream> halStreams;
+  ASSERT_TRUE(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+
   EXPECT_CALL(*mMockVirtualCameraClientCallback, onStreamClosed(kStreamId))
       .WillOnce(Return(ndk::ScopedAStatus::ok()));
 
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index f99b965..44da9b4 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -15,21 +15,20 @@
  */
 
 // #define LOG_NDEBUG 0
-#include <chrono>
-
-#include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
+#include "EglSurfaceTexture.h"
+
 #include <GLES/gl.h>
 #include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferQueue.h>
 #include <gui/GLConsumer.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <hardware/gralloc.h>
 
+#include <chrono>
 #include <cstdint>
+#include <mutex>
 
-#include "EglSurfaceTexture.h"
 #include "EglUtil.h"
 
 namespace android {
@@ -40,22 +39,27 @@
 // Maximal number of buffers producer can dequeue without blocking.
 constexpr int kBufferProducerMaxDequeueBufferCount = 64;
 
-class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
- public:
-  FrameAvailableListenerProxy(const std::function<void()>& callback)
-      : mOnFrameAvailableCallback(callback) {
-  }
+}  // namespace
 
-  virtual void onFrameAvailable(const BufferItem&) override {
-    ALOGV("%s: onFrameAvailable", __func__);
+EglSurfaceTexture::FrameAvailableListenerProxy::FrameAvailableListenerProxy(
+    EglSurfaceTexture* surface)
+    : mSurface(*surface) {
+}
+
+void EglSurfaceTexture::FrameAvailableListenerProxy::setCallback(
+    const std::function<void()>& callback) {
+  mOnFrameAvailableCallback = callback;
+}
+
+void EglSurfaceTexture::FrameAvailableListenerProxy::onFrameAvailable(
+    const BufferItem&) {
+  long frameNumber = mSurface.mGlConsumer->getFrameNumber();
+  ALOGV("%s: onFrameAvailable frameNumber %ld", __func__, frameNumber);
+  mSurface.mFrameAvailableCondition.notify_all();
+  if (mOnFrameAvailableCallback) {
     mOnFrameAvailableCallback();
   }
-
- private:
-  std::function<void()> mOnFrameAvailableCallback;
-};
-
-}  // namespace
+}
 
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
@@ -65,31 +69,17 @@
     return;
   }
 
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-  mGlConsumer = sp<GLConsumer>::make(mTextureId, GLConsumer::TEXTURE_EXTERNAL,
-                                     false, false);
+  std::tie(mGlConsumer, mSurface) = GLConsumer::create(
+      mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
   mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
   mGlConsumer->setDefaultBufferSize(mWidth, mHeight);
   mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
   mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
-  mSurface = mGlConsumer->getSurface();
   mSurface->setMaxDequeuedBufferCount(kBufferProducerMaxDequeueBufferCount);
-#else
-  BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
-  // Set max dequeue buffer count for producer to maximal value to prevent
-  // blocking when dequeuing input buffers.
-  mBufferProducer->setMaxDequeuedBufferCount(
-      kBufferProducerMaxDequeueBufferCount);
-  mGlConsumer = sp<GLConsumer>::make(
-      mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
-  mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
-  mGlConsumer->setDefaultBufferSize(mWidth, mHeight);
-  mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
-  mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
-  mSurface = sp<Surface>::make(mBufferProducer);
-#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+  mFrameAvailableListenerProxy = sp<FrameAvailableListenerProxy>::make(this);
+  mGlConsumer->setFrameAvailableListener(mFrameAvailableListenerProxy);
 }
 
 EglSurfaceTexture::~EglSurfaceTexture() {
@@ -108,17 +98,29 @@
 
 void EglSurfaceTexture::setFrameAvailableListener(
     const std::function<void()>& listener) {
-  mFrameAvailableListener =
-      sp<FrameAvailableListenerProxy>::make([this, listener]() {
-        mIsFirstFrameDrawn.store(true);
-        listener();
-      });
-  mGlConsumer->setFrameAvailableListener(mFrameAvailableListener);
+  mFrameAvailableListenerProxy->setCallback(listener);
 }
 
 bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout) {
-  return mSurface->waitForNextFrame(mGlConsumer->getFrameNumber(),
-                                    static_cast<nsecs_t>(timeout.count()));
+  std::unique_lock<std::mutex> lock(mWaitForFrameMutex);
+  mGlConsumer->updateTexImage();
+  const long lastRenderedFrame = mGlConsumer->getFrameNumber();
+  const long lastWaitedForFrame = mLastWaitedFrame.exchange(lastRenderedFrame);
+  ALOGV("%s lastRenderedFrame:%ld lastWaitedForFrame: %ld", __func__,
+        lastRenderedFrame, lastWaitedForFrame);
+  if (lastRenderedFrame > lastWaitedForFrame) {
+    return true;
+  }
+  ALOGV(
+      "%s waiting for max %lld ns. Last waited frame:%ld, last rendered "
+      "frame:%ld",
+      __func__, timeout.count(), lastWaitedForFrame, lastRenderedFrame);
+  return mFrameAvailableCondition.wait_for(lock, timeout, [this]() {
+    // Call updateTexImage to update the frame number.
+    mGlConsumer->updateTexImage();
+    const long lastRenderedFrame = mGlConsumer->getFrameNumber();
+    return lastRenderedFrame > mLastWaitedFrame.exchange(lastRenderedFrame);
+  });
 }
 
 std::chrono::nanoseconds EglSurfaceTexture::getTimestamp() {
@@ -126,7 +128,7 @@
 }
 
 bool EglSurfaceTexture::isFirstFrameDrawn() {
-  return mIsFirstFrameDrawn.load();
+  return mGlConsumer->getFrameNumber() > 0;
 }
 
 GLuint EglSurfaceTexture::updateTexture() {
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index 8b4d45e..dabeaf0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -22,16 +22,13 @@
 #include <gui/Surface.h>
 #include <utils/RefBase.h>
 
+#include <atomic>
 #include <chrono>
+#include <condition_variable>
 #include <cstdint>
 
 namespace android {
 
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-class IGraphicBufferProducer;
-class IGraphicBufferConsumer;
-#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
 class GLConsumer;
 
 namespace companion {
@@ -88,18 +85,31 @@
   // Returns true is a frame has ever been drawn on this surface.
   bool isFirstFrameDrawn();
 
+  class FrameAvailableListenerProxy
+      : public ConsumerBase::FrameAvailableListener {
+   public:
+    FrameAvailableListenerProxy(EglSurfaceTexture* surface);
+
+    void setCallback(const std::function<void()>& callback);
+
+    virtual void onFrameAvailable(const BufferItem&) override;
+
+   private:
+    EglSurfaceTexture& mSurface;
+    std::function<void()> mOnFrameAvailableCallback;
+  };
+
  private:
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-  sp<IGraphicBufferProducer> mBufferProducer;
-  sp<IGraphicBufferConsumer> mBufferConsumer;
-#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<GLConsumer> mGlConsumer;
   sp<Surface> mSurface;
   GLuint mTextureId;
   const uint32_t mWidth;
   const uint32_t mHeight;
-  std::atomic_bool mIsFirstFrameDrawn = false;
+  std::atomic_long mLastWaitedFrame = 0;
+  sp<FrameAvailableListenerProxy> mFrameAvailableListenerProxy;
   sp<ConsumerBase::FrameAvailableListener> mFrameAvailableListener;
+  std::condition_variable mFrameAvailableCondition;
+  std::mutex mWaitForFrameMutex;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index 291e105..2225a4b 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -35,6 +35,10 @@
 namespace companion {
 namespace virtualcamera {
 
+constexpr int kHardwareBufferUsage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
+                                     AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN;
+constexpr int kHardwareBufferFormat = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
+
 // RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
 // structure describing YUV plane layout.
 //
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
deleted file mode 100644
index 7a4c3ad..0000000
--- a/services/medialog/Android.bp
+++ /dev/null
@@ -1,38 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_license"],
-}
-
-cc_library {
-    name: "libmedialogservice",
-
-    srcs: [
-        "IMediaLogService.cpp",
-        "MediaLogService.cpp",
-    ],
-
-    header_libs: [
-        "libmedia_headers",
-    ],
-
-    shared_libs: [
-        "libaudioutils",
-        "libbinder",
-        "liblog",
-        "libmediautils",
-        "libnblog",
-        "libutils",
-        "packagemanager_aidl-cpp",
-    ],
-
-    export_include_dirs: ["."],
-
-    cflags: [
-        "-Wall",
-        "-Werror",
-    ],
-}
diff --git a/services/medialog/IMediaLogService.cpp b/services/medialog/IMediaLogService.cpp
deleted file mode 100644
index 0e9b01e..0000000
--- a/services/medialog/IMediaLogService.cpp
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IMediaLogService"
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <media/IMediaLogService.h>
-
-namespace android {
-
-enum {
-    REGISTER_WRITER = IBinder::FIRST_CALL_TRANSACTION,
-    UNREGISTER_WRITER,
-    REQUEST_MERGE_WAKEUP,
-};
-
-class BpMediaLogService : public BpInterface<IMediaLogService>
-{
-public:
-    explicit BpMediaLogService(const sp<IBinder>& impl)
-        : BpInterface<IMediaLogService>(impl)
-    {
-    }
-
-    virtual void    registerWriter(const sp<IMemory>& shared, size_t size, const char *name) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(shared));
-        data.writeInt64((int64_t) size);
-        data.writeCString(name);
-        status_t status __unused = remote()->transact(REGISTER_WRITER, data, &reply);
-        // FIXME ignores status
-    }
-
-    virtual void    unregisterWriter(const sp<IMemory>& shared) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(shared));
-        status_t status __unused = remote()->transact(UNREGISTER_WRITER, data, &reply);
-        // FIXME ignores status
-    }
-
-    virtual void    requestMergeWakeup() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());
-        status_t status __unused = remote()->transact(REQUEST_MERGE_WAKEUP, data, &reply);
-        // FIXME ignores status
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(MediaLogService, "android.media.IMediaLogService");
-
-// ----------------------------------------------------------------------
-
-status_t BnMediaLogService::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-
-        case REGISTER_WRITER: {
-            CHECK_INTERFACE(IMediaLogService, data, reply);
-            sp<IMemory> shared = interface_cast<IMemory>(data.readStrongBinder());
-            size_t size = (size_t) data.readInt64();
-            const char *name = data.readCString();
-            registerWriter(shared, size, name);
-            return NO_ERROR;
-        }
-
-        case UNREGISTER_WRITER: {
-            CHECK_INTERFACE(IMediaLogService, data, reply);
-            sp<IMemory> shared = interface_cast<IMemory>(data.readStrongBinder());
-            unregisterWriter(shared);
-            return NO_ERROR;
-        }
-
-        case REQUEST_MERGE_WAKEUP: {
-            CHECK_INTERFACE(IMediaLogService, data, reply);
-            requestMergeWakeup();
-            return NO_ERROR;
-        }
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp
deleted file mode 100644
index abe5f16..0000000
--- a/services/medialog/MediaLogService.cpp
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "MediaLog"
-//#define LOG_NDEBUG 0
-
-#include <sys/mman.h>
-#include <utils/Log.h>
-#include <binder/PermissionCache.h>
-#include <media/nblog/Merger.h>
-#include <media/nblog/NBLog.h>
-#include <mediautils/ServiceUtilities.h>
-#include "MediaLogService.h"
-
-namespace android {
-
-static const char kDeadlockedString[] = "MediaLogService may be deadlocked\n";
-
-// mMerger, mMergeReader, and mMergeThread all point to the same location in memory
-// mMergerShared. This is the local memory FIFO containing data merged from all
-// individual thread FIFOs in shared memory. mMergeThread is used to periodically
-// call NBLog::Merger::merge() to collect the data and write it to the FIFO, and call
-// NBLog::MergeReader::getAndProcessSnapshot to process the merged data.
-MediaLogService::MediaLogService() :
-    BnMediaLogService(),
-    mMergerShared((NBLog::Shared*) malloc(NBLog::Timeline::sharedSize(kMergeBufferSize))),
-    mMerger(mMergerShared, kMergeBufferSize),
-    mMergeReader(mMergerShared, kMergeBufferSize, mMerger),
-    mMergeThread(new NBLog::MergeThread(mMerger, mMergeReader))
-{
-    mMergeThread->run("MergeThread");
-}
-
-MediaLogService::~MediaLogService()
-{
-    mMergeThread->requestExit();
-    mMergeThread->setTimeoutUs(0);
-    mMergeThread->join();
-    free(mMergerShared);
-}
-
-void MediaLogService::registerWriter(const sp<IMemory>& shared, size_t size, const char *name)
-{
-    if (!isAudioServerOrMediaServerUid(IPCThreadState::self()->getCallingUid()) || shared == 0 ||
-            size < kMinSize || size > kMaxSize || name == NULL ||
-            shared->size() < NBLog::Timeline::sharedSize(size)) {
-        return;
-    }
-    sp<NBLog::Reader> reader(new NBLog::Reader(shared, size, name)); // Reader handled by merger
-    sp<NBLog::DumpReader> dumpReader(new NBLog::DumpReader(shared, size, name)); // for dumpsys
-    Mutex::Autolock _l(mLock);
-    mDumpReaders.add(dumpReader);
-    mMerger.addReader(reader);
-}
-
-void MediaLogService::unregisterWriter(const sp<IMemory>& shared)
-{
-    if (!isAudioServerOrMediaServerUid(IPCThreadState::self()->getCallingUid()) || shared == 0) {
-        return;
-    }
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mDumpReaders.size(); ) {
-        if (mDumpReaders[i]->isIMemory(shared)) {
-            mDumpReaders.removeAt(i);
-            // TODO mMerger.removeReaders(shared)
-        } else {
-            i++;
-        }
-    }
-}
-
-bool MediaLogService::dumpTryLock(Mutex& mutex)
-{
-    bool locked = false;
-    for (int i = 0; i < kDumpLockRetries; ++i) {
-        if (mutex.tryLock() == NO_ERROR) {
-            locked = true;
-            break;
-        }
-        usleep(kDumpLockSleepUs);
-    }
-    return locked;
-}
-
-status_t MediaLogService::dump(int fd, const Vector<String16>& args __unused)
-{
-    if (!(isAudioServerOrMediaServerUid(IPCThreadState::self()->getCallingUid())
-            || dumpAllowed())) {
-        dprintf(fd, "Permission Denial: can't dump media.log from pid=%d, uid=%d\n",
-                IPCThreadState::self()->getCallingPid(),
-                IPCThreadState::self()->getCallingUid());
-        return NO_ERROR;
-    }
-
-    if (args.size() > 0) {
-        const String8 arg0(args[0]);
-        if (!strcmp(arg0.c_str(), "-r")) {
-            // needed because mReaders is protected by mLock
-            bool locked = dumpTryLock(mLock);
-
-            // failed to lock - MediaLogService is probably deadlocked
-            if (!locked) {
-                String8 result(kDeadlockedString);
-                if (fd >= 0) {
-                    write(fd, result.c_str(), result.size());
-                } else {
-                    ALOGW("%s:", result.c_str());
-                }
-                return NO_ERROR;
-            }
-
-            for (const auto &dumpReader : mDumpReaders) {
-                if (fd >= 0) {
-                    dprintf(fd, "\n%s:\n", dumpReader->name().c_str());
-                    dumpReader->dump(fd, 0 /*indent*/);
-                } else {
-                    ALOGI("%s:", dumpReader->name().c_str());
-                }
-            }
-            mLock.unlock();
-        } else {
-            mMergeReader.dump(fd, args);
-        }
-    }
-    return NO_ERROR;
-}
-
-status_t MediaLogService::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
-        uint32_t flags)
-{
-    return BnMediaLogService::onTransact(code, data, reply, flags);
-}
-
-void MediaLogService::requestMergeWakeup() {
-    mMergeThread->wakeup();
-}
-
-}   // namespace android
diff --git a/services/medialog/MediaLogService.h b/services/medialog/MediaLogService.h
deleted file mode 100644
index 21df898..0000000
--- a/services/medialog/MediaLogService.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MEDIA_LOG_SERVICE_H
-#define ANDROID_MEDIA_LOG_SERVICE_H
-
-#include <binder/BinderService.h>
-#include <media/IMediaLogService.h>
-#include <media/nblog/Merger.h>
-#include <media/nblog/NBLog.h>
-
-namespace android {
-
-class MediaLogService : public BinderService<MediaLogService>, public BnMediaLogService
-{
-    friend class BinderService<MediaLogService>;    // for MediaLogService()
-public:
-    MediaLogService();
-    virtual ~MediaLogService() override;
-    virtual void onFirstRef() { }
-
-    static const char*  getServiceName() { return "media.log"; }
-
-    static const size_t kMinSize = 0x100;
-    static const size_t kMaxSize = 0x10000;
-    virtual void        registerWriter(const sp<IMemory>& shared, size_t size, const char *name);
-    virtual void        unregisterWriter(const sp<IMemory>& shared);
-
-    virtual status_t    dump(int fd, const Vector<String16>& args);
-    virtual status_t    onTransact(uint32_t code, const Parcel& data, Parcel* reply,
-                                uint32_t flags);
-
-    virtual void        requestMergeWakeup() override;
-
-private:
-
-    // Internal dump
-    static const int kDumpLockRetries = 50;
-    static const int kDumpLockSleepUs = 20000;
-    // Size of merge buffer, in bytes
-    static const size_t kMergeBufferSize = 64 * 1024; // TODO determine good value for this
-    static bool dumpTryLock(Mutex& mutex);
-
-    Mutex               mLock;
-
-    Vector<sp<NBLog::DumpReader>> mDumpReaders;   // protected by mLock
-
-    // FIXME Need comments on all of these, especially about locking
-    NBLog::Shared *mMergerShared;
-    NBLog::Merger mMerger;
-    NBLog::MergeReader mMergeReader;
-    const sp<NBLog::MergeThread> mMergeThread;
-};
-
-}   // namespace android
-
-#endif  // ANDROID_MEDIA_LOG_SERVICE_H
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
deleted file mode 100644
index 6b4ee5f..0000000
--- a/services/medialog/fuzzer/Android.bp
+++ /dev/null
@@ -1,51 +0,0 @@
-package {
-    default_team: "trendy_team_media_framework_audio",
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_license"],
-}
-
-cc_fuzz {
-    name: "media_log_fuzzer",
-    static_libs: [
-        "libmedialogservice",
-    ],
-    srcs: [
-        "media_log_fuzzer.cpp",
-    ],
-    header_libs: [
-        "libmedia_headers",
-    ],
-    shared_libs: [
-        "libaudioutils",
-        "libbinder",
-        "liblog",
-        "libmediautils",
-        "libnblog",
-        "libutils",
-    ],
-    include_dirs: [
-        "frameworks/av/services/medialog",
-    ],
-    cflags: [
-        "-Wall",
-        "-Werror",
-    ],
-    fuzz_config: {
-        cc: [
-            "android-audio-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-        hotlists: [
-            "4593311",
-        ],
-        description: "The fuzzer targets the APIs of libmedialogservice library",
-        vector: "local_privileges_required",
-        service_privilege: "constrained",
-        users: "multi_user",
-        fuzzed_code_usage: "future_version",
-    },
-}
diff --git a/services/medialog/fuzzer/README.md b/services/medialog/fuzzer/README.md
deleted file mode 100644
index b79e5c8..0000000
--- a/services/medialog/fuzzer/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# Fuzzer for libmedialogservice
-
-## Plugin Design Considerations
-The fuzzer plugin for libmedialogservice is designed based on the understanding of the
-service and tries to achieve the following:
-
-##### Maximize code coverage
-The configuration parameters are not hardcoded, but instead selected based on
-incoming data. This ensures more code paths are reached by the fuzzer.
-
-medialogservice supports the following parameters:
-1. Writer name (parameter name: `writerNameIdx`)
-2. Log size (parameter name: `logSize`)
-3. Enable dump before unrgister API (parameter name: `shouldDumpBeforeUnregister`)
-5. size of string for log dump (parameter name: `numberOfLines`)
-
-| Parameter| Valid Values| Configured Value|
-|------------- |-------------| ----- |
-| `writerNameIdx` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
-| `logSize` | In the range `256 to 65536` | Value obtained from FuzzedDataProvider |
-| `shouldDumpBeforeUnregister` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
-| `numberOfLines` | In the range `0 to 65535` | Value obtained from FuzzedDataProvider |
-
-This also ensures that the plugin is always deterministic for any given input.
-
-## Build
-
-This describes steps to build media_log_fuzzer binary.
-
-### Android
-
-#### Steps to build
-Build the fuzzer
-```
-  $ mm -j$(nproc) media_log_fuzzer
-```
-
-#### Steps to run
-Create a directory CORPUS_DIR and copy some files to that folder
-Push this directory to device.
-
-To run on device
-```
-  $ adb sync data
-  $ adb shell /data/fuzz/arm64/media_log_fuzzer/media_log_fuzzer CORPUS_DIR
-```
-
-## References:
- * http://llvm.org/docs/LibFuzzer.html
- * https://github.com/google/oss-fuzz
diff --git a/services/medialog/fuzzer/media_log_fuzzer.cpp b/services/medialog/fuzzer/media_log_fuzzer.cpp
deleted file mode 100644
index bd50d0f..0000000
--- a/services/medialog/fuzzer/media_log_fuzzer.cpp
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include <binder/IMemory.h>
-#include <binder/MemoryDealer.h>
-#include <private/android_filesystem_config.h>
-#include "MediaLogService.h"
-#include "fuzzer/FuzzedDataProvider.h"
-
-constexpr const char* kWriterNames[2] = {"FastMixer", "FastCapture"};
-constexpr size_t kMinSize = 0x100;
-constexpr size_t kMaxSize = 0x10000;
-constexpr size_t kLogMemorySize = 400 * 1024;
-constexpr size_t kMaxNumLines = USHRT_MAX;
-
-using namespace android;
-
-class MediaLogFuzzer {
-   public:
-    void init();
-    void process(const uint8_t* data, size_t size);
-
-   private:
-    sp<MemoryDealer> mMemoryDealer = nullptr;
-    sp<MediaLogService> mService = nullptr;
-};
-
-void MediaLogFuzzer::init() {
-    setuid(AID_MEDIA);
-    mService = new MediaLogService();
-    mMemoryDealer = new MemoryDealer(kLogMemorySize, "MediaLogFuzzer", MemoryHeapBase::READ_ONLY);
-}
-
-void MediaLogFuzzer::process(const uint8_t* data, size_t size) {
-    FuzzedDataProvider fuzzedDataProvider(data, size);
-    size_t writerNameIdx =
-        fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, std::size(kWriterNames) - 1);
-    bool shouldDumpBeforeUnregister = fuzzedDataProvider.ConsumeBool();
-    size_t logSize = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
-    sp<IMemory> logBuffer = mMemoryDealer->allocate(NBLog::Timeline::sharedSize(logSize));
-    Vector<String16> args;
-    size_t numberOfLines = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
-    for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
-        args.add(static_cast<String16>(fuzzedDataProvider.ConsumeRandomLengthString().c_str()));
-    }
-    const char* fileName = "logDumpFile";
-    int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
-    fuzzedDataProvider.ConsumeData(logBuffer->unsecurePointer(), logBuffer->size());
-    mService->registerWriter(logBuffer, logSize, kWriterNames[writerNameIdx]);
-    if (shouldDumpBeforeUnregister) {
-        mService->dump(fd, args);
-        mService->unregisterWriter(logBuffer);
-    } else {
-        mService->unregisterWriter(logBuffer);
-        mService->dump(fd, args);
-    }
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
-    MediaLogFuzzer mediaLogFuzzer = MediaLogFuzzer();
-    mediaLogFuzzer.init();
-    mediaLogFuzzer.process(data, size);
-    return 0;
-}
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index c01d46e..6c2424d 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -315,7 +315,9 @@
     int result = 0;
 
 #ifdef STATSD_ENABLE
-    result = stats::media_metrics::stats_write(args...);
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(args...);
+    }
 #endif
     return result;
 }
@@ -331,7 +333,9 @@
     std::stringstream ss;
 
 #ifdef STATSD_ENABLE
-    result = stats::media_metrics::stats_write(args...);
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(args...);
+    }
     ss << "result:" << result;
 #endif
     ss << " { ";
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 095832c..253fc0a 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -174,7 +174,11 @@
     const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
     const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
     const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
-    const int result = stats::media_metrics::stats_write(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
+    int result = 0;
+
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(
+                                         stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
                                          audio_device,
                                          duration_secs,
                                          (float)volume,
@@ -183,6 +187,7 @@
                                          (float)min_volume,
                                          max_volume_duration_secs,
                                          (float)max_volume);
+    }
 
     std::stringstream log;
     log << "result:" << result << " {"
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 9a9bc1d..37b44f7 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -108,11 +108,14 @@
     }
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiopolicy_reported:"
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 63c61ec..008f12f 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -112,7 +112,7 @@
 
     int32_t port_id = -1;
     if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
-        metrics_proto.set_port_id(count);
+        metrics_proto.set_port_id(port_id);
     }
 
     int32_t frame_count = -1;
@@ -148,12 +148,15 @@
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized,
         log_session_id.c_str());
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiorecord_reported:"
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 3056605..351a8bc 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -189,11 +189,14 @@
     }
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiothread_reported:"
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 1fc7fb4..944c616 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -135,12 +135,15 @@
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(
                                stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                bf_serialized,
                                log_session_id.c_str());
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiotrack_reported:"
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 2f7c4f9..892db2a 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -760,7 +760,9 @@
         parseVector(judderScoreHistogramStr, &statsJudderScoreHistogram);
         std::vector<int32_t> statsJudderScoreHistogramBuckets;
         parseVector(judderScoreHistogramBucketsStr, &statsJudderScoreHistogramBuckets);
-        int result = stats_write(
+        int result = -1;
+        if (__builtin_available(android 33, *)) {
+          result = stats_write(
             MEDIA_CODEC_RENDERED,
             statsUid,
             statsCodecId,
@@ -794,6 +796,7 @@
             statsJudderRate,
             statsJudderScoreHistogram,
             statsJudderScoreHistogramBuckets);
+        }
         ALOGE_IF(result < 0, "Failed to record MEDIA_CODEC_RENDERED atom (%d)", result);
     }
 
@@ -803,10 +806,13 @@
         return false;
     }
     const stats::media_metrics::BytesField bf_serialized(serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
                                timestampNanos, packageName.c_str(), packageVersionCode,
                                mediaApexVersion,
                                bf_serialized);
+    }
 
     std::stringstream log;
     log << "result:" << result << " {"
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index e5f7190..d78cfc0 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -72,13 +72,16 @@
     // This field is not used anymore.
     const std::string  kUnusedField("");
     const stats::media_metrics::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         vendor.c_str(),
         description.c_str(),
         bf_serialized);
+    }
 
     std::stringstream log;
     log << "result:" << result << " {"
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 9345df6..46e33dc 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -97,11 +97,14 @@
     }
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_extractor_reported:"
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 458bd32..f1ed340 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -83,7 +83,9 @@
     item->getString("android.media.mediaparser.logSessionId", &logSessionId);
     logSessionId = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+        result = stats::media_metrics::stats_write(
                                stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED,
                                timestamp_nanos,
                                package_name.c_str(),
@@ -100,6 +102,7 @@
                                videoWidth,
                                videoHeight,
                                logSessionId.c_str());
+    }
 
     std::stringstream log;
     log << "result:" << result << " {"
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index fd545f4..628a9f2 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -154,11 +154,15 @@
     }
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+        // RBE this could be expanded to include the bf_serialized and such too.
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
+    }
 
     std::stringstream log;
     log << "result:" << result << " {"
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index efa284b..1bae826 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -180,11 +180,14 @@
     }
 
     const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    const int result = stats::media_metrics::stats_write(
+    int result = 0;
+    if (__builtin_available(android 33, *)) {
+      result = stats::media_metrics::stats_write(
         stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
+    }
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_recorder_reported:"
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 7f66859..9509ccb 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -102,10 +102,12 @@
         "libstatssocket",
         "libprotobuf-cpp-lite",
         "libactivitymanager_aidl",
+        "libaconfig_storage_read_api_cc",
         "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
+        "android.media.codec-aconfig-cc",
         "resourceobserver_aidl_interface-V1-ndk",
         "libplatformprotos",
     ],
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index f12a5d6..3704c5e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -1105,6 +1105,24 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::getMediaResourceUsageReport(
+        std::vector<MediaResourceParcel>* resources) {
+    // Not implemented
+    if (resources) {
+        resources->clear();
+    }
+    return Status::ok();
+}
+
+Status ResourceManagerService::updateResource(const ClientInfoParcel& clientInfo,
+                                              const std::vector<MediaResourceParcel>& resources) {
+    (void)clientInfo;
+    (void)resources;
+
+    // Not implemented
+    return Status::ok();
+}
+
 long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
     return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
 }
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index dc1600a..017fe29 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -83,6 +83,9 @@
                        const std::shared_ptr<IResourceManagerClient>& client,
                        const std::vector<MediaResourceParcel>& resources) override;
 
+    Status updateResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
+
     Status removeResource(const ClientInfoParcel& clientInfo,
                           const std::vector<MediaResourceParcel>& resources) override;
 
@@ -112,6 +115,8 @@
 
     Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
 
+    Status getMediaResourceUsageReport(std::vector<MediaResourceParcel>* resources) override;
+
 protected:
     // To get notifications when a resource is added for the first time.
     void onFirstAdded(const MediaResourceParcel& res, uid_t uid);
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
index 0a0a8f4..35eb0de 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
@@ -21,6 +21,8 @@
 #include <binder/IPCThreadState.h>
 #include <mediautils/ProcessInfo.h>
 
+#include <android_media_codec.h>
+
 #include "DefaultResourceModel.h"
 #include "ClientImportanceReclaimPolicy.h"
 #include "ProcessPriorityReclaimPolicy.h"
@@ -218,11 +220,45 @@
     {
         // Update the ResourceTracker about the change in the configuration.
         std::scoped_lock lock{mLock};
-        mResourceTracker->updateResource(clientConfig.clientInfo);
+        mResourceTracker->updateClientImportance(clientConfig.clientInfo);
     }
     return ResourceManagerService::notifyClientConfigChanged(clientConfig);
 }
 
+Status ResourceManagerServiceNew::getMediaResourceUsageReport(
+        std::vector<MediaResourceParcel>* resources) {
+    if (!resources) {
+        return Status::fromStatus(INVALID_OPERATION);
+    }
+
+    resources->clear();
+    if (!android::media::codec::codec_availability() ||
+        !android::media::codec::codec_availability_support()) {
+        return Status::fromStatus(INVALID_OPERATION);
+    }
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->getMediaResourceUsageReport(resources);
+
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::updateResource(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("updateResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).c_str());
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->updateResource(clientInfo, resources);
+
+    return Status::ok();
+}
+
 void ResourceManagerServiceNew::getResourceDump(std::string& resourceLog) const {
     std::scoped_lock lock{mLock};
     mResourceTracker->dump(resourceLog);
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.h b/services/mediaresourcemanager/ResourceManagerServiceNew.h
index 0599936..54986b9 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceNew.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.h
@@ -52,6 +52,9 @@
                        const std::shared_ptr<IResourceManagerClient>& client,
                        const std::vector<MediaResourceParcel>& resources) override;
 
+    Status updateResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
+
     Status removeResource(const ClientInfoParcel& clientInfo,
                           const std::vector<MediaResourceParcel>& resources) override;
 
@@ -78,6 +81,8 @@
 
     Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
 
+    Status getMediaResourceUsageReport(std::vector<MediaResourceParcel>* resources) override;
+
     binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
 
     friend class ResourceTracker;
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 49f68e9..0fef655 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -87,6 +87,29 @@
     return false;
 }
 
+bool ResourceList::update(const MediaResourceParcel& res, long* removedEntryValue) {
+    for (std::vector<MediaResourceParcel>::iterator it = mResourceList.begin();
+         it != mResourceList.end(); it++) {
+        if (it->type == res.type && it->subType == res.subType && it->id == res.id) {
+            if (res.value == 0) {
+                // This entry will be removed.
+                if (removedEntryValue) {
+                    *removedEntryValue = it->value;
+                }
+                mResourceList.erase(it);
+            } else {
+                // Update the new value.
+                it->value = res.value;
+            }
+            return true;
+        }
+    }
+
+    // Add the new entry.
+    mResourceList.push_back(res);
+    return false;
+}
+
 std::string ResourceList::toString() const {
     std::string str;
     for (const ::aidl::android::media::MediaResourceParcel& res : mResourceList) {
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
index e8f1515..4ff8211 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -117,6 +117,16 @@
     bool remove(const ::aidl::android::media::MediaResourceParcel& res,
                 long* removedEntryValue = nullptr);
 
+    // updates the resource usage by performing one of the following operations:
+    //  - If the resource is already there:
+    //    - If the new value is 0, remove the entry.
+    //      Also returns the removed entry's value through removedEntryValue (if non-null)
+    //    - Else update the resource value with the new value
+    //    - return true
+    //  - Else, add it as a new entry and return false.
+    bool update(const ::aidl::android::media::MediaResourceParcel& res,
+                long* removedEntryValue = nullptr);
+
     // Returns true if there aren't any resource entries.
     bool empty() const {
         return mResourceList.empty();
diff --git a/services/mediaresourcemanager/ResourceTracker.cpp b/services/mediaresourcemanager/ResourceTracker.cpp
index 3ee20cd..b8db8da 100644
--- a/services/mediaresourcemanager/ResourceTracker.cpp
+++ b/services/mediaresourcemanager/ResourceTracker.cpp
@@ -136,7 +136,60 @@
     return !resourceAdded.empty();
 }
 
-bool ResourceTracker::updateResource(const aidl::android::media::ClientInfoParcel& clientInfo) {
+bool ResourceTracker::updateResource(
+        const aidl::android::media::ClientInfoParcel& clientInfo,
+        const std::vector<::aidl::android::media::MediaResourceParcel>& resources) {
+    ResourceInfos& infos = getResourceInfosForEdit(clientInfo.pid);
+
+    ResourceInfos::iterator found = infos.find(clientInfo.id);
+    if (found == infos.end()) {
+        return false;
+    }
+
+    ResourceInfo& info = found->second;
+    ResourceList resourceAdded;
+    ResourceList resourceRemoved;
+
+    for (const MediaResourceParcel& res : resources) {
+        if (res.value < 0) {
+            ALOGV("%s: Ignoring request to update negative value of resource", __func__);
+            continue;
+        }
+
+        // Since resource value/amount is non-negative, we are using this magic value (-1)
+        // to detect whether the resource has been removed or updated.
+        long removedEntryValue = -1;
+        if (info.resources.update(res, &removedEntryValue)) {
+            // Check if the removedEntryValue has been updated.
+            if (removedEntryValue != -1) {
+                // An entry was removed.
+                onLastRemoved(res, info.uid);
+                // Add it to the list of removed resources for observers.
+                MediaResourceParcel actualRemoved = res;
+                actualRemoved.value = removedEntryValue;
+                resourceRemoved.add(actualRemoved);
+            }
+        } else {
+            // A new entry is added.
+            onFirstAdded(res, info.uid);
+            // Add it to the list of added resources for observers.
+            resourceAdded.add(res);
+        }
+    }
+    if (mObserverService != nullptr) {
+        if (!resourceAdded.empty()) {
+            mObserverService->onResourceAdded(info.uid, clientInfo.pid, resourceAdded);
+        }
+        if (!resourceRemoved.empty()) {
+            mObserverService->onResourceRemoved(info.uid, clientInfo.pid, resourceRemoved);
+        }
+    }
+
+    return true;
+}
+
+bool ResourceTracker::updateClientImportance(
+        const aidl::android::media::ClientInfoParcel& clientInfo) {
     ResourceInfos& infos = getResourceInfosForEdit(clientInfo.pid);
 
     ResourceInfos::iterator found = infos.find(clientInfo.id);
@@ -771,4 +824,20 @@
     return (callingPidPriority < priority);
 }
 
+void ResourceTracker::getMediaResourceUsageReport(
+        std::vector<MediaResourceParcel>* resources) const {
+    ResourceList resourceUsageList;
+
+    // Add up all the resource usage by every process into resourceUsageList
+    for (const auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        for (const auto& [infoKey, /* ResourceInfo */ info] : infos) {
+            for (const MediaResourceParcel& res : info.resources.getResources()) {
+                resourceUsageList.add(res);
+            }
+        }
+    }
+
+    *resources = resourceUsageList.getResources();
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceTracker.h b/services/mediaresourcemanager/ResourceTracker.h
index 20c904d..419aa59 100644
--- a/services/mediaresourcemanager/ResourceTracker.h
+++ b/services/mediaresourcemanager/ResourceTracker.h
@@ -67,8 +67,12 @@
                      const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
                      const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
 
-    // Update the resource info, if there is any changes.
-    bool updateResource(const aidl::android::media::ClientInfoParcel& clientInfo);
+    // Update the importance of the client.
+    bool updateClientImportance(const aidl::android::media::ClientInfoParcel& clientInfo);
+
+    // Update the resource info for given list of resources.
+    bool updateResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                        const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
 
     // Remove a set of resources from the given client.
     // returns true on success, false otherwise.
@@ -211,6 +215,9 @@
         return mMap;
     }
 
+    // For each MediaResourceType, get amount of resource being used at the moment.
+    void getMediaResourceUsageReport(std::vector<MediaResourceParcel>* resources) const;
+
 private:
     // Get ResourceInfos associated with the given process.
     // If none exists, this method will create and associate an empty object and return it.
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 5071fa3..e79cb1e 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -56,6 +56,17 @@
             in MediaResourceParcel[] resources);
 
     /**
+     * Update the client with the given list of resources.
+     * This is used to update the existing resource values with an updated value.
+     *
+     * @param clientInfo info of the calling client.
+     * @param resources an array of resources to be updated.
+     */
+    void updateResource(
+            in ClientInfoParcel clientInfo,
+            in MediaResourceParcel[] resources);
+
+    /**
      * Remove the listed resources from a client.
      *
      * @param clientInfo info of the calling client.
@@ -168,4 +179,13 @@
      * @param clientConfig Configuration information of the client.
      */
     void notifyClientConfigChanged(in ClientConfigParcel clientConfig);
+
+    /**
+     * Get a list of all the MediaResources currently being used.
+     *
+     * This provides information on current resource usage by the system.
+     *
+     * @param resources List of resources being used when this call is made.
+     */
+    void getMediaResourceUsageReport(out MediaResourceParcel[] resources);
 }
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
index 353e59c..3a5428f 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
@@ -33,4 +33,12 @@
     kBattery = 5,
     // DRM Session resource type
     kDrmSession = 6,
+
+    // Resources reserved for SW component store
+    kSwResourceTypeMin = 0x1000,
+    kSwResourceTypeMax = 0x1FFF,
+
+    // Resources reserved for HW component store
+    kHwResourceTypeMin = 0x2000,
+    kHwResourceTypeMax = 0x2FFF,
 }
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 3f04f69..0202625 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -33,17 +33,19 @@
         "service_fuzzer_defaults",
     ],
     static_libs: [
+        "android.media.codec-aconfig-cc",
         "liblog",
         "libresourcemanagerservice",
     ],
     shared_libs: [
+        "aconfig_mediacodec_flags_c_lib",
+        "libaconfig_storage_read_api_cc",
+        "libactivitymanager_aidl",
         "libmedia",
         "libmediautils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
-        "libactivitymanager_aidl",
-        "aconfig_mediacodec_flags_c_lib",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
index e69368d..6d3f17d 100644
--- a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -26,6 +26,8 @@
 using ndk::SharedRefBase;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+   // TODO(b/183141167): need to rewrite 'dump' to avoid SIGPIPE.
+   signal(SIGPIPE, SIG_IGN);
    auto service = SharedRefBase::make<ResourceObserverService>();
    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
    return 0;
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index ac41959..c39b681 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -15,6 +15,7 @@
     static_libs: [
         "libresourcemanagerservice",
         "aconfig_mediacodec_flags_c_lib",
+        "android.media.codec-aconfig-cc",
     ],
     shared_libs: [
         "libbinder",
@@ -27,6 +28,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "libaconfig_storage_read_api_cc",
         "server_configurable_flags",
     ],
     defaults: [
@@ -70,6 +72,7 @@
         "libresourcemanagerservice",
         "resourceobserver_aidl_interface-V1-ndk",
         "aconfig_mediacodec_flags_c_lib",
+        "android.media.codec-aconfig-cc",
     ],
     shared_libs: [
         "libbinder",
@@ -82,6 +85,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "libaconfig_storage_read_api_cc",
         "server_configurable_flags",
     ],
     defaults: [
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 027987e..0920e12 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -17,6 +17,8 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ResourceManagerService_test"
 
+#include <sstream>
+
 #include <utils/Log.h>
 
 #include "ResourceManagerServiceTestUtils.h"
@@ -1829,6 +1831,248 @@
         // aren't any lower priority clients or lower priority processes.
         EXPECT_FALSE(doReclaimResource(lowPriPidClientInfos[0]));
     }
+
+    // Gets 5 types of HW resources of random count (11 - 110)
+    std::vector<MediaResourceParcel> getHwResources() {
+        const int32_t resourceType = static_cast<int32_t>(MediaResourceType::kHwResourceTypeMin);
+        static const std::vector<std::pair<int32_t, uint64_t>> hwResources =
+            { {resourceType + 1, 10},
+              {resourceType + 2, 10},
+              {resourceType + 3, 10},
+              {resourceType + 4, 10},
+              {resourceType + 5, 10},
+            };
+
+        // Seed the random number generator with the current time
+        srand(time(0));
+        // Generate a random number between 1 and 100
+        int random_num = rand() % 100 + 1;
+
+        std::vector<MediaResourceParcel> resources;
+        for (const auto& resource : hwResources) {
+            MediaResourceParcel res;
+            res.type = static_cast<MediaResourceType>(resource.first);
+            res.value = resource.second + random_num++;
+            resources.push_back(res);
+        }
+
+        return resources;
+    }
+
+    void addResources(const std::vector<MediaResourceParcel>& rhs,
+                      std::vector<MediaResourceParcel>& lhs) {
+        for (MediaResourceParcel& res : lhs) {
+            auto found = std::find_if(rhs.begin(), rhs.end(),
+                                      [res](const MediaResourceParcel& item) {
+                                          return item.type == res.type; });
+
+            if (found != rhs.end() && found->value > 0) {
+                res.value += found->value;
+            }
+        }
+    }
+
+    void subtractResources(const std::vector<MediaResourceParcel>& rhs,
+                           std::vector<MediaResourceParcel>& lhs) {
+        for (MediaResourceParcel& res : lhs) {
+            auto found = std::find_if(rhs.begin(), rhs.end(),
+                                      [res](const MediaResourceParcel& item) {
+                                          return item.type == res.type; });
+
+            if (found != rhs.end() && found->value > 0) {
+                res.value -= found->value;
+            }
+        }
+    }
+
+    void displayResources(const char* what, const std::vector<MediaResourceParcel>& resources) {
+        std::stringstream debug;
+        debug << what << ": ";
+        for (const MediaResourceParcel& res : resources) {
+            debug << "{ " << static_cast<int32_t>(res.type) << " : " << res.value << " } ";
+        }
+        ALOGI("%s", debug.str().c_str());
+    }
+
+    // Compare the locally tracked resources with the tracked resources by the RM.
+    void validateResourceUsage(const std::vector<MediaResourceParcel>& trackedResources) {
+        // Query the RM about current resource usage.
+        std::vector<MediaResourceParcel> currentResourceUsage;
+        mService->getMediaResourceUsageReport(&currentResourceUsage);
+        displayResources("Current Resources", currentResourceUsage);
+
+        // If we subtract the resources added, it should be left with 0 now.
+        subtractResources(trackedResources, currentResourceUsage);
+        displayResources("To Verify Resources", currentResourceUsage);
+
+        // Create a set of added HW resource types.
+        std::set<MediaResourceType> addedResourceTypes;
+        for (const auto& res : trackedResources) {
+            addedResourceTypes.insert(res.type);
+        }
+
+        for (const auto& res : currentResourceUsage) {
+            if (addedResourceTypes.find(res.type) == addedResourceTypes.end()) {
+                // Ignore the resource that's not added by us now.
+                continue;
+            }
+            bool isZero = (res.value == 0);
+            if (!isZero) {
+                ALOGE("%s: Expected resource [%d] to be 0 but its %lld",
+                      __func__, res.type, (long long)res.value);
+                EXPECT_TRUE(isZero);
+            }
+        }
+    }
+
+    // Update each resource by given amount (increasing OR decreasing).
+    void updateAllResources(std::vector<MediaResourceParcel>& resources,
+                            int updateAmount) {
+        std::transform(resources.begin(), resources.end(),
+                       resources.begin(),
+                       [updateAmount](MediaResourceParcel& res) {
+                           res.value += updateAmount;
+                           return res;
+                       });
+    }
+
+    // Update each resource by given amount and verify the result
+    void updateAllAndVerify(const ClientInfoParcel& clientInfo,
+                           std::vector<MediaResourceParcel>& resources,
+                           std::vector<MediaResourceParcel>& trackedResources,
+                           int updateAmount) {
+        updateAllResources(resources, updateAmount);
+        updateAllResources(trackedResources, updateAmount);
+        mService->updateResource(clientInfo, resources);
+        displayResources("Resources", resources);
+
+        // We expect trackedResources and current resource usage to be the same.
+        validateResourceUsage(trackedResources);
+    }
+
+    // Update the resource from oldResources to newResources.
+    // The newResources could have some resources missing compared to oldResources
+    // OR may have additional resources compared to oldResources.
+    void updateResource(const ClientInfoParcel& clientInfo,
+                        const std::vector<MediaResourceParcel>& oldResources,
+                        std::vector<MediaResourceParcel>& newResources,
+                        std::vector<MediaResourceParcel>& trackedResources) {
+        std::vector<MediaResourceParcel> removedResources;
+        // Look for resources that aren't required anymore.
+        for (const MediaResourceParcel& oldRes : oldResources) {
+            auto found = std::find_if(newResources.begin(),
+                                      newResources.end(),
+                                      [oldRes](const MediaResourceParcel& newRes) {
+                                          return oldRes.type == newRes.type; });
+
+            // If this old resource isn't found in the updated resources, that means
+            // it's not required anymore.
+            // Set the count to 0, so that it will be removed from the RM.
+            if (found == newResources.end()) {
+                // Add this to list of removed resources.
+                removedResources.push_back(oldRes);
+                MediaResourceParcel res = oldRes;
+                res.value = 0;
+                newResources.push_back(res);
+            }
+        }
+
+        if (!removedResources.empty()) {
+            // Remove those resources from the trackedResources.
+            subtractResources(removedResources, trackedResources);
+        }
+
+        // Update with new resources.
+        mService->updateResource(clientInfo, newResources);
+        displayResources("Resources", newResources);
+
+        // We expect trackedResources and current resource usage to be the same.
+        validateResourceUsage(trackedResources);
+    }
+
+    // Verifies the resource usage among all clients.
+    void testResourceUsage() {
+        // Create 2 clients for a low priority pid.
+        std::vector<std::shared_ptr<IResourceManagerClient>> lowPriPidClients;
+        lowPriPidClients.push_back(createTestClient(kLowPriorityPid, kTestUid1));
+        lowPriPidClients.push_back(createTestClient(kLowPriorityPid, kTestUid1));
+        // Create 2 clients for a high priority pid.
+        std::vector<std::shared_ptr<IResourceManagerClient>> highPriPidClients;
+        highPriPidClients.push_back(createTestClient(kHighPriorityPid, kTestUid2));
+        highPriPidClients.push_back(createTestClient(kHighPriorityPid, kTestUid2));
+
+        // Add non secure video codec resources for all these clients.
+        std::vector<ClientInfoParcel> clientInfos;
+        for (auto& client : lowPriPidClients) {
+            addNonSecureVideoCodecResource(client, clientInfos);
+        }
+        // Add non secure video codec resources for both clients of the high priority pid.
+        for (auto& client : highPriPidClients) {
+            addNonSecureVideoCodecResource(client, clientInfos);
+        }
+
+        // Now randomly add some HW resources for these clients.
+        // In trackedResources, we are tracking all the resources locally
+        // and we will compare that with what RM tracks to verify
+        std::vector<MediaResourceParcel> trackedResources;
+        std::vector<MediaResourceParcel> resources;
+        for (auto& clientInfo : clientInfos) {
+            resources = getHwResources();
+            mService->addResource(clientInfo, nullptr, resources);
+            if (trackedResources.empty()) {
+                trackedResources = resources;
+            } else {
+                addResources(resources, trackedResources);
+            }
+            displayResources("Resources", resources);
+            displayResources("Tracked Resources", trackedResources);
+        }
+
+        // We expect trackedResources to be the same as the current resource usage.
+        validateResourceUsage(trackedResources);
+
+        // For one of the client, start updating resources.
+        const ClientInfoParcel& lastClientInfo = clientInfos[clientInfos.size() - 1];
+
+        // Now update the resources by adding a new resource type for the
+        // selected client.
+        const int32_t resourceType = static_cast<int32_t>(MediaResourceType::kHwResourceTypeMin);
+        MediaResourceParcel newResource {.type = static_cast<MediaResourceType>(resourceType + 6),
+                                         .value = 100};
+        {
+            std::vector<MediaResourceParcel> newTrackedResources = trackedResources;
+            newTrackedResources.push_back(newResource);
+            std::vector<MediaResourceParcel> newResources = resources;
+            newResources.push_back(newResource);
+            updateResource(lastClientInfo, resources, newResources, newTrackedResources);
+        }
+
+        // Update the resources for the selected client by increasing the amount by
+        // 10 for all the resources.
+        int updateAmount = 10;
+        updateAllAndVerify(lastClientInfo, resources, trackedResources, updateAmount);
+
+        // Update the resources for the selected client by decreasing the count by 5
+        // for all the resources.
+        updateAmount = -5;
+        updateAllAndVerify(lastClientInfo, resources, trackedResources, updateAmount);
+
+        // Now update the resources by removing one resource type completely.
+        for (size_t index = 0; index < resources.size(); ++index) {
+            std::vector<MediaResourceParcel> newResources = resources;
+            std::vector<MediaResourceParcel> newTrackedResources = trackedResources;
+            newResources.erase(newResources.begin() + index);
+            updateResource(lastClientInfo, resources, newResources, newTrackedResources);
+        }
+
+        // Now update the resources by removing one resource at a time until it exhausts.
+        std::vector<MediaResourceParcel> newResources = resources;
+        for (size_t index = 1; index < newResources.size(); ++index) {
+            std::vector<MediaResourceParcel> newTrackedResources = trackedResources;
+            newResources.pop_back();
+            updateResource(lastClientInfo, resources, newResources, newTrackedResources);
+        }
+    }
 };
 
 class ResourceManagerServiceNewTest : public ResourceManagerServiceTest {
@@ -2021,4 +2265,8 @@
     testReclaimPolicies();
 }
 
+TEST_F(ResourceManagerServiceNewTest, resourceUsage) {
+    testResourceUsage();
+}
+
 } // namespace android
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index c91ead0..407a5d94 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -74,16 +74,28 @@
     }
 
     const std::lock_guard<std::mutex> lock(mLock);
+    sp<NotificationClient> notificationClient;
+    status_t status;
+    sp<IBinder> binder = IInterface::asBinder(client);
     if (mNotificationClients.count(pid) == 0) {
-        const sp<IBinder> binder = IInterface::asBinder(client);
-        const sp<NotificationClient> notificationClient = new NotificationClient(pid, binder);
+        notificationClient = new NotificationClient(pid, binder);
         mNotificationClients[pid] = notificationClient;
 
-        const status_t status = binder->linkToDeath(notificationClient);
+        status = binder->linkToDeath(notificationClient);
         ALOGW_IF(status != NO_ERROR, "registerClient() linkToDeath = %d\n", status);
         return AAudioConvert_androidToAAudioResult(status);
     } else {
         ALOGW("registerClient(%d) already registered!", pid);
+        notificationClient = mNotificationClients[pid];
+        if (notificationClient->isBinderNull()) {
+            ALOGW("registerClient() need to linkToDeath as notificationClient binder is null");
+            status = binder->linkToDeath(notificationClient);
+            if (status != NO_ERROR) {
+                ALOGE("registerClient() linkToDeath status = %d\n", status);
+            } else {
+                notificationClient->setBinder(binder);
+            }
+        }
         return AAUDIO_OK; // TODO should this be considered an error
     }
 }
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 3d4ab34..0cc9649 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -104,6 +104,14 @@
             return mExclusiveEnabled;
         }
 
+        bool isBinderNull() {
+            return mBinder == nullptr;
+        }
+
+        void setBinder(android::sp<IBinder>& binder) {
+            mBinder = binder;
+        }
+
         // IBinder::DeathRecipient
         void binderDied(const android::wp<IBinder>& who) override;
 
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 6c31d2c..8aee6e3 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -243,6 +243,21 @@
     ALOGD("%s(format = 0x%X) deviceIds = %s, sessionId = %d",
           __func__, config->format, toString(getDeviceIds()).c_str(), getSessionId());
 
+    ALOGD("%s bufferCapacity = %d, deviceSampleRate = %d, requestedSampleRate = %d",
+          __func__, getBufferCapacity(), config->sample_rate, getSampleRate());
+
+    const int32_t requestedSampleRate = getSampleRate();
+    const int32_t deviceSampleRate = config->sample_rate;
+
+    // When sample rate conversion is needed, we use the device sample rate and the
+    // requested sample rate to scale the capacity in configureDataInformation().
+    // Thus, we should scale the capacity here to cancel out the
+    // (requestedSampleRate / deviceSampleRate) scaling there.
+    if (requestedSampleRate != AAUDIO_UNSPECIFIED && requestedSampleRate != deviceSampleRate) {
+        setBufferCapacity(static_cast<int64_t>(getBufferCapacity()) * deviceSampleRate /
+                          requestedSampleRate);
+    }
+
     // Create MMAP/NOIRQ buffer.
     result = createMmapBuffer_l();
     if (result != AAUDIO_OK) {
@@ -478,6 +493,10 @@
 
 void AAudioServiceEndpointMMAP::onVolumeChanged(float volume) {
     ALOGD("%s() volume = %f", __func__, volume);
+    if (std::isnan(volume)) {
+        ALOGE("%s reject to set volume as nan", __func__);
+        return;
+    }
     const std::lock_guard<std::mutex> lock(mLockStreams);
     for(const auto& stream : mRegisteredStreams) {
         stream->onVolumeChanged(volume);
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index bd58fa2..7da14a4 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -76,7 +76,21 @@
 }
 
 int32_t AAudioServiceStreamShared::calculateBufferCapacity(int32_t requestedCapacityFrames,
-                                                           int32_t framesPerBurst) {
+                                                           int32_t framesPerBurst,
+                                                           int32_t requestedSampleRate,
+                                                           int32_t deviceSampleRate) {
+    if (requestedSampleRate != AAUDIO_UNSPECIFIED && requestedSampleRate != deviceSampleRate) {
+        // When sample rate conversion is needed, we use the device sample rate and the
+        // requested sample rate to scale the capacity in configureDataInformation().
+        // Thus, we should scale the capacity here to cancel out the
+        // (requestedSampleRate / deviceSampleRate) scaling there.
+
+        requestedCapacityFrames = static_cast<int64_t>(requestedCapacityFrames) * deviceSampleRate
+                                  / requestedSampleRate;
+        ALOGV("calculateBufferCapacity() scaled buffer capacity to %d frames, requested SR = %d"
+              ", device SR = %d",
+              requestedCapacityFrames, requestedSampleRate, deviceSampleRate);
+    }
 
     if (requestedCapacityFrames > MAX_FRAMES_PER_BUFFER) {
         ALOGE("calculateBufferCapacity() requested capacity %d > max %d",
@@ -168,7 +182,8 @@
     }
 
     setBufferCapacity(calculateBufferCapacity(configurationInput.getBufferCapacity(),
-                                     mFramesPerBurst));
+                                              mFramesPerBurst, configurationInput.getSampleRate(),
+                                              getSampleRate()));
     if (getBufferCapacity() < 0) {
         result = getBufferCapacity(); // negative error code
         setBufferCapacity(0);
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index c6b74e1..3175613 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -104,7 +104,9 @@
      * @return capacity or negative error
      */
     static int32_t calculateBufferCapacity(int32_t requestedCapacityFrames,
-                                            int32_t framesPerBurst);
+                                           int32_t framesPerBurst,
+                                           int32_t requestedSampleRate,
+                                           int32_t deviceSampleRate);
 
 private:
 
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
index 8357a9e..47a9452 100644
--- a/services/tuner/TunerHelper.cpp
+++ b/services/tuner/TunerHelper.cpp
@@ -101,7 +101,7 @@
 }
 
 // TODO: create a map between resource id and handles.
-int TunerHelper::getResourceIdFromHandle(long resourceHandle, int /*type*/) {
+int TunerHelper::getResourceIdFromHandle(int64_t resourceHandle, int /*type*/) {
     return (int)((resourceHandle >> RESOURCE_ID_SHIFT) & RESOURCE_ID_MASK);
 }
 
@@ -112,7 +112,7 @@
  *   32 bits - id
  *   24 bits - resourceRequestCount
  */
-long TunerHelper::getResourceHandleFromId(int id, int resourceType) {
+int64_t TunerHelper::getResourceHandleFromId(int id, int resourceType) {
     // TODO: build up randomly generated id to handle mapping
     return static_cast<int64_t>(resourceType & RESOURCE_TYPE_MASK) << RESOURCE_TYPE_SHIFT |
            static_cast<int64_t>(id & RESOURCE_ID_MASK) << RESOURCE_ID_SHIFT |
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
index 74e1662..10058d6 100644
--- a/services/tuner/TunerHelper.h
+++ b/services/tuner/TunerHelper.h
@@ -62,8 +62,8 @@
                                      const vector<TunerDemuxInfo>& demuxInfos,
                                      const vector<int64_t>& lnbHandles);
     // TODO: create a map between resource id and handles.
-    static int getResourceIdFromHandle(long resourceHandle, int type);
-    static long getResourceHandleFromId(int id, int resourceType);
+    static int getResourceIdFromHandle(int64_t resourceHandle, int type);
+    static int64_t getResourceHandleFromId(int id, int resourceType);
 
 private:
     static int32_t sResourceRequestCount;