Merge "Add fg playback restriction checks and logging" into main
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index e73222b..06e1d34 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1103,12 +1103,19 @@
 /**
  * Request continuous streaming of a sequence of images for the shared capture session
  * when more than one client can open the same camera in shared mode by calling
- * {@link ACameraManager_openSharedCamera}. In shared session, only primary clients can create
- * a capture request and change capture parameters. Secondary clients can only request streaming of
- * images by calling this api {@link ACameraCaptureSessionShared_startStreaming}. Calling this api
- * for normal sessions when {@link ACameraManager_openCamera} is used to open the camera will throw
+ * {@link ACameraManager_openSharedCamera}. In shared mode, the highest priority client among all
+ * the clients becomes the primary client while the others are secondary clients. In a shared
+ * capture session, only the primary client can create a capture request and change capture
+ * parameters. Secondary clients can only request streaming of images by calling this API,
+ * {@link ACameraCaptureSessionShared_startStreaming}. Calling this API on a normal session, when
+ * {@link ACameraManager_openCamera} was used to open the camera, will return
  * {@link ACAMERA_ERROR_INVALID_OPERATION}.
  *
+ * <p>The priority of client access is determined by two factors: the client's current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
  * <p>With this method, the camera device will continually capture images, cycling through the
  * settings in the list of {@link ACaptureRequest} specified by the primary client. If primary
  * client does not have ongoing repeating request, camera service will use a capture request with
@@ -1145,20 +1152,72 @@
  *         </ul>
  */
 camera_status_t ACameraCaptureSessionShared_startStreaming(
-    ACameraCaptureSession* sharedSession, ACameraCaptureSession_captureCallbacksV2 *callbacks,
+    ACameraCaptureSession* sharedSession,
+    /*optional*/ACameraCaptureSession_captureCallbacksV2 *callbacks,
     int numOutputWindows, ANativeWindow **window,
-    int *captureSequenceId) __INTRODUCED_IN(36);
+    /*optional*/int *captureSequenceId) __INTRODUCED_IN(36);
 
 /**
  * This has the same functionality as ACameraCaptureSessionShared_startStreaming, with added
  * support for logical multi-camera where the capture callbacks support result metadata for
  * physical cameras.
+ *
+ * Request continuous streaming of a sequence of images for the shared capture session
+ * when more than one client can open the same camera in shared mode by calling
+ * {@link ACameraManager_openSharedCamera}. In shared mode, the highest priority client among all
+ * the clients becomes the primary client while the others are secondary clients. In a shared
+ * capture session, only the primary client can create a capture request and change capture
+ * parameters. Secondary clients can only request streaming of images by calling this API,
+ * {@link ACameraCaptureSessionShared_logicalCamera_startStreaming}. Calling this API on a normal
+ * session, when {@link ACameraManager_openCamera} was used to open the camera, will return
+ * {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * <p>The priority of client access is determined by two factors: the client's current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
+ * <p>With this method, the camera device will continually capture images, cycling through the
+ * settings in the list of {@link ACaptureRequest} specified by the primary client. If the primary
+ * client does not have an ongoing repeating request, the camera service will use a capture request
+ * with default capture parameters for the preview template.</p>
+ *
+ * <p>To stop the continuous streaming, call {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>Calling this method will replace an existing continuous streaming request.</p>
+ *
+ * @param sharedSession the shared capture session when the camera is opened in
+ *        shared mode.
+ * @param callbacks the {@link ACameraCaptureSession_logicalCamera_captureCallbacksV2} to be
+ *        associated with this capture sequence. No capture callback will be fired if callbacks
+ *        is set to NULL.
+ * @param numOutputWindows number of native windows to be used for streaming. Must be at least 1.
+ * @param windows an array of {@link ANativeWindow} to be used for streaming. Length must be at
+ *        least numOutputWindows.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ *        will be stored here if this argument is not NULL and the method call succeeds.
+ *        When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ *             if it is not NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if sharedSession or windows is NULL, or
+ *             if numOutputWindows < 1</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ *         </li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ *              session</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reason</li>
+ *         </ul>
  */
 camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
     ACameraCaptureSession* sharedSession,
-    ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
+    /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
     int numOutputWindows, ANativeWindow **windows,
-    int *captureSequenceId) __INTRODUCED_IN(36);
+    /*optional*/int *captureSequenceId) __INTRODUCED_IN(36);
 
 /**
  * Cancel any ongoing streaming started by {@link ACameraCaptureSessionShared_startStreaming}.
@@ -1183,7 +1242,7 @@
  *         </ul>
  */
 camera_status_t ACameraCaptureSessionShared_stopStreaming(
-  ACameraCaptureSession *sharedSession
+    ACameraCaptureSession *sharedSession
 )  __INTRODUCED_IN(36);
 __END_DECLS
 
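For reference, a minimal usage sketch of the shared-session streaming entry points documented
above, assuming the session was obtained through a shared open and that window is a valid output
ANativeWindow (error handling trimmed; illustrative only, not part of the change):

    #include <camera/NdkCameraCaptureSession.h>

    // Start shared-session streaming into a single window, then stop it again.
    camera_status_t streamOnce(ACameraCaptureSession* sharedSession, ANativeWindow* window) {
        ANativeWindow* windows[1] = { window };
        int sequenceId = 0;
        camera_status_t status = ACameraCaptureSessionShared_startStreaming(
                sharedSession, /*callbacks*/ nullptr, /*numOutputWindows*/ 1, windows, &sequenceId);
        if (status != ACAMERA_OK) {
            return status;  // e.g. ACAMERA_ERROR_INVALID_OPERATION for a non-shared session
        }
        // ... consume frames from the window ...
        return ACameraCaptureSessionShared_stopStreaming(sharedSession);
    }
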
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index a9b0174..492c41b 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -299,6 +299,21 @@
  * ACameraDevice**)} except that it opens the camera in shared mode so that more
  * than one client can access the camera at the same time.
  *
+ * <p>When the camera is opened in shared mode, the highest priority client among all the clients
+ * becomes the primary client while the others are secondary clients. The primary client can create
+ * capture requests, modify any capture parameters, and send them to the capture session for a
+ * one-shot capture or as a repeating request.</p>
+ *
+ * <p>Secondary clients cannot create a capture request or modify any capture parameters. However,
+ * they can start camera streaming to the desired surface targets using
+ * {@link ACameraCaptureSessionShared_startStreaming}. Once streaming has successfully started,
+ * they can stop it using {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>The priority of client access is determined by two factors: the client's current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
  * <p>Processes need to have android.permission.SYSTEM_CAMERA in addition to
  * android.permission.CAMERA in order to connect to this camera device in shared
  * mode.</p>
@@ -308,7 +323,7 @@
  * @param callback the {@link ACameraDevice_StateCallbacks} associated with the opened camera
  *                 device.
  * @param device the opened {@link ACameraDevice} will be filled here if the method call succeeds.
- * @param primaryClient will return as true if the client is primaryClient.
+ * @param isPrimaryClient will be set to true if the client is the primary client.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
@@ -449,8 +464,8 @@
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
- *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any parameter is not
- *         valid.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, or isSharingSupported
+ *                  is NULL, or if cameraId does not match any connected camera device.</li>
  *         </ul>
  */
 camera_status_t ACameraManager_isCameraDeviceSharingSupported(
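
A hedged sketch of the shared-open flow described above. The full prototypes of
ACameraManager_isCameraDeviceSharingSupported and ACameraManager_openSharedCamera are not shown
in this hunk, so the parameter order below is assumed from the @param documentation and may differ
from the actual headers:

    #include <camera/NdkCameraManager.h>

    // Open a camera in shared mode when sharing is supported (parameter order assumed).
    void openSharedIfSupported(ACameraManager* manager, const char* cameraId,
                               ACameraDevice_StateCallbacks* cb) {
        bool sharingSupported = false;
        if (ACameraManager_isCameraDeviceSharingSupported(
                    manager, cameraId, &sharingSupported) != ACAMERA_OK || !sharingSupported) {
            return;  // fall back to ACameraManager_openCamera()
        }
        ACameraDevice* device = nullptr;
        bool isPrimaryClient = false;
        if (ACameraManager_openSharedCamera(manager, cameraId, cb, &device,
                                            &isPrimaryClient) == ACAMERA_OK) {
            // Secondary clients (!isPrimaryClient) may only start/stop streaming; the
            // primary client may also create requests and change capture parameters.
        }
    }
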
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
index 5ad9530..abe292f 100644
--- a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -21,6 +21,7 @@
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <gui/view/Surface.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include "camera2common.h"
 
 using namespace std;
@@ -90,9 +91,13 @@
                 sp<Surface> surface = surfaceControl->getSurface();
                 captureRequest->mSurfaceList.push_back(surface);
                 if (fdp.ConsumeBool()) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                    view::Surface surfaceShim = view::Surface::fromSurface(surface);
+#else
                     view::Surface surfaceShim;
-                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
                     surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+#endif
+                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
                     surfaceShim.writeToParcel(&parcelCamCaptureReq);
                 }
                 surface.clear();
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index cb08b8c..99b5381 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -1100,7 +1100,7 @@
     // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
 // and universally administered
     constexpr std::array<uint8_t, 4> BTANON_PREFIX {0xFD, 0xFF, 0xFF, 0xFF};
-    // Keep sync with ServiceUtilities.cpp mustAnonymizeBluetoothAddress
+    // Keep in sync with ServiceUtilities.cpp anonymizeBluetoothAddress
     constexpr const char * BTANON_PREFIX_STR = "XX:XX:XX:XX:";
 }
 
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index 9d84bc7..9036df1 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -1056,10 +1056,20 @@
                                          input->width(), input->width(), input->width(),
                                          input->width(), input->width(), input->height(),
                                          CONV_FORMAT_I420);
-            } else if (IsYUV420(*input)) {
-                return C2_BAD_VALUE;
             } else if (IsI420(*input)) {
-                return C2_BAD_VALUE;
+                uint8_t  *srcY  = (uint8_t*)input->data()[0];
+                uint8_t  *srcU  = (uint8_t*)input->data()[1];
+                uint8_t  *srcV  = (uint8_t*)input->data()[2];
+                uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                convertPlanar8ToP210(dstY, dstUV, srcY, srcU, srcV,
+                                        layout.planes[C2PlanarLayout::PLANE_Y].rowInc,
+                                        layout.planes[C2PlanarLayout::PLANE_U].rowInc,
+                                        layout.planes[C2PlanarLayout::PLANE_V].rowInc,
+                                        input->width(), input->width(),
+                                        input->width(), input->height(),
+                                        CONV_FORMAT_I420);
+
             } else {
                 ALOGE("Not supported color format. %d", mColorFormat);
                 return C2_BAD_VALUE;
@@ -1317,10 +1327,6 @@
         return;
     }
 
-    if (work->input.buffers.empty()) {
-        return;
-    }
-
     std::shared_ptr<C2GraphicView> view;
     std::shared_ptr<C2Buffer> inputBuffer = nullptr;
     if (!work->input.buffers.empty()) {
@@ -1332,7 +1338,19 @@
             work->workletsProcessed = 1u;
             return;
         }
+    } else {
+        ALOGV("Empty input Buffer");
+        uint32_t flags = 0;
+        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+        return;
     }
+
     if (!inputBuffer) {
         fillEmptyWork(work);
         return;
@@ -1361,6 +1379,7 @@
 
     error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
     if (error != C2_OK) {
+        ALOGE("setEncodeArgs has failed. err = %d", error);
         mSignalledError = true;
         work->result = error;
         work->workletsProcessed = 1u;
@@ -1382,6 +1401,7 @@
         int32_t status =
                 oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
         if (status != C2_OK) {
+            ALOGE("oapve_encode has failed. err = %d", status);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index a03f24f..ea67bf4 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -713,6 +713,39 @@
   }
 }
 
+void convertPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+                              size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format) {
+  if (format != CONV_FORMAT_I420) {
+    ALOGE("No support for planar8 to P210. format is %d", format);
+    return;
+  }
+
+  for (int32_t y = 0; y < height; ++y) {
+    for (int32_t x = 0; x < width; ++x) {
+      dstY[x] = ((uint16_t)((double)srcY[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+    }
+    dstY += dstYStride;
+    srcY += srcYStride;
+  }
+
+  for (int32_t y = 0; y < height / 2; ++y) {
+    for (int32_t x = 0; x < width / 2; ++x) {
+      dstUV[x<<1] = dstUV[(x<<1) + dstUVStride] =
+                ((uint16_t)((double)srcU[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+      dstUV[(x<<1) + 1] = dstUV[(x<<1) + dstUVStride + 1] =
+                ((uint16_t)((double)srcV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+    }
+    dstUV += dstUVStride << 1;
+    srcU += srcUStride;
+    srcV += srcVStride;
+  }
+}
+
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
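
The per-sample math in convertPlanar8ToP210 above, pulled out as a standalone integer helper for
clarity. This is illustrative only; the helper is not part of the change and is intended to match
the ((uint16_t)((double)v * 1023 / 255 + 0.5) << 6) & 0xFFC0 expression used in the loops:

    #include <cstdint>

    // An 8-bit sample v is rescaled to 10 bits (round(v * 1023 / 255)) and stored in the
    // top 10 bits of a 16-bit word, which is the layout P210/P010 expect.
    static inline uint16_t expand8To10HighBits(uint8_t v) {
        return (uint16_t)(((uint32_t)v * 1023u + 127u) / 255u) << 6;  // 255 -> 0xFFC0
    }
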
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 4306e55..5d2e8cd 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -111,6 +111,12 @@
                               size_t dstYStride, size_t dstUVStride,
                               uint32_t width, uint32_t height,
                               CONV_FORMAT_T format);
+void convertPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+                              size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index fa5ce77..fbd1b36 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -269,7 +269,7 @@
     kParamIndexSuspendAt, // input-surface, struct
     kParamIndexResumeAt, // input-surface, struct
     kParamIndexStopAt, // input-surface, struct
-    kParamIndexTimeOffset, // input-surface, struct
+    kParamIndexTimeOffset, // input-surface, int64_t
     kParamIndexMinFrameRate, // input-surface, float
     kParamIndexTimestampGapAdjustment, // input-surface, struct
 
@@ -299,6 +299,10 @@
 
     // allow tunnel peek behavior to be unspecified for app compatibility
     kParamIndexTunnelPeekMode, // tunnel mode, enum
+
+    // input surface
+    kParamIndexCaptureFrameRate, // input-surface, float
+    kParamIndexStopTimeOffset, // input-surface, int64_t
 };
 
 }
@@ -2651,6 +2655,14 @@
 constexpr char C2_PARAMKEY_INPUT_SURFACE_MIN_FRAME_RATE[] = "input-surface.min-frame-rate";
 
 /**
+ * Maximum frame rate (fps) for the input surface.
+ *
+ * Frames are dropped as needed to meet this rate.
+ */
+typedef C2PortParam<C2Tuning, C2FloatValue, kParamIndexMaxFrameRate> C2PortMaxFrameRateTuning;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_MAX_FRAME_RATE[] = "input-surface.max-frame-rate";
+
+/**
  * Timestamp adjustment (override) for input surface buffers. These control the input timestamp
  * fed to the codec, but do not impact the output timestamp.
  */
@@ -2680,9 +2692,26 @@
 inline C2TimestampGapAdjustmentStruct::C2TimestampGapAdjustmentStruct()
     : mode(C2TimestampGapAdjustmentStruct::NONE), value(0) { }
 
-typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct> C2PortTimestampGapTuning;
+typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct, kParamIndexTimestampGapAdjustment>
+        C2PortTimestampGapTuning;
 constexpr char C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT[] = "input-surface.timestamp-adjustment";
 
+/**
+ * Capture frame rate for the input surface. During timelapse or slow-motion encoding,
+ * this represents the frame rate of the input surface.
+ */
+typedef C2PortParam<C2Tuning, C2FloatValue, kParamIndexCaptureFrameRate>
+        C2PortCaptureFrameRateTuning;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_CAPTURE_FRAME_RATE[] = "input-surface.capture-frame-rate";
+
+/**
+ * Stop time offset for the input surface. The stop time offset is the elapsed time
+ * between the timestamp of the last frame and the stop time. It can be returned by
+ * IInputSurface when queried.
+ */
+typedef C2PortParam<C2Tuning, C2Int64Value, kParamIndexStopTimeOffset> C2PortStopTimeOffset;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_STOP_TIME_OFFSET[] = "input-surface.stop-time-offset";
+
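
A hedged usage sketch for the new input-surface tunings defined above. Whether these are applied
through a component's C2ComponentInterface or through the input surface's own Configurable depends
on the integration; the ::input direction and the config_vb() entry point below are assumptions:

    #include <C2Component.h>
    #include <C2Config.h>
    #include <memory>
    #include <vector>

    // Configure a 240 fps capture rate while dropping frames above 30 fps on the way to the codec.
    void configureInputSurfaceRates(const std::shared_ptr<C2ComponentInterface>& intf) {
        C2PortCaptureFrameRateTuning::input captureRate(240.f);  // e.g. slow-motion capture
        C2PortMaxFrameRateTuning::input maxRate(30.f);           // drop frames to stay at 30 fps
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        intf->config_vb({ &captureRate, &maxRate }, C2_MAY_BLOCK, &failures);
    }
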
 /* ===================================== TUNNELED CODEC ==================================== */
 
 /**
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index e16e2b1..eaabc33 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -78,6 +78,8 @@
         "Configurable.cpp",
         "InputBufferManager.cpp",
         "ParamTypes.cpp",
+        "inputsurface/InputSurface.cpp",
+        "inputsurface/InputSurfaceConnection.cpp",
     ],
 
     header_libs: [
@@ -98,6 +100,7 @@
         "libhidlbase",
         "liblog",
         "libnativewindow",
+        "libmediandk",
         "libstagefright_aidl_bufferpool2",
         "libstagefright_bufferpool@2.0.1",
         "libui",
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
new file mode 100644
index 0000000..5c2cc2e
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/media/c2/BnInputSurface.h>
+
+#include <codec2/aidl/Configurable.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <C2.h>
+
+#include <memory>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+struct InputSurface : public BnInputSurface {
+    InputSurface();
+    c2_status_t status() const;
+
+    // Methods from IInputSurface follow.
+    ::ndk::ScopedAStatus getSurface(
+            ::aidl::android::view::Surface* surface) override;
+    ::ndk::ScopedAStatus getConfigurable(
+            std::shared_ptr<IConfigurable>* configurable) override;
+    ::ndk::ScopedAStatus connect(
+            const std::shared_ptr<IInputSink>& sink,
+            std::shared_ptr<IInputSurfaceConnection>* connection) override;
+
+protected:
+    class Interface;
+    class ConfigurableIntf;
+
+    c2_status_t mInit;
+    std::shared_ptr<Interface> mIntf;
+    std::shared_ptr<CachedConfigurable> mConfigurable;
+
+    virtual ~InputSurface() override;
+
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+    static void OnBinderDied(void *cookie);
+    static void OnBinderUnlinked(void *cookie);
+    struct DeathContext;
+    DeathContext *mDeathContext;
+};
+
+}  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
new file mode 100644
index 0000000..59361e1
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/media/c2/BnInputSurfaceConnection.h>
+#include <media/NdkImage.h>
+
+#include <C2.h>
+
+#include <memory>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+struct InputSurfaceConnection : public BnInputSurfaceConnection {
+    InputSurfaceConnection();
+    c2_status_t status() const;
+
+    // Methods from IInputSurfaceConnection follow.
+    ::ndk::ScopedAStatus disconnect() override;
+    ::ndk::ScopedAStatus signalEndOfStream() override;
+
+    // implementation specific interface.
+
+    // Submit a buffer to the connected component.
+    c2_status_t submitBuffer(
+            int32_t bufferId,
+            const AImage *buffer = nullptr,
+            int64_t timestamp = 0,
+            int fenceFd = -1);
+
+    // Submit eos to the connected component.
+    c2_status_t submitEos(int32_t bufferId);
+
+    // Notify the component that the dataspace has changed.
+    void dispatchDataSpaceChanged(
+            int32_t dataSpace, int32_t aspects, int32_t pixelFormat);
+
+protected:
+    virtual ~InputSurfaceConnection() override;
+};
+
+}  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurface.cpp b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
new file mode 100644
index 0000000..5f6d176
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-InputSurface"
+#include <android-base/logging.h>
+
+#include <codec2/aidl/inputsurface/InputSurface.h>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+// Derived class of C2InterfaceHelper
+class InputSurface::Interface : public C2InterfaceHelper {
+public:
+    explicit Interface(
+            const std::shared_ptr<C2ReflectorHelper> &helper)
+        : C2InterfaceHelper(helper) {
+
+        setDerivedInstance(this);
+
+    }
+
+private:
+};
+
+class InputSurface::ConfigurableIntf : public ConfigurableC2Intf {
+public:
+};
+
+struct InputSurface::DeathContext {
+    // TODO;
+};
+
+void InputSurface::OnBinderDied(void *cookie) {
+    (void) cookie;
+}
+
+void InputSurface::OnBinderUnlinked(void *cookie) {
+    (void) cookie;
+}
+
+InputSurface::InputSurface() : mDeathContext(nullptr) {
+    mInit = C2_OK;
+}
+
+InputSurface::~InputSurface() {
+}
+
+::ndk::ScopedAStatus InputSurface::getSurface(::aidl::android::view::Surface* surface) {
+    (void) surface;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurface::getConfigurable(
+        std::shared_ptr<IConfigurable>* configurable) {
+    *configurable = mConfigurable;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurface::connect(
+        const std::shared_ptr<IInputSink>& sink,
+        std::shared_ptr<IInputSurfaceConnection>* connection) {
+    (void) sink;
+    (void) connection;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
new file mode 100644
index 0000000..44ca924
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-InputSurface"
+#include <android-base/logging.h>
+
+#include <codec2/aidl/inputsurface/InputSurfaceConnection.h>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+InputSurfaceConnection::InputSurfaceConnection() {
+}
+
+InputSurfaceConnection::~InputSurfaceConnection() {
+}
+
+::ndk::ScopedAStatus InputSurfaceConnection::disconnect() {
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurfaceConnection::signalEndOfStream() {
+    return ::ndk::ScopedAStatus::ok();
+}
+
+c2_status_t InputSurfaceConnection::submitBuffer(
+        int32_t bufferId, const AImage *buffer, int64_t timestamp, int fenceFd) {
+    (void)bufferId;
+    (void)buffer;
+    (void)timestamp;
+    (void)fenceFd;
+    return C2_OK;
+}
+
+c2_status_t InputSurfaceConnection::submitEos(int32_t bufferId) {
+    (void)bufferId;
+    return C2_OK;
+}
+
+void InputSurfaceConnection::dispatchDataSpaceChanged(
+            int32_t dataSpace, int32_t aspects, int32_t pixelFormat) {
+    (void)dataSpace;
+    (void)aspects;
+    (void)pixelFormat;
+}
+
+}  // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 6a6da0f..4055f9b 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -96,6 +96,10 @@
     mGraphicsTracker->onAttached(generation);
 }
 
+void GraphicBufferAllocator::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    mGraphicsTracker->pollForRenderedFrames(delta);
+}
+
 c2_status_t GraphicBufferAllocator::allocate(
         uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
         AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
@@ -119,6 +123,10 @@
     return mGraphicsTracker->render(block, input, output);
 }
 
+void GraphicBufferAllocator::onRequestStop() {
+    mGraphicsTracker->onRequestStop();
+}
+
 GraphicBufferAllocator::~GraphicBufferAllocator() {}
 
 std::shared_ptr<GraphicBufferAllocator> GraphicBufferAllocator::CreateGraphicBufferAllocator(
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index bdfc409..ff356fc 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -32,6 +32,9 @@
 static constexpr int kMaxDequeueMin = 1;
 static constexpr int kMaxDequeueMax = ::android::BufferQueueDefs::NUM_BUFFER_SLOTS - 2;
 
+// Just some delay for HAL to receive the stop()/release() request.
+static constexpr int kAllocateDirectDelayUs = 16666;
+
 c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
     std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
     if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
@@ -177,7 +180,7 @@
     mMaxDequeueCommitted{maxDequeueCount},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
-    mInConfig{false}, mStopped{false} {
+    mInConfig{false}, mStopped{false}, mStopRequested{false}, mAllocAfterStopRequested{0} {
     if (maxDequeueCount < kMaxDequeueMin) {
         mMaxDequeue = kMaxDequeueMin;
         mMaxDequeueCommitted = kMaxDequeueMin;
@@ -490,6 +493,18 @@
     }
 }
 
+void GraphicsTracker::onRequestStop() {
+    std::unique_lock<std::mutex> l(mLock);
+    if (mStopped) {
+        return;
+    }
+    if (mStopRequested) {
+        return;
+    }
+    mStopRequested = true;
+    writeIncDequeueableLocked(kMaxDequeueMax - 1);
+}
+
 void GraphicsTracker::writeIncDequeueableLocked(int inc) {
     CHECK(inc > 0 && inc < kMaxDequeueMax);
     thread_local char buf[kMaxDequeueMax];
@@ -544,8 +559,7 @@
     return C2_OK;
 }
 
-c2_status_t GraphicsTracker::requestAllocate(std::shared_ptr<BufferCache> *cache) {
-    std::lock_guard<std::mutex> l(mLock);
+c2_status_t GraphicsTracker::requestAllocateLocked(std::shared_ptr<BufferCache> *cache) {
     if (mDequeueable > 0) {
         char buf[1];
         int ret = ::read(mReadPipeFd.get(), buf, 1);
@@ -728,6 +742,34 @@
     return C2_OK;
 }
 
+c2_status_t GraphicsTracker::_allocateDirect(
+        uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+        AHardwareBuffer **buf, sp<Fence> *rFence) {
+    AHardwareBuffer_Desc desc;
+    desc.width = width;
+    desc.height = height;
+    desc.layers = 1u;
+    desc.format = ::android::AHardwareBuffer_convertFromPixelFormat(format);
+    desc.usage = ::android::AHardwareBuffer_convertFromGrallocUsageBits(usage);
+    desc.rfu0 = 0;
+    desc.rfu1 = 0;
+
+    int res = AHardwareBuffer_allocate(&desc, buf);
+    if (res != ::android::OK) {
+        ALOGE("_allocateDirect() failed(%d)", res);
+        if (res == ::android::NO_MEMORY) {
+            return C2_NO_MEMORY;
+        } else {
+            return C2_CORRUPTED;
+        }
+    }
+
+    int alloced = mAllocAfterStopRequested++;
+    *rFence = Fence::NO_FENCE;
+    ALOGD("_allocateDirect() allocated %d buffer", alloced);
+    return C2_OK;
+}
+
 c2_status_t GraphicsTracker::allocate(
         uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
         AHardwareBuffer **buf, sp<Fence> *rFence) {
@@ -735,10 +777,21 @@
         ALOGE("cannot allocate due to being stopped");
         return C2_BAD_STATE;
     }
+    c2_status_t res = C2_OK;
     std::shared_ptr<BufferCache> cache;
-    c2_status_t res = requestAllocate(&cache);
-    if (res != C2_OK) {
-        return res;
+    {
+        std::unique_lock<std::mutex> l(mLock);
+        if (mStopRequested) {
+            l.unlock();
+            res = _allocateDirect(width, height, format, usage, buf, rFence);
+            // Delay a little bit for HAL to receive stop()/release() request.
+            ::usleep(kAllocateDirectDelayUs);
+            return res;
+        }
+        res = requestAllocateLocked(&cache);
+        if (res != C2_OK) {
+            return res;
+        }
     }
     ALOGV("allocatable or dequeueable");
 
@@ -1003,6 +1056,19 @@
     return C2_OK;
 }
 
+void GraphicsTracker::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    sp<IGraphicBufferProducer> igbp;
+    {
+        std::unique_lock<std::mutex> l(mLock);
+        if (mBufferCache) {
+            igbp = mBufferCache->mIgbp;
+        }
+    }
+    if (igbp) {
+        igbp->getFrameTimestamps(delta);
+    }
+}
+
 void GraphicsTracker::onReleased(uint32_t generation) {
     bool updateDequeue = false;
     {
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9ee9b9e..17e5b62 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -3170,6 +3170,11 @@
 
 c2_status_t Codec2Client::Component::stop() {
     if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->onRequestStop();
+        }
         ::ndk::ScopedAStatus transStatus = mAidlBase->stop();
         return GetC2Status(transStatus, "stop");
     }
@@ -3220,6 +3225,11 @@
         }
     }
     if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->onRequestStop();
+        }
         ::ndk::ScopedAStatus transStatus = mAidlBase->release();
         return GetC2Status(transStatus, "release");
     }
@@ -3407,7 +3417,11 @@
 
 void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
     if (mAidlBase) {
-        // TODO b/311348680
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->pollForRenderedFrames(delta);
+        }
         return;
     }
     mOutputBufferQueue->pollForRenderedFrames(delta);
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index a797cb7..a70ffef 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -85,6 +85,11 @@
     void onBufferAttached(uint32_t generation);
 
     /**
+     * Retrieve frame event history from the current surface, if any.
+     */
+    void pollForRenderedFrames(::android::FrameEventHistoryDelta* delta);
+
+    /**
      * Allocates a buffer.
      *
      * @param   width             width of the requested buffer.
@@ -125,6 +130,11 @@
             const ::android::IGraphicBufferProducer::QueueBufferInput& input,
             ::android::IGraphicBufferProducer::QueueBufferOutput *output);
 
+    /**
+     * Notify stop()/release() is in progress.
+     */
+    void onRequestStop();
+
     ~GraphicBufferAllocator();
 
     /**
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 9a4fa12..536caaa 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -35,6 +35,7 @@
 
 using ::android::IGraphicBufferProducer;
 using ::android::GraphicBuffer;
+using ::android::FrameEventHistoryDelta;
 using ::android::Fence;
 using ::android::PixelFormat;
 using ::android::sp;
@@ -133,6 +134,11 @@
                        IGraphicBufferProducer::QueueBufferOutput *output);
 
     /**
+     * Retrieve frame event history from the current surface, if any.
+     */
+    void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
+    /**
      * Notifies when a Buffer is ready to allocate from Graphics.
      * If generation does not match to the current, notifications via the interface
      * will be ignored. (In the case, the notifications are from one of the old surfaces
@@ -175,6 +181,14 @@
      */
     void stop();
 
+    /**
+     * A stop()/release() request to the HAL is in progress from the client.
+     * The class will never become active again after the request.
+     * Still, allocation requests from the HAL should be served until stop()
+     * is actually called.
+     */
+    void onRequestStop();
+
 private:
     struct BufferCache;
 
@@ -290,6 +304,10 @@
 
     std::atomic<bool> mStopped;
 
+    bool mStopRequested;
+    std::atomic<int> mAllocAfterStopRequested;
+
+
 private:
     explicit GraphicsTracker(int maxDequeueCount);
 
@@ -304,7 +322,7 @@
             const std::shared_ptr<BufferCache> &cache,
             int maxDequeueCommitted);
 
-    c2_status_t requestAllocate(std::shared_ptr<BufferCache> *cache);
+    c2_status_t requestAllocateLocked(std::shared_ptr<BufferCache> *cache);
     c2_status_t requestDeallocate(uint64_t bid, const sp<Fence> &fence,
                                   bool *completed, bool *updateDequeue,
                                   std::shared_ptr<BufferCache> *cache, int *slotId,
@@ -334,6 +352,10 @@
             bool *cached, int *rSlotId, sp<Fence> *rFence,
             std::shared_ptr<BufferItem> *buffer);
 
+    c2_status_t _allocateDirect(
+            uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+            AHardwareBuffer **buf, sp<Fence> *fence);
+
     void writeIncDequeueableLocked(int inc);
     void drainDequeueableLocked(int dec);
 };
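
Taken together with the client.cpp change earlier in this patch, the intended ordering is roughly
the following. This is an illustrative stand-in only; the types below are hypothetical and simply
mirror the calls made by Codec2Client::Component::stop()/release():

    // Hypothetical stand-ins for the current AidlGraphicBufferAllocator and the AIDL component.
    struct FakeAllocator {
        void onRequestStop() { /* sets mStopRequested, unblocks pending allocate() waiters */ }
    };
    struct FakeAidlComponent {
        void stop() { /* HAL-side stop; late allocations are served via _allocateDirect() */ }
    };

    void stopComponent(FakeAllocator* gba, FakeAidlComponent* hal) {
        if (gba != nullptr) {
            gba->onRequestStop();  // 1) tell the allocator a stop/release is coming
        }
        hal->stop();               // 2) then issue the actual HAL stop/release
    }
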
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 07fed18..9014c3b 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -323,7 +323,7 @@
             [&]() {
                 char** tags = nullptr;
                 (void)AAudioStream_obtainTags(mAaudioStream, &tags);
-                AAudioStream_releaseTags(mAaudioStream, tags);
+                AAudioStream_destroyTags(mAaudioStream, tags);
             },
             [&]() {
                 (void)AAudioStream_isMMapUsed(mAaudioStream);
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e72685c..ddafd57 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -1721,9 +1721,13 @@
  * audio hardware) have been played.
  *
  * The presentation end callback must be used together with the data callback.
- * The presentation edn callback won't be called if the stream is closed before all the data
+ * The presentation end callback won't be called if the stream is closed before all the data
  * is played.
  *
+ * The callback function will be called on the same thread as the data callback,
+ * which is a real-time thread owned by the audio framework.
+ * The callback function will not be called after AAudioStream_close() is called.
+ *
  * Available since API level 36.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
index 4c2d291..ae139d9 100644
--- a/media/libaaudio/include/system/aaudio/AAudio.h
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -65,7 +65,7 @@
 /**
  * Allocate and read the audio attributes' tags for the stream into a buffer.
  * The client is responsible to free the memory for tags by calling
- * {@link #AAudioStream_releaseTags} unless the number of tags is 0.
+ * {@link #AAudioStream_destroyTags} unless the number of tags is 0.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param tags a pointer to a variable that will be set to a pointer to an array of char* pointers
@@ -82,7 +82,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param tags reference provided by AAudioStream_obtainTags()
  */
-void AAudioStream_releaseTags(AAudioStream* _Nonnull stream, char* _Nonnull * _Nullable tags);
+void AAudioStream_destroyTags(AAudioStream* _Nonnull stream, char* _Nonnull * _Nullable tags);
 
 #ifdef __cplusplus
 }
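
A small sketch of the obtain/destroy pairing documented above (system API). The include paths and
the exact return convention of AAudioStream_obtainTags are assumptions here; the return value is
treated as the tag count on success and negative on error, per the documentation above:

    #include <aaudio/AAudio.h>           // assumed public header for AAudioStream
    #include <system/aaudio/AAudio.h>    // assumed path for the system API declared above

    // Obtain the stream's audio attribute tags, inspect them, and free them again.
    void dumpTags(AAudioStream* stream) {
        char** tags = nullptr;
        const int32_t count = AAudioStream_obtainTags(stream, &tags);
        for (int32_t i = 0; i < count; ++i) {
            // ... inspect tags[i] ...
        }
        if (count > 0) {
            AAudioStream_destroyTags(stream, tags);  // caller frees unless the count is 0
        }
    }
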
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 33f152c..e59f0ec 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -134,7 +134,12 @@
     request.getConfiguration().setInputPreset(getInputPreset());
     request.getConfiguration().setPrivacySensitive(isPrivacySensitive());
 
-    request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
+    // When sample rate conversion is needed, we use the device sample rate instead of the
+    // requested sample rate to scale the capacity in configureDataInformation().
+    // Thus, we should scale the capacity here to cancel out the (sampleRate / deviceSampleRate)
+    // scaling there.
+    request.getConfiguration().setBufferCapacity(builder.getBufferCapacity()
+            * 48000 / getSampleRate());
 
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
     if (getServiceHandle() < 0
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 64f115c..ecffcbd 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -629,7 +629,7 @@
     return aaTags.size();
 }
 
-AAUDIO_API void AAudioStream_releaseTags(AAudioStream* stream, char** tags) {
+AAUDIO_API void AAudioStream_destroyTags(AAudioStream* stream, char** tags) {
     if (tags == nullptr) {
         return;
     }
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 2425ae4..0de0546 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -82,7 +82,7 @@
     AAudioStreamBuilder_addTag; # systemapi
     AAudioStreamBuilder_clearTags; # systemapi
     AAudioStream_obtainTags; # systemapi
-    AAudioStream_releaseTags; #systemapi
+    AAudioStream_destroyTags; #systemapi
   local:
     *;
 };
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index fcb083d..52c17cf 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -132,7 +132,7 @@
         readTagsSet.insert(readTags[i]);
     }
     EXPECT_EQ(addedTags, readTagsSet);
-    AAudioStream_releaseTags(aaudioStream, readTags);
+    AAudioStream_destroyTags(aaudioStream, readTags);
 
     aaudio_input_preset_t expectedPreset =
             (preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index dcfef45..c675c34 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -35,6 +35,7 @@
 #include <media/TypeConverter.h>
 #include <mediautils/ServiceSingleton.h>
 #include <math.h>
+#include <private/android_filesystem_config.h>
 
 #include <system/audio.h>
 #include <android/media/GetInputForAttrResponse.h>
@@ -145,12 +146,25 @@
         onNewServiceWithAdapter(createServiceAdapter(afs));
     }
 
-    static void onServiceDied(const sp<media::IAudioFlingerService>&) {
-        ALOGW("%s: %s service died", __func__, getServiceName());
+    static void onServiceDied(const sp<media::IAudioFlingerService>& service) {
+        ALOGW("%s: %s service died %p", __func__, getServiceName(), service.get());
         {
             std::lock_guard l(mMutex);
+            if (!mValid) {
+                ALOGW("%s: %s service already invalidated, ignoring", __func__, getServiceName());
+                return;
+            }
+            if (!mService || mService->getDelegate() != service) {
+                ALOGW("%s: %s unmatched service death pointers, ignoring",
+                        __func__, getServiceName());
+                return;
+            }
             mValid = false;
-            mClient->clearIoCache();
+            if (mClient) {
+                mClient->clearIoCache();
+            } else {
+                ALOGW("%s: null client", __func__);
+            }
         }
         AudioSystem::reportError(DEAD_OBJECT);
     }
@@ -169,9 +183,14 @@
             if (!mDisableThreadPoolStart) {
                 ProcessState::self()->startThreadPool();
             }
-            mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
-            mWaitMs = std::chrono::milliseconds(
-                property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+            if (multiuser_get_app_id(getuid()) == AID_AUDIOSERVER) {
+                mediautils::skipService<media::IAudioFlingerService>(mediautils::SkipMode::kWait);
+                mWaitMs = std::chrono::milliseconds(INT32_MAX);
+            } else {
+                mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
+                mWaitMs = std::chrono::milliseconds(
+                        property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+            }
             init = true;
         }
         if (mValid) return mService;
@@ -209,12 +228,18 @@
     static status_t setLocalService(const sp<IAudioFlinger>& af) {
         mediautils::skipService<media::IAudioFlingerService>();
         sp<IAudioFlinger> old;
-        {
-            std::lock_guard l(mMutex);
-            old = mService;
-            mService = af;
+
+        audio_utils::unique_lock ul(mMutex);
+        old = mService;
+        if (old) {
+            ul.unlock();
+            onServiceDied(old->getDelegate());
+            ul.lock();
+            ALOGW_IF(old != mService,
+                    "%s: service changed during callback, continuing.", __func__);
         }
-        if (old) onServiceDied({});
+        mService = af;
+        ul.unlock();
         if (af) onNewServiceWithAdapter(af);
         return OK;
     }
@@ -245,6 +270,8 @@
         bool reportNoError = false;
         {
             std::lock_guard l(mMutex);
+            ALOGW_IF(mValid, "%s: %s service already valid, continuing with initialization",
+                    __func__, getServiceName());
             if (mClient == nullptr) {
                 mClient = sp<AudioSystem::AudioFlingerClient>::make();
             } else {
@@ -975,6 +1002,8 @@
         sp<AudioSystem::AudioPolicyServiceClient> client;
         {
             std::lock_guard l(mMutex);
+            ALOGW_IF(mValid, "%s: %s service already valid, continuing with initialization",
+                    __func__, getServiceName());
             if (mClient == nullptr) {
                 mClient = sp<AudioSystem::AudioPolicyServiceClient>::make();
             }
@@ -985,18 +1014,33 @@
         // TODO(b/375280520) consider registerClient() within mMutex lock.
         const int64_t token = IPCThreadState::self()->clearCallingIdentity();
         aps->registerClient(client);
+        aps->setAudioPortCallbacksEnabled(client->isAudioPortCbEnabled());
+        aps->setAudioVolumeGroupCallbacksEnabled(client->isAudioVolumeGroupCbEnabled());
         IPCThreadState::self()->restoreCallingIdentity(token);
     }
 
-    static void onServiceDied(const sp<IAudioPolicyService>&) {
-        ALOGW("%s: %s service died", __func__, getServiceName());
+    static void onServiceDied(const sp<IAudioPolicyService>& service) {
+        ALOGW("%s: %s service died %p", __func__, getServiceName(), service.get());
         sp<AudioSystem::AudioPolicyServiceClient> client;
         {
             std::lock_guard l(mMutex);
+            if (!mValid) {
+                ALOGW("%s: %s service already invalidated, ignoring", __func__, getServiceName());
+                return;
+            }
+            if (mService != service) {
+                ALOGW("%s: %s unmatched service death pointers, ignoring",
+                        __func__, getServiceName());
+                return;
+            }
             mValid = false;
             client = mClient;
         }
-        client->onServiceDied();
+        if (client) {
+            client->onServiceDied();
+        } else {
+            ALOGW("%s: null client", __func__);
+        }
     }
 
     static constexpr mediautils::ServiceOptions options() {
@@ -1013,9 +1057,14 @@
             if (!mDisableThreadPoolStart) {
                 ProcessState::self()->startThreadPool();
             }
-            mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
-            mWaitMs = std::chrono::milliseconds(
-                    property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+            if (multiuser_get_app_id(getuid()) == AID_AUDIOSERVER) {
+                mediautils::skipService<IAudioPolicyService>(mediautils::SkipMode::kWait);
+                mWaitMs = std::chrono::milliseconds(INT32_MAX);
+            } else {
+                mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
+                mWaitMs = std::chrono::milliseconds(
+                        property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+            }
             init = true;
         }
         if (mValid) return mService;
@@ -1048,12 +1097,19 @@
     static status_t setLocalService(const sp<IAudioPolicyService>& aps) {
         mediautils::skipService<IAudioPolicyService>();
         sp<IAudioPolicyService> old;
-        {
-            std::lock_guard l(mMutex);
-            old = mService;
-            mService = aps;
+        audio_utils::unique_lock ul(mMutex);
+        old = mService;
+        if (old) {
+            ul.unlock();
+            onServiceDied(old);
+            ul.lock();
+            if (mService != old) {
+                ALOGD("%s: service changed during callback, ignoring.", __func__);
+                return OK;
+            }
         }
-        if (old) onServiceDied(old);
+        mService = aps;
+        ul.unlock();
         if (aps) onNewService(aps);
         return OK;
     }
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3591fbf..2202539 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1300,7 +1300,7 @@
     if (isAudioPlaybackRateEqual(playbackRate, mPlaybackRate)) {
         return NO_ERROR;
     }
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         const status_t status = statusTFromBinderStatus(mAudioTrack->setPlaybackRateParameters(
                 VALUE_OR_RETURN_STATUS(
                         legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 8292eef..6b501a7 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -181,6 +181,8 @@
         fromAidl(const media::CreateRecordResponse& aidl);
     };
 
+    virtual sp<media::IAudioFlingerService> getDelegate() const { return {}; }
+
     /* create an audio track and registers it with AudioFlinger.
      * The audioTrack field will be null if the track cannot be created and the status will reflect
      * failure.
@@ -414,6 +416,8 @@
 public:
     explicit AudioFlingerClientAdapter(const sp<media::IAudioFlingerService> delegate);
 
+    sp<media::IAudioFlingerService> getDelegate() const final { return mDelegate; }
+
     status_t createTrack(const media::CreateTrackRequest& input,
                          media::CreateTrackResponse& output) override;
     status_t createRecord(const media::CreateRecordRequest& input,
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 629cd7c..0a131fa 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -79,13 +79,13 @@
 using aidl::android::hardware::audio::core::VendorParameter;
 
 #define RETURN_IF_MODULE_NOT_INIT(retVal)         \
-    if (mModule == nullptr) {                     \
+    if (!isModuleInitialized()) {                 \
         AUGMENT_LOG(E, "module not initialized"); \
         return retVal;                            \
     }
 
 #define RETURN_IF_TELEPHONY_NOT_INIT(retVal)         \
-    if (mTelephony == nullptr) {                     \
+    if (!isTelephonyInitialized()) {                  \
         AUGMENT_LOG(E, "telephony not initialized"); \
         return retVal;                               \
     }
@@ -124,12 +124,12 @@
                              const std::shared_ptr<IHalAdapterVendorExtension>& vext)
     : ConversionHelperAidl("DeviceHalAidl", instance),
       mModule(module),
-      mVendorExt(vext),
       mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
       mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
       mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
       mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
       mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
+      mVendorExt(vext),
       mMapper(instance, module),
       mMapperAccessor(mMapper, mLock) {}
 
@@ -154,8 +154,11 @@
         return BAD_VALUE;
     }
     std::vector<AudioMode> aidlModes;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mTelephony->getSupportedAudioModes(&aidlModes)));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mTelephony->getSupportedAudioModes(&aidlModes)));
+    }
     *modes = VALUE_OR_RETURN_STATUS(
             ::aidl::android::convertContainer<std::vector<media::audio::common::AudioMode>>(
                     aidlModes, ndk2cpp_AudioMode));
@@ -182,8 +185,11 @@
     RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
 
     ITelephony::TelecomConfig inConfig{.voiceVolume = Float{volume}}, outConfig;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
+    }
     AUGMENT_LOG_IF(
             W, outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
             "the resulting voice volume %f is not the same as requested %f",
@@ -196,6 +202,7 @@
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->setMasterVolume(volume));
 }
 
@@ -207,6 +214,7 @@
         AUGMENT_LOG(E, "uninitialized volumes");
         return BAD_VALUE;
     }
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->getMasterVolume(volume));
 }
 
@@ -216,6 +224,7 @@
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
+    std::lock_guard l(mLock);
     if (mTelephony != nullptr) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
     }
@@ -227,6 +236,7 @@
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->setMicMute(state));
 }
 
@@ -239,6 +249,7 @@
         AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->getMicMute(state));
 }
 
@@ -247,6 +258,7 @@
 
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->setMasterMute(state));
 }
 
@@ -259,6 +271,7 @@
         AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->getMasterMute(state));
 }
 
@@ -286,6 +299,7 @@
     if (status_t status = filterAndUpdateTelephonyParameters(parameters); status != OK) {
         AUGMENT_LOG(W, "filterAndUpdateTelephonyParameters failed: %d", status);
     }
+    std::lock_guard l(mLock);
     return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
 }
 
@@ -306,6 +320,7 @@
         AUGMENT_LOG(W, "filterAndRetrieveBtLeParameters failed: %d", status);
     }
     *values = result.toString();
+    std::lock_guard l(mLock);
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
@@ -526,7 +541,10 @@
     args.eventCallback = eventCb;
     args.sourceMetadata = aidlMetadata;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
+    }
     StreamContextAidl context(ret.desc, isOffload, aidlHandle);
     if (!context.isValid()) {
         AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
@@ -605,7 +623,10 @@
     args.sinkMetadata.tracks.push_back(std::move(aidlTrackMetadata));
     args.bufferSizeFrames = aidlConfig.frameCount;
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
+    }
     StreamContextAidl context(ret.desc, false /*isAsynchronous*/, aidlHandle);
     if (!context.isValid()) {
         AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
@@ -904,8 +925,11 @@
                     requestedPortConfig, {} /*destinationPortIds*/, &devicePortConfig, &cleanups));
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->addDeviceEffect(
-                            devicePortConfig.id, aidlEffect->getIEffect())));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                mModule->addDeviceEffect(devicePortConfig.id, aidlEffect->getIEffect())));
+    }
     cleanups.disarmAll();
     return OK;
 }
@@ -936,6 +960,7 @@
                         &devicePortConfig));
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->removeDeviceEffect(
                     devicePortConfig.id, aidlEffect->getIEffect()));
 }
@@ -953,9 +978,10 @@
 
     std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
 
-    if (status_t status = statusTFromBinderStatus(
-            mModule->getMmapPolicyInfos(mmapPolicyType, &mmapPolicyInfos)); status != OK) {
-        return status;
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mModule->getMmapPolicyInfos(mmapPolicyType, &mmapPolicyInfos)));
     }
 
     *policyInfos = VALUE_OR_RETURN_STATUS(
@@ -970,10 +996,8 @@
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t mixerBurstCount = 0;
-    if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
-        return mixerBurstCount;
-    }
-    return 0;
+    std::lock_guard l(mLock);
+    return mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk() ? mixerBurstCount : 0;
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
@@ -982,10 +1006,9 @@
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t hardwareBurstMinUsec = 0;
-    if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
-        return hardwareBurstMinUsec;
-    }
-    return 0;
+    std::lock_guard l(mLock);
+    return mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk() ?
+            hardwareBurstMinUsec : 0;
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
@@ -994,6 +1017,7 @@
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t aidlHwAvSync;
+    std::lock_guard l(mLock);
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
     return VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_int32_t_audio_hw_sync_t(aidlHwAvSync));
@@ -1001,9 +1025,10 @@
 
 status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    if (!isModuleInitialized()) return NO_INIT;
     Vector<String16> newArgs = args;
     newArgs.push(String16(kDumpFromAudioServerArgument));
+    std::lock_guard l(mLock);
     return mModule->dump(fd, Args(newArgs).args(), newArgs.size());
 }
 
@@ -1015,6 +1040,7 @@
     if (supports == nullptr) {
         return BAD_VALUE;
     }
+    std::lock_guard l(mLock);
     return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
 }
 
@@ -1028,15 +1054,15 @@
     }
     if (mSoundDose == nullptr) {
         AUGMENT_LOG(E, "failed to retrieve the sound dose interface");
-        return BAD_VALUE;
-    }
-
-    if (mSoundDose == nullptr) {
-        AUGMENT_LOG(E, "failed to return the sound dose interface not implemented");
         return NO_INIT;
     }
 
     *soundDoseBinder = mSoundDose->asBinder();
+    if (*soundDoseBinder == nullptr) {
+        AUGMENT_LOG(E, "failed to return the sound dose interface: not implemented");
+        return NO_INIT;
+    }
+
     AUGMENT_LOG(I, "using audio AIDL HAL sound dose interface");
     return OK;
 }
@@ -1116,10 +1142,8 @@
     AUGMENT_LOG(V);
     TIME_CHECK();
     RETURN_IF_MODULE_NOT_INIT(NO_INIT);
-    {
-        std::lock_guard l(mLock);
-        mMapper.resetUnusedPatchesAndPortConfigs();
-    }
+    std::lock_guard l(mLock);
+    mMapper.resetUnusedPatchesAndPortConfigs();
     ModuleDebug debug{ .simulateDeviceConnections = enabled };
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
     // This is important to log as it affects HAL behavior.
@@ -1135,6 +1159,7 @@
         AudioParameter &keys, AudioParameter *result) {
     if (String8 key = String8(AudioParameter::keyReconfigA2dpSupported); keys.containsKey(key)) {
         keys.remove(key);
+        std::lock_guard l(mLock);
         if (mBluetoothA2dp != nullptr) {
             bool supports;
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1152,6 +1177,7 @@
         AudioParameter &keys, AudioParameter *result) {
     if (String8 key = String8(AudioParameter::keyReconfigLeSupported); keys.containsKey(key)) {
         keys.remove(key);
+        std::lock_guard l(mLock);
         if (mBluetoothLe != nullptr) {
             bool supports;
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1192,6 +1218,7 @@
                 reconfigureOffload = std::move(result);
                 return OK;
             }));
+    std::lock_guard l(mLock);
     if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
     }
@@ -1232,6 +1259,7 @@
                 }
                 return BAD_VALUE;
             }));
+    std::lock_guard l(mLock);
     if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
         IBluetooth::HfpConfig newHfpConfig;
         return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
@@ -1270,6 +1298,7 @@
                 }
                 return OK;
             }));
+    std::lock_guard l(mLock);
     if (mBluetoothLe != nullptr && leEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
     }
@@ -1330,6 +1359,7 @@
                             AudioParameter::keyBtScoWb, onOrOff.c_str());
                 return BAD_VALUE;
             }));
+    std::lock_guard l(mLock);
     if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
         IBluetooth::ScoConfig newScoConfig;
         return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
@@ -1352,6 +1382,7 @@
                                 AudioParameter::keyScreenState, onOrOff.c_str());
                     return BAD_VALUE;
                 }
+                std::lock_guard l(mLock);
                 return statusTFromBinderStatus(mModule->updateScreenState(isTurnedOn.value()));
             }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
@@ -1376,6 +1407,7 @@
                                     AudioParameter::keyScreenRotation, rotationDegrees);
                         return BAD_VALUE;
                 }
+                std::lock_guard l(mLock);
                 return statusTFromBinderStatus(mModule->updateScreenRotation(rotation));
             }));
     return OK;
@@ -1418,6 +1450,7 @@
                             AudioParameter::keyHacSetting, onOrOff.c_str());
                 return BAD_VALUE;
             }));
+    std::lock_guard l(mLock);
     if (mTelephony != nullptr && telConfig != ITelephony::TelecomConfig{}) {
         ITelephony::TelecomConfig newTelConfig;
         return statusTFromBinderStatus(mTelephony->setTelecomConfig(telConfig, &newTelConfig));
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 6ae6402..af8b423 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -235,19 +235,32 @@
     // MicrophoneInfoProvider implementation
     MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
 
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
-    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
-    const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
+    // See the comment on mLock below: the lock only serializes calls into the interfaces,
+    // so these null checks do not need to hold it.
+    bool isModuleInitialized() const NO_THREAD_SAFETY_ANALYSIS { return mModule != nullptr; }
+    bool isTelephonyInitialized() const NO_THREAD_SAFETY_ANALYSIS { return mTelephony != nullptr; }
+
+    mutable std::mutex mLock;
+    // GUARDED_BY is used to prevent concurrent calls into these interfaces from multiple threads.
+    // There is no requirement for the implementations of IModule and its helper interfaces
+    // to be thread-safe.
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule
+            GUARDED_BY(mLock);
+    const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony
+            GUARDED_BY(mLock);
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth
+            GUARDED_BY(mLock);
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp
+            GUARDED_BY(mLock);
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe
+            GUARDED_BY(mLock);
+
     const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
+    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
 
     std::mutex mCallbacksLock;
     // Use 'mCallbacksLock' only to implement exclusive access to 'mCallbacks'. Never hold it
     // while making any calls.
     std::map<void*, Callbacks> mCallbacks GUARDED_BY(mCallbacksLock);
-    std::mutex mLock;
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
     Hal2AidlMapper mMapper GUARDED_BY(mLock);
     LockedAccessor<Hal2AidlMapper> mMapperAccessor;
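
The convention behind the .cpp changes above: the interface pointers stay const, so the null
checks can run without the lock, while every outgoing binder transaction now takes mLock to
serialize access to a HAL implementation that is not required to be thread-safe. A minimal
sketch of the pattern (the method and transaction names are hypothetical):

    // Sketch only, not actual class code.
    status_t DeviceHalAidl::someHalCall() {
        TIME_CHECK();
        RETURN_IF_MODULE_NOT_INIT(NO_INIT);  // const pointer read; no lock needed
        std::lock_guard l(mLock);            // held only for the duration of the transaction
        return statusTFromBinderStatus(mModule->someTransaction());  // hypothetical IModule call
    }
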
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 9fdde49..658fc18b 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -184,7 +184,7 @@
 status_t EffectHalAidl::process() {
     State state = State::INIT;
     if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
-        state != State::PROCESSING) {
+        (state != State::PROCESSING && state != State::DRAINING)) {
         ALOGI("%s skipping process because it's %s", mEffectName.c_str(),
               mConversion->isBypassing()
                       ? "bypassing"
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index edd7e78..0bfb3dd 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -4212,13 +4212,23 @@
                 trackProgressStatus(timestampUs);
             }
         }
-        if (!hasMultipleTracks || isGainmapMeta || isGainmap) {
+
+        if (flags_camera::camera_heif_gainmap() && mOwner->mHasGainmap) {
+            Mutex::Autolock lock(mOwner->mLock);
+            size_t bytesWritten;
+            off64_t offset = mOwner->addSample_l(copy, usePrefix, tiffHdrOffset, &bytesWritten);
+            addItemOffsetAndSize(offset, bytesWritten, isExif, isGainmapMeta, isGainmap);
+            copy->release();
+            copy = NULL;
+            continue;
+        }
+
+        if (!hasMultipleTracks) {
             size_t bytesWritten;
             off64_t offset = mOwner->addSample_l(
                     copy, usePrefix, tiffHdrOffset, &bytesWritten);
-
             if (mIsHeif) {
-                addItemOffsetAndSize(offset, bytesWritten, isExif, isGainmapMeta, isGainmap);
+                addItemOffsetAndSize(offset, bytesWritten, isExif);
             } else {
                 if (mCo64TableEntries->count() == 0) {
                     addChunkOffset(offset);
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index b250a03..e7fc106 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -93,6 +93,8 @@
     srcs: [
         "NdkJavaVMHelper.cpp",
         "NdkMediaCodec.cpp",
+        "NdkMediaCodecInfo.cpp",
+        "NdkMediaCodecStore.cpp",
         "NdkMediaCrypto.cpp",
         "NdkMediaDataSource.cpp",
         "NdkMediaExtractor.cpp",
@@ -131,6 +133,8 @@
         "libbase",
         "libdatasource",
         "libmedia",
+        "libmedia_codeclist",
+        "libmedia_codeclist_capabilities",
         "libmediadrm",
         "libmedia_omx",
         "libmedia_jni_utils",
diff --git a/media/ndk/NdkMediaCodecInfo.cpp b/media/ndk/NdkMediaCodecInfo.cpp
new file mode 100644
index 0000000..82ceb61
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfo.cpp
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecInfo"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaFormatPriv.h>
+
+using namespace android;
+
+extern "C" {
+
+// Utils
+
+EXPORT
+void AIntRange_delete(AIntRange *range) {
+    free(range);
+}
+
+EXPORT
+void ADoubleRange_delete(ADoubleRange *range) {
+    free(range);
+}
+
+// AMediaCodecInfo
+
+EXPORT
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return nullptr;
+    }
+
+    return info->mInfo->getCodecName();
+}
+
+EXPORT
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) {
+    return info->mInfo->isEncoder();
+}
+
+EXPORT
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) {
+    int32_t attributes = info->mInfo->getAttributes();
+    return (attributes & android::MediaCodecInfo::kFlagIsVendor);
+}
+
+EXPORT
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(const AMediaCodecInfo *info) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return INVALID_CODEC_INFO;
+    }
+
+    int32_t attributes = info->mInfo->getAttributes();
+
+    if (attributes & android::MediaCodecInfo::kFlagIsSoftwareOnly) {
+        return SOFTWARE_ONLY;
+    }
+    if (attributes & android::MediaCodecInfo::kFlagIsHardwareAccelerated) {
+        return HARDWARE_ACCELERATED;
+    }
+    return SOFTWARE_WITH_DEVICE_ACCESS;
+}
+
+EXPORT
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return nullptr;
+    }
+
+    return info->mMediaType.c_str();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) {
+    if (info == nullptr) {
+        return -1;
+    }
+
+    return info->mCodecCaps->getMaxSupportedInstances();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info, const char *featureName) {
+    if (info == nullptr || info->mCodecCaps == nullptr || featureName == nullptr) {
+        return -1;
+    }
+    return info->mCodecCaps->isFeatureSupported(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info, const char *featureName) {
+    if (info == nullptr || info->mCodecCaps == nullptr || featureName == nullptr) {
+        return -1;
+    }
+    return info->mCodecCaps->isFeatureRequired(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info, const AMediaFormat *format) {
+    if (info == nullptr || info->mCodecCaps == nullptr || format == nullptr) {
+        return -1;
+    }
+
+    sp<AMessage> nativeFormat;
+    AMediaFormat_getFormat(format, &nativeFormat);
+
+    return info->mCodecCaps->isFormatSupported(nativeFormat);
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+        const ACodecAudioCapabilities **outAudioCaps) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outAudioCaps = info->mAAudioCaps.get();
+
+    if ((*outAudioCaps) == nullptr) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+        const ACodecVideoCapabilities **outVideoCaps) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outVideoCaps = info->mAVideoCaps.get();
+
+    if ((*outVideoCaps) == nullptr) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+        const ACodecEncoderCapabilities **outEncoderCaps) {
+    if (info == nullptr || info->mInfo == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outEncoderCaps = info->mAEncoderCaps.get();
+
+    if ((*outEncoderCaps) == nullptr) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    return AMEDIA_OK;
+}
+
+// ACodecAudioCapabilities
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+        AIntRange *outRange) {
+    if (audioCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& bitrateRange = audioCaps->mAudioCaps->getBitrateRange();
+    outRange->mLower = bitrateRange.lower();
+    outRange->mUpper = bitrateRange.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+        const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr, size_t *outCount) {
+    if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (audioCaps->mSampleRates.empty()) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    *outArrayPtr = audioCaps->mSampleRates.data();
+    *outCount = audioCaps->mSampleRates.size();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+        const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+    if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outArrayPtr = audioCaps->mSampleRateRanges.data();
+    *outCount = audioCaps->mSampleRateRanges.size();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+    if (audioCaps == nullptr) {
+        return -1;
+    }
+    return audioCaps->mAudioCaps->getMaxInputChannelCount();
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+    if (audioCaps == nullptr) {
+        return -1;
+    }
+    return audioCaps->mAudioCaps->getMinInputChannelCount();
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+        const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+    if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outArrayPtr = audioCaps->mInputChannelCountRanges.data();
+    *outCount = audioCaps->mInputChannelCountRanges.size();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+        int32_t sampleRate) {
+    if (audioCaps == nullptr) {
+        return -1;
+    }
+    return audioCaps->mAudioCaps->isSampleRateSupported(sampleRate);
+}
+
+// ACodecPerformancePoint
+
+EXPORT
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+        int32_t frameRate) {
+    return new ACodecPerformancePoint(
+            std::make_shared<VideoCapabilities::PerformancePoint>(width, height, frameRate));
+}
+
+EXPORT
+media_status_t ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) {
+    if (performancePoint == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    delete performancePoint;
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+        const AMediaFormat *format) {
+    sp<AMessage> nativeFormat;
+    AMediaFormat_getFormat(format, &nativeFormat);
+
+    return performancePoint->mPerformancePoint->covers(nativeFormat);
+}
+
+EXPORT
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+        const ACodecPerformancePoint *another) {
+    return one->mPerformancePoint->covers(*(another->mPerformancePoint));
+}
+
+EXPORT
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+        const ACodecPerformancePoint *another) {
+    return one->mPerformancePoint->equals(*(another->mPerformancePoint));
+}
+
+// ACodecVideoCapabilities
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& bitrateRange = videoCaps->mVideoCaps->getBitrateRange();
+    outRange->mLower = bitrateRange.lower();
+    outRange->mUpper = bitrateRange.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& supportedWidths = videoCaps->mVideoCaps->getSupportedWidths();
+    outRange->mLower = supportedWidths.lower();
+    outRange->mUpper = supportedWidths.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& supportedHeights = videoCaps->mVideoCaps->getSupportedHeights();
+    outRange->mLower = supportedHeights.lower();
+    outRange->mUpper = supportedHeights.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getWidthAlignment(const ACodecVideoCapabilities *videoCaps) {
+    if (videoCaps == nullptr) {
+        return -1;
+    }
+    return videoCaps->mVideoCaps->getWidthAlignment();
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getHeightAlignment(const ACodecVideoCapabilities *videoCaps) {
+    if (videoCaps == nullptr) {
+        return -1;
+    }
+    return videoCaps->mVideoCaps->getHeightAlignment();
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+        const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& frameRateRange = videoCaps->mVideoCaps->getSupportedFrameRates();
+    outRange->mLower = frameRateRange.lower();
+    outRange->mUpper = frameRateRange.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t height, AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::optional<Range<int32_t>> widthRange = videoCaps->mVideoCaps->getSupportedWidthsFor(height);
+    if (!widthRange) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    outRange->mLower = widthRange.value().lower();
+    outRange->mUpper = widthRange.value().upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width, AIntRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::optional<Range<int32_t>> heightRange
+            = videoCaps->mVideoCaps->getSupportedHeightsFor(width);
+    if (!heightRange) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    outRange->mLower = heightRange.value().lower();
+    outRange->mUpper = heightRange.value().upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+        ADoubleRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::optional<Range<double>> frameRates
+            = videoCaps->mVideoCaps->getSupportedFrameRatesFor(width, height);
+    if (!frameRates) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    outRange->mLower = frameRates.value().lower();
+    outRange->mUpper = frameRates.value().upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+        ADoubleRange *outRange) {
+    if (videoCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::optional<Range<double>> frameRates
+            = videoCaps->mVideoCaps->getAchievableFrameRatesFor(width, height);
+    if (!frameRates) {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    outRange->mLower = frameRates.value().lower();
+    outRange->mUpper = frameRates.value().upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+        const ACodecVideoCapabilities *videoCaps,
+        const ACodecPerformancePoint **outPerformancePointArray, size_t *outCount) {
+    if (videoCaps == nullptr || outPerformancePointArray == nullptr || outCount == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    *outPerformancePointArray = videoCaps->mPerformancePoints.data();
+    *outCount = videoCaps->mPerformancePoints.size();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+        int32_t width, int32_t height, double frameRate) {
+    if (videoCaps == nullptr) {
+        return -1;
+    }
+    return videoCaps->mVideoCaps->areSizeAndRateSupported(width, height, frameRate);
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+        int32_t width, int32_t height) {
+    if (videoCaps == nullptr) {
+        return -1;
+    }
+    return videoCaps->mVideoCaps->isSizeSupported(width, height);
+}
+
+// ACodecEncoderCapabilities
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+        const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+    if (encoderCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& qualityRange = encoderCaps->mEncoderCaps->getQualityRange();
+    outRange->mLower = qualityRange.lower();
+    outRange->mUpper = qualityRange.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+        const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+    if (encoderCaps == nullptr || outRange == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const Range<int32_t>& complexityRange = encoderCaps->mEncoderCaps->getComplexityRange();
+    outRange->mLower = complexityRange.lower();
+    outRange->mUpper = complexityRange.upper();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+        const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) {
+    if (encoderCaps == nullptr) {
+        return -1;
+    }
+    return encoderCaps->mEncoderCaps->isBitrateModeSupported(mode);
+}
+
+
+}
\ No newline at end of file
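
The performance point objects above are the only explicitly user-owned handles in this file;
everything returned through the other getters is framework-owned. A minimal sketch of the
create/use/delete lifecycle:

    // Illustrative only.
    ACodecPerformancePoint *target = ACodecPerformancePoint_create(1920, 1080, 30);
    ACodecPerformancePoint *candidate = ACodecPerformancePoint_create(1280, 720, 60);
    // Query whether 'target' covers the frame size and rate requirements of 'candidate'.
    bool covered = ACodecPerformancePoint_covers(target, candidate);
    ACodecPerformancePoint_delete(target);
    ACodecPerformancePoint_delete(candidate);
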
diff --git a/media/ndk/NdkMediaCodecInfoPriv.h b/media/ndk/NdkMediaCodecInfoPriv.h
new file mode 100644
index 0000000..6d9188b
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfoPriv.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_PRIV_H
+#define _NDK_MEDIA_CODEC_INFO_PRIV_H
+
+#include <media/MediaCodecInfo.h>
+#include <media/NdkMediaCodecInfo.h>
+
+struct ACodecAudioCapabilities {
+    std::shared_ptr<android::AudioCapabilities> mAudioCaps;
+
+    std::vector<int> mSampleRates;
+    std::vector<AIntRange> mSampleRateRanges;
+    std::vector<AIntRange> mInputChannelCountRanges;
+
+    void initSampleRates() {
+        mSampleRates = mAudioCaps->getSupportedSampleRates();
+    }
+
+    void initSampleRateRanges() {
+        const std::vector<android::Range<int>>& sampleRateRanges
+                = mAudioCaps->getSupportedSampleRateRanges();
+        for (auto it = sampleRateRanges.begin(); it != sampleRateRanges.end(); it++) {
+            mSampleRateRanges.emplace_back(it->lower(), it->upper());
+        }
+    }
+
+    void initInputChannelCountRanges() {
+        const std::vector<android::Range<int>>& inputChannels
+                = mAudioCaps->getInputChannelCountRanges();
+        for (auto it = inputChannels.begin(); it != inputChannels.end(); it++) {
+            mInputChannelCountRanges.emplace_back(it->lower(), it->upper());
+        }
+    }
+
+    ACodecAudioCapabilities(std::shared_ptr<android::AudioCapabilities> audioCaps)
+            : mAudioCaps(audioCaps) {
+        initSampleRates();
+        initSampleRateRanges();
+        initInputChannelCountRanges();
+    }
+};
+
+struct ACodecPerformancePoint {
+    std::shared_ptr<const android::VideoCapabilities::PerformancePoint> mPerformancePoint;
+
+    ACodecPerformancePoint(std::shared_ptr<const android::VideoCapabilities::PerformancePoint>
+            performancePoint) : mPerformancePoint(performancePoint) {}
+};
+
+struct ACodecVideoCapabilities {
+    std::shared_ptr<android::VideoCapabilities> mVideoCaps;
+
+    std::vector<ACodecPerformancePoint> mPerformancePoints;
+
+    void initPerformancePoints() {
+        const std::vector<android::VideoCapabilities::PerformancePoint>& performancePoints
+            = mVideoCaps->getSupportedPerformancePoints();
+        for (auto it = performancePoints.begin(); it != performancePoints.end(); it++) {
+            // Copy each point; wrapping the address of a vector element in an owning
+            // shared_ptr would lead to a double free.
+            mPerformancePoints.emplace_back(
+                    std::make_shared<const android::VideoCapabilities::PerformancePoint>(*it));
+        }
+    }
+
+    ACodecVideoCapabilities(std::shared_ptr<android::VideoCapabilities> videoCaps)
+            : mVideoCaps(videoCaps) {
+        initPerformancePoints();
+    }
+};
+
+struct ACodecEncoderCapabilities {
+    std::shared_ptr<android::EncoderCapabilities> mEncoderCaps;
+
+    ACodecEncoderCapabilities(std::shared_ptr<android::EncoderCapabilities> encoderCaps)
+            : mEncoderCaps(encoderCaps) {}
+};
+
+struct AMediaCodecInfo {
+    std::string mName;
+    android::sp<android::MediaCodecInfo> mInfo;
+    std::string mMediaType;
+    std::shared_ptr<android::CodecCapabilities> mCodecCaps;
+
+    std::shared_ptr<const ACodecAudioCapabilities> mAAudioCaps;
+    std::shared_ptr<const ACodecVideoCapabilities> mAVideoCaps;
+    std::shared_ptr<const ACodecEncoderCapabilities> mAEncoderCaps;
+
+    AMediaCodecInfo(std::string name, android::sp<android::MediaCodecInfo> info,
+            std::shared_ptr<android::CodecCapabilities> codecCaps, std::string mediaType)
+            : mName(name), mInfo(info), mMediaType(mediaType), mCodecCaps(codecCaps) {
+        if (!mName.empty() && mInfo != nullptr && !mMediaType.empty() && mCodecCaps != nullptr) {
+            if (mCodecCaps->getAudioCapabilities() != nullptr) {
+                mAAudioCaps = std::make_shared<const ACodecAudioCapabilities>(
+                        mCodecCaps->getAudioCapabilities());
+            }
+            if (mCodecCaps->getVideoCapabilities() != nullptr) {
+                mAVideoCaps = std::make_shared<const ACodecVideoCapabilities>(
+                        mCodecCaps->getVideoCapabilities());
+            }
+            if (mCodecCaps->getEncoderCapabilities() != nullptr) {
+                mAEncoderCaps = std::make_shared<const ACodecEncoderCapabilities>(
+                    mCodecCaps->getEncoderCapabilities());
+            }
+        }
+    }
+};
+
+#endif //_NDK_MEDIA_CODEC_INFO_PRIV_H
\ No newline at end of file
diff --git a/media/ndk/NdkMediaCodecStore.cpp b/media/ndk/NdkMediaCodecStore.cpp
new file mode 100644
index 0000000..d911593
--- /dev/null
+++ b/media/ndk/NdkMediaCodecStore.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecStore"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaCodecStore.h>
+#include <media/NdkMediaFormatPriv.h>
+
+#include <media/IMediaCodecList.h>
+
+#include <media/MediaCodecInfo.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+
+using namespace android;
+
+static sp<IMediaCodecList> sCodecList;
+static std::vector<AMediaCodecSupportedMediaType> sMediaTypes;
+static std::vector<AMediaCodecInfo> sCodecInfos;
+
+static std::map<std::string, AMediaCodecInfo> sNameToInfoMap;
+static std::map<std::string, std::vector<AMediaCodecInfo>> sTypeToInfoList;
+
+static void initMediaTypes() {
+    if (sCodecList == nullptr) {
+        sCodecList = MediaCodecList::getInstance();
+    }
+
+    std::map<std::string, AMediaCodecSupportedMediaType> typesInfoMap;
+    std::vector<std::string> mediaTypes; // Keep the order of media types appearing in sCodecList.
+    for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+        sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+        if (codecInfo == nullptr) {
+            ALOGW("NULL MediaCodecInfo in MediaCodecList");
+            continue;
+        }
+        Vector<AString> codecMediaTypes;
+        codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+        for (AString codecMediaType : codecMediaTypes) {
+            std::string mediaType = std::string(codecMediaType.c_str());
+
+            // Excludes special codecs from NDK
+            const std::shared_ptr<CodecCapabilities> codecCaps
+                    = codecInfo->getCodecCapsFor(mediaType.c_str());
+            if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+                continue;
+            }
+
+            auto it = typesInfoMap.find(mediaType);
+            if (it == typesInfoMap.end()) {
+                AMediaCodecSupportedMediaType supportedType = { mediaType.c_str(), 0 };
+                it = typesInfoMap.emplace(mediaType, supportedType).first;
+                mediaTypes.push_back(mediaType);
+            }
+            uint32_t &mode = it->second.mMode;
+            mode |= (codecInfo->isEncoder() ? AMediaCodecSupportedMediaType::FLAG_ENCODER
+                                            : AMediaCodecSupportedMediaType::FLAG_DECODER);
+        }
+    }
+
+    // sMediaTypes keeps the order of media types appearing in sCodecList.
+    for (std::string &type : mediaTypes) {
+        sMediaTypes.push_back(typesInfoMap.find(type)->second);
+    }
+}
+
+static void initCodecInfoMap() {
+    if (sCodecList == nullptr) {
+        sCodecList = MediaCodecList::getInstance();
+    }
+
+    for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+        sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+        if (codecInfo == nullptr) {
+            ALOGW("NULL MediaCodecInfo in MediaCodecList");
+            continue;
+        }
+
+        Vector<AString> codecMediaTypes;
+        codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+        bool useTypeSuffix = codecMediaTypes.size() > 1;
+        for (AString codecMediaType : codecMediaTypes) {
+            std::string mediaType = std::string(codecMediaType.c_str());
+
+            // Excludes special codecs from NDK
+            const std::shared_ptr<CodecCapabilities> codecCaps
+                    = codecInfo->getCodecCapsFor(mediaType.c_str());
+            if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+                continue;
+            }
+
+            // get the type name after the slash. e.g. video/x.on2.vp8
+            size_t slashIx = mediaType.find_last_of('/');
+            if (slashIx == std::string::npos) {
+                slashIx = 0;
+            } else {
+                slashIx++;
+            }
+            std::string ndkBaseName = std::string(codecInfo->getCodecName());
+            if (useTypeSuffix) {
+                // If there are multiple supported media types,
+                // add the type to the end of the name to disambiguate names.
+                ndkBaseName += "." + mediaType.substr(slashIx);
+            }
+
+            int32_t copyIx = 0;
+            std::string ndkName;
+            // if a name is already registered,
+            // add ".1", ".2", ... at the end to disambiguate names.
+            while (true) {
+                ndkName = ndkBaseName;
+                if (copyIx > 0) {
+                    ndkName += "." + std::to_string(copyIx);
+                }
+                if (!sNameToInfoMap.contains(ndkName)) {
+                    break;
+                }
+                copyIx++;
+            }
+
+            AMediaCodecInfo info = AMediaCodecInfo(ndkName, codecInfo, codecCaps, mediaType);
+            sCodecInfos.push_back(info);
+            sNameToInfoMap.emplace(ndkName, info);
+
+            auto it = sTypeToInfoList.find(mediaType);
+            if (it == sTypeToInfoList.end()) {
+                std::vector<AMediaCodecInfo> infoList;
+                infoList.push_back(info);
+                sTypeToInfoList.emplace(mediaType, infoList);
+            } else {
+                it->second.push_back(info);
+            }
+        }
+    }
+}
+
+static bool codecHandlesFormat(const AMediaCodecInfo &codecInfo,
+        const sp<AMessage> &format, bool isEncoder) {
+    return codecInfo.mCodecCaps->isEncoder() == isEncoder
+            && codecInfo.mCodecCaps->isFormatSupported(format);
+}
+
+static media_status_t findNextCodecForFormat(
+        const AMediaFormat *format, bool isEncoder, const AMediaCodecInfo **outCodecInfo) {
+    if (outCodecInfo == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (sCodecInfos.empty()) {
+        initCodecInfoMap();
+    }
+
+    // A non-owning pointer: the vectors below are owned by the static containers,
+    // so they must not be deleted here.
+    const std::vector<AMediaCodecInfo> *infos = nullptr;
+    sp<AMessage> nativeFormat;
+    if (format == nullptr) {
+        infos = &sCodecInfos;
+    } else {
+        AMediaFormat_getFormat(format, &nativeFormat);
+        AString mime;
+        if (!nativeFormat->findString(KEY_MIME, &mime)) {
+            return AMEDIA_ERROR_INVALID_PARAMETER;
+        }
+
+        std::string mediaType = std::string(mime.c_str());
+        auto it = sTypeToInfoList.find(mediaType);
+        if (it == sTypeToInfoList.end()) {
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+        infos = &(it->second);
+    }
+
+    bool found = *outCodecInfo == nullptr;
+    for (const AMediaCodecInfo &info : *infos) {
+        if (found && (format == nullptr
+                || codecHandlesFormat(info, nativeFormat, isEncoder))) {
+            *outCodecInfo = &info;
+            return AMEDIA_OK;
+        }
+        if (*outCodecInfo == &info) {
+            found = true;
+        }
+
+    }
+    *outCodecInfo = nullptr;
+    return AMEDIA_ERROR_UNSUPPORTED;
+}
+
+extern "C" {
+
+EXPORT
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+        const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) {
+    if (outMediaTypes == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (sMediaTypes.empty()) {
+        initMediaTypes();
+    }
+
+    *outCount = sMediaTypes.size();
+    *outMediaTypes = sMediaTypes.data();
+
+    return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo){
+    return findNextCodecForFormat(format, false, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo){
+    return findNextCodecForFormat(format, true, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_getCodecInfo(
+        const char *name, const AMediaCodecInfo **outCodecInfo) {
+    if (outCodecInfo == nullptr || name == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (sNameToInfoMap.empty()) {
+        initCodecInfoMap();
+    }
+
+    auto it = sNameToInfoMap.find(std::string(name));
+    if (it == sNameToInfoMap.end()) {
+        *outCodecInfo = nullptr;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    } else {
+        *outCodecInfo = &(it->second);
+        return AMEDIA_OK;
+    }
+}
+
+}
\ No newline at end of file
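
The find-next entry points above use the out pointer as an iteration cursor: it must start as
nullptr, and the value returned by one call is fed back into the next until
AMEDIA_ERROR_UNSUPPORTED signals the end of the list. A minimal sketch, assuming a format
carrying only a MIME type is enough for the match:

    // Illustrative only.
    AMediaFormat *format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");

    const AMediaCodecInfo *codec = nullptr;  // nullptr selects the first match
    while (AMediaCodecStore_findNextDecoderForFormat(format, &codec) == AMEDIA_OK) {
        // 'codec' is framework-owned; no release is needed.
        const char *name = AMediaCodecInfo_getCanonicalName(codec);
        // ... inspect 'name' or query capabilities via AMediaCodecInfo_getVideoCapabilities ...
    }
    AMediaFormat_delete(format);
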
diff --git a/media/ndk/include/media/NdkMediaCodecInfo.h b/media/ndk/include/media/NdkMediaCodecInfo.h
new file mode 100644
index 0000000..558e82c
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecInfo.h
@@ -0,0 +1,625 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecInfo.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_H
+#define _NDK_MEDIA_CODEC_INFO_H
+
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+struct ACodecAudioCapabilities;
+typedef struct ACodecAudioCapabilities ACodecAudioCapabilities;
+struct ACodecPerformancePoint;
+typedef struct ACodecPerformancePoint ACodecPerformancePoint;
+struct ACodecVideoCapabilities;
+typedef struct ACodecVideoCapabilities ACodecVideoCapabilities;
+struct ACodecEncoderCapabilities;
+typedef struct ACodecEncoderCapabilities ACodecEncoderCapabilities;
+struct AMediaCodecInfo;
+typedef struct AMediaCodecInfo AMediaCodecInfo;
+
+/**
+ * A utility structure describing the range of two integer values.
+ */
+typedef struct AIntRange {
+    int32_t mLower;
+    int32_t mUpper;
+} AIntRange;
+
+/**
+ * A utility structure describing the range of two double values.
+ */
+typedef struct ADoubleRange {
+    double mLower;
+    double mUpper;
+} ADoubleRange;
+
+// AMediaCodecInfo
+
+/**
+ * Get the canonical name of a codec.
+ *
+ * \return      The char pointer to the canonical name.
+ *              It is owned by the framework. No lifetime management needed for users.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is an encoder.
+ */
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is provided by the Android platform (false) or the device manufacturer (true).
+ */
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * The type of codecs.
+ */
+typedef enum AMediaCodecType : int32_t {
+    /**
+     * Not a codec type. Used to indicate that an invalid operation occurred.
+     */
+    INVALID_CODEC_INFO = 0,
+
+    /**
+     * Software codec.
+     *
+     * Software-only codecs are more secure as they run in a tighter security sandbox.
+     * On the other hand, software-only codecs do not provide any performance guarantees.
+     */
+    SOFTWARE_ONLY = 1,
+
+    /**
+     * Hardware accelerated codec.
+     *
+     * Hardware codecs generally have higher performance or lower power consumption than
+     * software codecs, but since they are specific to each device,
+     * the actual performance details can vary.
+     */
+    HARDWARE_ACCELERATED = 2,
+
+    /**
+     * Software codec but have device access.
+     * Mainly referring to software codecs provided by vendors.
+     */
+    SOFTWARE_WITH_DEVICE_ACCESS = 3,
+} AMediaCodecType;
+
+/**
+ * Query if the codec is SOFTWARE_ONLY, HARDWARE_ACCELERATED or SOFTWARE_WITH_DEVICE_ACCESS.
+ *
+ * Return INVALID_CODEC_INFO if @param info is invalid.
+ */
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(
+        const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported media type of the codec.
+ *
+ * \return  The char pointer to the media type.
+ *          It is owned by the framework with infinite lifetime.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Returns the max number of the supported concurrent codec instances.
+ *
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ *
+ * Return -1 if @param info is invalid.
+ */
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature capabilities.
+ *
+ * These features are supported to be used by the codec.  These
+ * include optional features that can be turned on, as well as
+ * features that are always on.
+ *
+ * Return 1 if the feature is supported;
+ * Return 0 if the feature is unsupported;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info,
+        const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature requirements.
+ *
+ * These features are required to be used by the codec, and as such,
+ * they are always turned on.
+ *
+ * Return 1 if the feature is required;
+ * Return 0 if the feature is not required;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info,
+        const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query whether codec supports a given @param format.
+ *
+ * Return 1 if the format is supported;
+ * Return 0 if the format is unsupported;
+ * Return -1 if @param format is invalid.
+ */
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info,
+        const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecAudioCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outAudioCaps        The pointer to the output ACodecAudioCapabilities.
+ *                            It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecAudioCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an audio codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+        const ACodecAudioCapabilities **outAudioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecVideoCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outVideoCaps        The pointer to the output ACodecVideoCapabilities.
+ *                            It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecVideoCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not a video codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+        const ACodecVideoCapabilities **outVideoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecEncoderCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outEncoderCaps        The pointer to the output ACodecEncoderCapabilities.
+ *                              It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecEncoderCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an encoder.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+        const ACodecEncoderCapabilities **outEncoderCaps) __INTRODUCED_IN(36);
+
+// ACodecAudioCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange  The pointer to the range of supported bitrates.
+ *                  Users are responsible for allocating a valid AIntRange structure and
+ *                  managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps and @param outRange is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rates
+ *
+ * The array is sorted in ascending order.
+ *
+ * @param outArrayPtr   The pointer to the output sample rates array.
+ *                      The array is owned by the framework and has an infinite lifetime.
+ * @param outCount      The size of the output array.
+ *
+ * Return AMEDIA_OK if the codec supports only discrete values.
+ * Otherwise, it returns AMEDIA_ERROR_UNSUPPORTED.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+        const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr,
+        size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rate ranges.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr   The pointer to the out sample rate ranges array.
+ *                      The array is owned by the framework and has an infinite lifetime.
+ * @param outCount      The size of the out array.
+ *
+ * Return AMEDIA_OK if got the sample rate ranges successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+        const ACodecAudioCapabilities *audioCaps,
+        const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return the maximum number of input channels supported.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(
+        const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Returns the minimum number of input channels supported.
+ * This is often 1, but does vary for certain mime types.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(
+        const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get an array of ranges representing the number of input channels supported.
+ * The codec supports any number of input channels within this range.
+ * For many codecs, this will be a single range [1..N], for some N.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr   The pointer to the output array of input-channels ranges.
+ *                      The array is owned by the framework and has an infinite lifetime.
+ * @param outCount      The size of the output array.
+ *
+ * Return AMEDIA_OK if got the input channel array successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param audioCaps is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+        const ACodecAudioCapabilities *audioCaps,
+        const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Query whether the sample rate is supported by the codec.
+ *
+ * Return 1 if the sample rate is supported.
+ * Return 0 if the sample rate is unsupported.
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+        int32_t sampleRate) __INTRODUCED_IN(36);
+
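As a sketch of how the audio-capability queries compose, assuming an ACodecAudioCapabilities obtained via AMediaCodecInfo_getAudioCapabilities; the 48 kHz probe and the function name are illustrative:

    static void inspectAudioCaps(const ACodecAudioCapabilities *audioCaps) {
        AIntRange bitrates;  // caller-allocated, filled by the framework
        if (ACodecAudioCapabilities_getBitrateRange(audioCaps, &bitrates) != AMEDIA_OK) {
            return;
        }

        const int *rates = nullptr;
        size_t count = 0;
        if (ACodecAudioCapabilities_getSupportedSampleRates(audioCaps, &rates, &count)
                == AMEDIA_OK) {
            // Discrete sample rates: the array is framework-owned, do not free it.
            for (size_t i = 0; i < count; ++i) {
                (void)rates[i];  // rates[i] is a supported sample rate in Hz.
            }
        } else {
            // Only continuous ranges are advertised; probe a specific rate instead.
            bool has48k = ACodecAudioCapabilities_isSampleRateSupported(audioCaps, 48000) == 1;
            (void)has48k;
        }
    }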
+// ACodecPerformancePoint
+
+/**
+ * Create a performance point for a given frame size and frame rate.
+ *
+ * Performance points are defined by number of pixels, pixel rate and frame rate.
+ *
+ * Users are responsible for calling
+ * ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) after use.
+ *
+ * @param width width of the frame in pixels
+ * @param height height of the frame in pixels
+ * @param frameRate frame rate in frames per second
+ */
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+        int32_t frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Delete a created performance point.
+ *
+ * Return AMEDIA_OK if it is successfully deleted.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param performancePoint is invalid.
+ */
+media_status_t ACodecPerformancePoint_delete(
+        ACodecPerformancePoint *performancePoint) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether the performance point covers a media format.
+ *
+ * @param format Stream format considered.
+ * Return true if the performance point covers the format.
+ */
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+        const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether a performance point covers another performance point.
+ *
+ * Use this method to determine if a performance point advertised by a codec covers the
+ * performance point required. This method can also be used for loose ordering as this
+ * method is transitive.
+ *
+ * A performance point represents an upper bound. This means that
+ * it covers all performance points with fewer pixels, pixel rate and frame rate.
+ *
+ * Return true if @param one covers @param another.
+ */
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+        const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether two performance points are equal.
+ */
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+        const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
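A small sketch of the covers/equals relation described above; the chosen resolutions and frame rates are only illustrative:

    static void comparePerformancePoints() {
        // 1080p/60 is an upper bound on 720p/30 in pixels, pixel rate and frame rate.
        ACodecPerformancePoint *big = ACodecPerformancePoint_create(1920, 1080, 60);
        ACodecPerformancePoint *small = ACodecPerformancePoint_create(1280, 720, 30);

        bool covers = ACodecPerformancePoint_covers(big, small);  // expected: true
        bool equal = ACodecPerformancePoint_equals(big, small);   // expected: false
        (void)covers;
        (void)equal;

        ACodecPerformancePoint_delete(small);
        ACodecPerformancePoint_delete(big);
    }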
+// ACodecVideoCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange  The pointer to the range of output bitrates.
+ *                  Users are responsible for allocating a valid AIntRange structure and
+ *                  managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths.
+ *
+ * @param outRange  The pointer to the range of output supported widths.
+ *                  Users are responsible for allocating a valid AIntRange structure and
+ *                  managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video widths successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights.
+ *
+ * @param outRange  The pointer to the range of output supported heights.
+ *                  Users are responsible for allocating a valid AIntRange structure and
+ *                  managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video heights successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video width (in pixels).
+ *
+ * This is a power-of-2 value that video width must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getWidthAlignment(
+        const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video height (in pixels).
+ *
+ * This is a power-of-2 value that video height must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getHeightAlignment(
+        const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported frame rates.
+ *
+ * This is not a performance indicator. Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material for later playback at a certain
+ * frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange  The pointer to the range of output supported frame rates.
+ *                  Users are responsible for allocating a valid AIntRange structure and
+ *                  managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the frame rate range successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+        const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths for a video height.
+ *
+ * @param outRange      The pointer to the range of supported widths.
+ *                      Users are responsible for allocating a valid AIntRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video width range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the height query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t height,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights for a video width.
+ *
+ * @param outRange      The pointer to the range of supported heights.
+ *                      Users are responsible for allocating a valid AIntRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video height range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the width query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video frame rates for a video size.
+ *
+ * This is not a performance indicator.  Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material of a given size for later playback at
+ * a certain frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange      The pointer to the range of frame rates.
+ *                      Users are responsible for allocating a valid ADoubleRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the size query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+        ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of achievable video frame rates for a video size.
+ *
+ * This is based on the manufacturer's performance measurements for this device and codec.
+ * The measurements may not be available for all codecs or devices.
+ *
+ * @param outRange      The pointer to the range of frame rates.
+ *                      Users are responsible for allocating a valid ADoubleRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the achievable video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec did not publish any measurement data.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+        const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+        ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported performance points.
+ *
+ * @param outPerformancePointArray      The pointer to the output performance points array.
+ *                                      The array is owned by the framework and has an infinite
+ *                                      lifetime.
+ * @param outCount                      The size of the output array.
+ *
+ * Return AMEDIA_OK if successfully got the performance points.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param videoCaps is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+        const ACodecVideoCapabilities *videoCaps,
+        const ACodecPerformancePoint **outPerformancePointArray,
+        size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size and frameRate combination is supported.
+ *
+ * Return 1 if the size and rate are supported.
+ * Return 0 if they are not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+        int32_t width, int32_t height, double frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size is supported.
+ *
+ * Return 1 if the size is supported.
+ * Return 0 if the size is not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+        int32_t width, int32_t height) __INTRODUCED_IN(36);
+
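Because the size and rate queries above return 1, 0, or -1 rather than a plain boolean, callers should compare against 1 explicitly; a minimal sketch (4K at 30 fps is an illustrative target):

    static bool supports4k30(const ACodecVideoCapabilities *videoCaps) {
        // Both helpers return 1/0/-1, so compare against 1 rather than treating
        // the result as a boolean.
        return ACodecVideoCapabilities_isSizeSupported(videoCaps, 3840, 2160) == 1
                && ACodecVideoCapabilities_areSizeAndRateSupported(
                        videoCaps, 3840, 2160, 30.0) == 1;
    }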
+// ACodecEncoderCapabilities
+
+/**
+ * Get the supported range of quality values.
+ *
+ * Quality is implementation-specific. As a general rule, a higher quality
+ * setting results in a better image quality and a lower compression ratio.
+ *
+ * @param outRange      The pointer to the range of quality values.
+ *                      Users are responsible for allocating a valid AIntRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the quality range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+        const ACodecEncoderCapabilities *encoderCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported range of encoder complexity values.
+ *
+ * Some codecs may support multiple complexity levels, where higher complexity values use more
+ * encoder tools (e.g. perform more intensive calculations) to improve the quality or the
+ * compression ratio. Use a lower value to save power and/or time.
+ *
+ * @param outRange      The pointer to the range of encoder complexity values.
+ *                      Users are responsible for allocating a valid AIntRange structure and
+ *                      managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the complexity range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+        const ACodecEncoderCapabilities *encoderCaps,
+        AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Encoder bitrate modes.
+ */
+typedef enum ABiterateMode : int32_t {
+    BITRATE_MODE_CQ = 0,
+    BITRATE_MODE_VBR = 1,
+    BITRATE_MODE_CBR = 2,
+    BITRATE_MODE_CBR_FD = 3
+} ABiterateMode;
+
+/**
+ * Query whether a bitrate mode is supported.
+ *
+ * Return 1 if the bitrate mode is supported.
+ * Return 0 if the bitrate mode is unsupported.
+ * Return -1 if @param encoderCaps is invalid.
+ */
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+        const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) __INTRODUCED_IN(36);
+
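A short sketch combining the encoder queries above; the CBR probe and the helper name are illustrative:

    static bool encoderSupportsCbr(const ACodecEncoderCapabilities *encoderCaps) {
        AIntRange quality;     // caller-allocated, filled by the framework
        AIntRange complexity;  // caller-allocated, filled by the framework
        (void)ACodecEncoderCapabilities_getQualityRange(encoderCaps, &quality);
        (void)ACodecEncoderCapabilities_getComplexityRange(encoderCaps, &complexity);
        // isBitrateModeSupported returns 1/0/-1, so compare against 1 explicitly.
        return ACodecEncoderCapabilities_isBitrateModeSupported(
                encoderCaps, BITRATE_MODE_CBR) == 1;
    }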
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_INFO_H
+
+/** @} */
\ No newline at end of file
diff --git a/media/ndk/include/media/NdkMediaCodecStore.h b/media/ndk/include/media/NdkMediaCodecStore.h
new file mode 100644
index 0000000..aab8689
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecStore.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecStore.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_STORE_H
+#define _NDK_MEDIA_CODEC_STORE_H
+
+#include <stdint.h>
+
+#include "NdkMediaCodecInfo.h"
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+/**
+ * The media type definition with bitfields indicating whether it is
+ * supported by decoders, encoders, or both.
+ */
+typedef struct AMediaCodecSupportedMediaType {
+    enum Mode : uint32_t {
+        FLAG_DECODER = 1 << 0,
+        FLAG_ENCODER = 1 << 1,
+    };
+
+    // The media type.
+    const char *mMediaType;
+    // bitfields for modes.
+    uint32_t mMode;
+} AMediaCodecSupportedMediaType;
+
+/**
+ * Get an array of all the supported media types of a device.
+ *
+ * @param outMediaTypes The pointer to the output AMediaCodecSupportedMediaType array.
+ *                      It is owned by the framework and has an infinite lifetime.
+ *
+ * @param outCount size of the out array.
+ *
+ * Return AMEDIA_OK if successfully got the supported media types.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outMediaTypes is invalid.
+ */
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+        const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the next decoder info that supports the format.
+ *
+ * @param outCodecInfo  Should be set to NULL to start the iteration.
+ *                      Pass back the codecInfo returned by the previous call to get the next one.
+ *                      *outCodecInfo will be set to NULL when the end of the iteration is reached.
+ *                      It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format        If NULL, this API will iterate through all available decoders.
+ *                      If not NULL, it MUST contain the "mime" key specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if no more decoders support the format.
+ *
+ * It is undefined behavior to call this API with a non-NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ */
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
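The iteration protocol above (start with NULL, feed the previous result back in, stop when *outCodecInfo comes back NULL) looks like this in practice; the "video/avc" media type is an illustrative assumption:

    static int countAvcDecoders() {
        AMediaFormat *format = AMediaFormat_new();
        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");

        int count = 0;
        const AMediaCodecInfo *info = nullptr;  // NULL starts the iteration.
        while (AMediaCodecStore_findNextDecoderForFormat(format, &info) == AMEDIA_OK
                && info != nullptr) {
            ++count;  // info is framework-owned; do not free it.
        }
        AMediaFormat_delete(format);
        return count;
    }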
+/**
+ * Get the next encoder info that supports the format.
+ *
+ * @param outCodecInfo  Should be set to NULL to start the iteration.
+ *                      Pass back the codecInfo returned by the previous call to get the next one.
+ *                      *outCodecInfo will be set to NULL when the end of the iteration is reached.
+ *                      It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format        If NULL, this API will iterate through all available encoders.
+ *                      If not NULL, it MUST contain the "mime" key specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if no more encoders support the format.
+ *
+ * It is undefined behavior to call this API with a non-NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ *
+ * Secure encoders are not included in the output.
+ */
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+        const AMediaFormat* format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+/**
+ * Get the codecInfo corresponding to a given codec name.
+ *
+ * @param name          Media codec name.
+ *                      Users can get valid codec names from the AMediaCodecInfo structures
+ *                      returned from findNextDecoder|EncoderForFormat methods.
+ *                      Note that this name may differ from the name the SDK API uses for the
+ *                      same codec, but the two always match for codec names starting with "c2.".
+ *
+ * @param outCodecInfo  Output parameter for the corresponding AMediaCodecInfo structure.
+ *                      It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if got the codecInfo successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if no corresponding codec found.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param name is invalid.
+ */
+media_status_t AMediaCodecStore_getCodecInfo(
+        const char *name, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_STORE_H
+
+/** @} */
\ No newline at end of file
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 8fb203f..939f151 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -1,5 +1,33 @@
 LIBMEDIANDK {
   global:
+    ACodecAudioCapabilities_getBitrateRange; # introduced=36
+    ACodecAudioCapabilities_getInputChannelCountRanges; # introduced=36
+    ACodecAudioCapabilities_getMaxInputChannelCount; # introduced=36
+    ACodecAudioCapabilities_getMinInputChannelCount; # introduced=36
+    ACodecAudioCapabilities_getSupportedSampleRates; # introduced=36
+    ACodecAudioCapabilities_getSupportedSampleRateRanges; # introduced=36
+    ACodecAudioCapabilities_isSampleRateSupported; # introduced=36
+    ACodecEncoderCapabilities_getComplexityRange; # introduced=36
+    ACodecEncoderCapabilities_getQualityRange; # introduced=36
+    ACodecEncoderCapabilities_isBitrateModeSupported; # introduced=36
+    ACodecPerformancePoint_create; # introduced=36
+    ACodecPerformancePoint_covers; # introduced=36
+    ACodecPerformancePoint_coversFormat; # introduced=36
+    ACodecPerformancePoint_delete; # introduced=36
+    ACodecPerformancePoint_equals; # introduced=36
+    ACodecVideoCapabilities_areSizeAndRateSupported; # introduced=36
+    ACodecVideoCapabilities_getAchievableFrameRatesFor; # introduced=36
+    ACodecVideoCapabilities_getBitrateRange; # introduced=36
+    ACodecVideoCapabilities_getHeightAlignment; # introduced=36
+    ACodecVideoCapabilities_getSupportedFrameRates; # introduced=36
+    ACodecVideoCapabilities_getSupportedFrameRatesFor; # introduced=36
+    ACodecVideoCapabilities_getSupportedHeights; # introduced=36
+    ACodecVideoCapabilities_getSupportedHeightsFor; # introduced=36
+    ACodecVideoCapabilities_getSupportedPerformancePoints; # introduced=36
+    ACodecVideoCapabilities_getSupportedWidths; # introduced=36
+    ACodecVideoCapabilities_getSupportedWidthsFor; # introduced=36
+    ACodecVideoCapabilities_getWidthAlignment; # introduced=36
+    ACodecVideoCapabilities_isSizeSupported; # introduced=36
     AImageReader_acquireLatestImage; # introduced=24
     AImageReader_acquireLatestImageAsync; # introduced=26
     AImageReader_acquireNextImage; # introduced=24
@@ -217,6 +245,22 @@
     AMediaCodec_createPersistentInputSurface; # introduced=26
     AMediaCodec_start;
     AMediaCodec_stop;
+    AMediaCodecInfo_getAudioCapabilities; # introduced=36
+    AMediaCodecInfo_getEncoderCapabilities; # introduced=36
+    AMediaCodecInfo_getVideoCapabilities; # introduced=36
+    AMediaCodecInfo_getCanonicalName; # introduced=36
+    AMediaCodecInfo_getMaxSupportedInstances; # introduced=36
+    AMediaCodecInfo_getMediaCodecInfoType; # introduced=36
+    AMediaCodecInfo_getMediaType; # introduced=36
+    AMediaCodecInfo_isEncoder; # introduced=36
+    AMediaCodecInfo_isFeatureRequired; # introduced=36
+    AMediaCodecInfo_isFeatureSupported; # introduced=36
+    AMediaCodecInfo_isFormatSupported; # introduced=36
+    AMediaCodecInfo_isVendor; # introduced=36
+    AMediaCodecStore_getCodecInfo; # introduced=36
+    AMediaCodecStore_getSupportedMediaTypes; # introduced=36
+    AMediaCodecStore_findNextDecoderForFormat; # introduced=36
+    AMediaCodecStore_findNextEncoderForFormat; # introduced=36
     AMediaCrypto_delete;
     AMediaCrypto_isCryptoSchemeSupported;
     AMediaCrypto_new;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 8ceff96..ce8e00e 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -544,7 +544,7 @@
  * @param caller string identifying the caller for logging.
  * @return true if the MAC addresses must be anonymized, false otherwise.
  */
-bool mustAnonymizeBluetoothAddress(
+bool mustAnonymizeBluetoothAddressLegacy(
         const AttributionSourceState& attributionSource, const String16&) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     bool res;
diff --git a/media/utils/include/mediautils/ServiceSingleton.h b/media/utils/include/mediautils/ServiceSingleton.h
index fe8e9f2..177c3ba 100644
--- a/media/utils/include/mediautils/ServiceSingleton.h
+++ b/media/utils/include/mediautils/ServiceSingleton.h
@@ -57,12 +57,20 @@
  */
 namespace android::mediautils {
 
-enum ServiceOptions {
+enum class ServiceOptions {
     kNone = 0,
     kNonNull = (1 << 0),  // don't return a null interface unless disabled.
                           // partially implemented and experimental.
 };
 
+enum class SkipMode {
+    kNone = 0,       // do not skip the cache (normal behavior for caching services).
+    kImmediate = 1,  // do not cache or find the service, return null to the caller immediately,
+                     // which is the normal behavior for skipping the service cache.
+    kWait = 2,       // do not cache or find the service, but block the caller;
+                     // this is used for cases where a local service override is desired.
+};
+
 // Traits may come through a constexpr static function collection.
 // This participates in small buffer optimization SBO in std::function impl.
 template <typename Service>
@@ -135,7 +143,7 @@
         std::swap(oldTraits, mTraits);
         const bool existing = oldTraits != nullptr;
         mTraits = std::move(traits);
-        mSkip = false;
+        mSkipMode = SkipMode::kNone;
         return existing;
     }
 
@@ -154,7 +162,8 @@
         audio_utils::unique_lock ul(mMutex);
         auto& service = std::get<BaseInterfaceType<Service>>(mService);
 
-        if (mSkip || (service && mValid)) return service;  // early check.
+        // early check.
+        if (mSkipMode == SkipMode::kImmediate || (service && mValid)) return service;
 
         // clamp to avoid numeric overflow.  INT64_MAX / 2 is effectively forever for a device.
         std::chrono::nanoseconds kWaitLimitNs(
@@ -164,39 +173,44 @@
 
         for (bool first = true; true; first = false) {
             // we may have released mMutex, so see if service has been obtained.
-            if (mSkip || (service && mValid))  return service;
+            if (mSkipMode == SkipMode::kImmediate || (service && mValid))  return service;
 
-            const auto traits = getTraits_l<Service>();
+            int options = 0;
+            if (mSkipMode == SkipMode::kNone) {
+                const auto traits = getTraits_l<Service>();
 
-            // first time or not using callback, check the service.
-            if (first || !useCallback) {
-                auto service_new = checkServicePassThrough<Service>(
-                        traits->getServiceName());
-                if (service_new) {
-                    mValid = true;
-                    service = std::move(service_new);
-                    // service is a reference, so we copy to service_fixed as
-                    // we're releasing the mutex.
-                    const auto service_fixed = service;
-                    ul.unlock();
-                    traits->onNewService(interfaceFromBase<Service>(service_fixed));
-                    ul.lock();
-                    setDeathNotifier_l<Service>(service_fixed);
-                    ul.unlock();
-                    mCv.notify_all();
-                    return service_fixed;
+                // first time or not using callback, check the service.
+                if (first || !useCallback) {
+                    auto service_new = checkServicePassThrough<Service>(
+                            traits->getServiceName());
+                    if (service_new) {
+                        mValid = true;
+                        service = std::move(service_new);
+                        // service is a reference, so we copy to service_fixed as
+                        // we're releasing the mutex.
+                        const auto service_fixed = service;
+                        ul.unlock();
+                        traits->onNewService(interfaceFromBase<Service>(service_fixed));
+                        ul.lock();
+                        setDeathNotifier_l<Service>(service_fixed);
+                        ul.unlock();
+                        mCv.notify_all();
+                        return service_fixed;
+                    }
                 }
-            }
-
-            // install service callback if needed.
-            if (useCallback && !mServiceNotificationHandle) {
-                setServiceNotifier_l<Service>();
+                // install service callback if needed.
+                if (useCallback && !mServiceNotificationHandle) {
+                    setServiceNotifier_l<Service>();
+                }
+                options = static_cast<int>(traits->options());
             }
 
             // check time expiration.
             const auto now = std::chrono::steady_clock::now();
-            if (now >= end
-                && (service || !(traits->options() & ServiceOptions::kNonNull))) {
+            if (now >= end &&
+                    (service
+                    || mSkipMode != SkipMode::kNone  // skip is set.
+                    || !(options & static_cast<int>(ServiceOptions::kNonNull)))) { // null allowed
                 return service;
             }
 
@@ -241,11 +255,16 @@
      *
      * All notifiers removed.
      * Service pointer is released.
+     *
+     * If skipMode is kNone,      then cache management is immediately reenabled.
+     * If skipMode is kImmediate, then any new waiters will return null immediately.
+     * If skipMode is kWait,      then any new waiters will be blocked until an update occurs
+     *                            or the timeout expires.
      */
     template<typename Service>
-    void skip() {
+    void skip(SkipMode skipMode) {
         audio_utils::unique_lock ul(mMutex);
-        mSkip = true;
+        mSkipMode = skipMode;
         // remove notifiers.  OK to hold lock as presuming notifications one-way
         // or manually triggered outside of lock.
         mDeathNotificationHandle.reset();
@@ -274,7 +293,8 @@
         mDeathNotificationHandle.reset();
         const auto traits = getTraits_l<Service>();
         mValid = false;
-        if (!(traits->options() & ServiceOptions::kNonNull) || mSkip) {
+        if (!(static_cast<int>(traits->options()) & static_cast<int>(ServiceOptions::kNonNull))
+                || mSkipMode != SkipMode::kNone) {
             auto &service = std::get<BaseInterfaceType<Service>>(mService);
             service = nullptr;
         }
@@ -299,9 +319,15 @@
                     audio_utils::unique_lock ul(mMutex);
                     auto originalService = std::get<BaseInterfaceType<Service>>(mService);
                     if (originalService != service) {
+                        if (originalService != nullptr) {
+                            invalidateService_l<Service>();
+                        }
                         mService = service;
                         mValid = true;
                         ul.unlock();
+                        if (originalService != nullptr) {
+                            traits->onServiceDied(interfaceFromBase<Service>(originalService));
+                        }
                         traits->onNewService(service);
                         ul.lock();
                         setDeathNotifier_l<Service>(service);
@@ -374,8 +400,10 @@
     // mValid is true iff the service is non-null and alive.
     bool mValid GUARDED_BY(mMutex) = false;
 
-    // mSkip indicates that the service is not cached.
-    bool mSkip GUARDED_BY(mMutex) = false;
+    // mSkipMode indicates the service cache state: with kNone the cache behaves
+    // normally, with kImmediate callers get a null service immediately, and with
+    // kWait callers block until the service is reinitialized or their timeout expires.
+    SkipMode mSkipMode GUARDED_BY(mMutex) = SkipMode::kNone;
 };
 
 } // details
@@ -468,9 +496,9 @@
  * another initService() can be called seamlessly.
  */
 template<typename Service>
-void skipService() {
+void skipService(SkipMode skipMode = SkipMode::kImmediate) {
     const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
-    serviceHandler->template skip<Service>();
+    serviceHandler->template skip<Service>(skipMode);
 }
 
 } // namespace android::mediautils
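A hedged sketch of the new skip modes from a client's point of view; Interface stands in for any AIDL service type used with the singleton cache, and the includes and timeout are assumptions:

    #include <chrono>
    #include <mediautils/ServiceSingleton.h>

    // Interface is any AIDL service type managed through the singleton cache.
    template <typename Interface>
    void overrideServiceLocally() {
        using android::mediautils::SkipMode;
        // Previous boolean behavior: new callers get a null service immediately.
        android::mediautils::skipService<Interface>(SkipMode::kImmediate);

        // New option: callers block, up to their own timeout, until the service
        // is reinitialized (for example with a local test override).
        android::mediautils::skipService<Interface>(SkipMode::kWait);
        auto service = android::mediautils::getService<Interface>(std::chrono::seconds(1));
        (void)service;
    }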
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 42789d5..2960232 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -116,7 +116,7 @@
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
 bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
-bool mustAnonymizeBluetoothAddress(
+bool mustAnonymizeBluetoothAddressLegacy(
         const AttributionSourceState& attributionSource, const String16& caller);
 void anonymizeBluetoothAddress(char *address);
 
diff --git a/media/utils/tests/service_singleton_tests.cpp b/media/utils/tests/service_singleton_tests.cpp
index 18d7f3d..3e389a4 100644
--- a/media/utils/tests/service_singleton_tests.cpp
+++ b/media/utils/tests/service_singleton_tests.cpp
@@ -299,6 +299,42 @@
     EXPECT_EQ(4, sServiceDied);
     EXPECT_EQ(4, listenerServiceCreated);  // our listener picks it up.
 
+    {
+        // in the default skip mode (kImmediate) a null is returned when the service is
+        // skipped and the wait time is ignored.
+
+        const auto ref1 = std::chrono::steady_clock::now();
+        auto service = mediautils::getService<IServiceSingletonTest>(std::chrono::seconds(2));
+        EXPECT_FALSE(service);
+        const auto ref2 = std::chrono::steady_clock::now();
+        EXPECT_LT(ref2 - ref1, std::chrono::seconds(1));
+
+        auto service2 = mediautils::getService<aidl::IServiceSingletonTest>(
+                std::chrono::seconds(2));
+        EXPECT_FALSE(service2);
+        const auto ref3 = std::chrono::steady_clock::now();
+        EXPECT_LT(ref3 - ref2, std::chrono::seconds(1));
+    }
+
+    // Cancel the singleton cache but use wait mode.
+    mediautils::skipService<IServiceSingletonTest>(mediautils::SkipMode::kWait);
+    mediautils::skipService<aidl::IServiceSingletonTest>(mediautils::SkipMode::kWait);
+
+    {
+        // in wait mode, the timeouts are respected
+        const auto ref1 = std::chrono::steady_clock::now();
+        auto service = mediautils::getService<IServiceSingletonTest>(std::chrono::seconds(1));
+        EXPECT_FALSE(service);
+        const auto ref2 = std::chrono::steady_clock::now();
+        EXPECT_GT(ref2 - ref1, std::chrono::seconds(1));
+
+        auto service2 = mediautils::getService<aidl::IServiceSingletonTest>(
+                std::chrono::seconds(1));
+        EXPECT_FALSE(service2);
+        const auto ref3 = std::chrono::steady_clock::now();
+        EXPECT_GT(ref3 - ref2, std::chrono::seconds(1));
+    }
+
     // remove service
     remoteWorker->putc('b');
     EXPECT_EQ('b', remoteWorker->getc());
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8e533ce..1843030 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8040,7 +8040,8 @@
 // caller must hold mutex()
 void DuplicatingThread::updateWaitTime_l()
 {
-    mWaitTimeMs = UINT_MAX;
+    // Initialize mWaitTimeMs according to the mixer buffer size.
+    mWaitTimeMs = mNormalFrameCount * 2 * 1000 / mSampleRate;
     for (size_t i = 0; i < mOutputTracks.size(); i++) {
         const auto strong = mOutputTracks[i]->thread().promote();
         if (strong != 0) {
@@ -8649,6 +8650,10 @@
 
             timestampCorrectionEnabled = isTimestampCorrectionEnabled_l();
             lockEffectChains_l(effectChains);
+            // We're exiting the locked scope with non-empty activeTracks; make sure
+            // we're not in standby mode, which we could have entered if some
+            // tracks were muted/unmuted.
+            mStandby = false;
         }
 
         // thread mutex is now unlocked, mActiveTracks unknown, activeTracks.size() > 0
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index d499222..0c03900 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -22,31 +22,77 @@
 
 #include <media/AudioContainers.h>
 
+#include <string.h>
+
 namespace android {
 
 using StreamTypeVector = std::vector<audio_stream_type_t>;
 
+#define AUDIO_ENUM_QUOTE(x) #x
+#define AUDIO_ENUM_STRINGIFY(x) AUDIO_ENUM_QUOTE(x)
+#define AUDIO_DEFINE_ENUM_SYMBOL_V(symbol, value) symbol = value,
+#define AUDIO_DEFINE_STRINGIFY_CASE_V(symbol, _) case symbol: return AUDIO_ENUM_STRINGIFY(symbol);
+#define AUDIO_DEFINE_PARSE_CASE_V(symbol, _) \
+    if (strcmp(s, AUDIO_ENUM_STRINGIFY(symbol)) == 0) { *t = symbol; return true; } else
+#define AUDIO_DEFINE_MAP_ENTRY_V(symbol, _) { AUDIO_ENUM_STRINGIFY(symbol), symbol },
+
 /**
  * Legacy audio policy product strategies IDs. These strategies are supported by the default
  * policy engine.
  * IMPORTANT NOTE: the order of this enum is important as it determines the priority
- * between active strategies for routing decisions: lower enum value => higher prioriy
+ * between active strategies for routing decisions: lower enum value => higher priority
  */
+#define AUDIO_LEGACY_STRATEGY_LIST_DEF(V)      \
+    V(STRATEGY_NONE, -1)                       \
+    V(STRATEGY_PHONE, 0)                       \
+    V(STRATEGY_SONIFICATION, 1)                \
+    V(STRATEGY_ENFORCED_AUDIBLE, 2)            \
+    V(STRATEGY_ACCESSIBILITY, 3)               \
+    V(STRATEGY_SONIFICATION_RESPECTFUL, 4)     \
+    V(STRATEGY_MEDIA, 5)                       \
+    V(STRATEGY_DTMF, 6)                        \
+    V(STRATEGY_CALL_ASSISTANT, 7)              \
+    V(STRATEGY_TRANSMITTED_THROUGH_SPEAKER, 8) \
+    V(STRATEGY_REROUTING, 9)                   \
+    V(STRATEGY_PATCH, 10)
+
 enum legacy_strategy {
-    STRATEGY_NONE = -1,
-    STRATEGY_PHONE,
-    STRATEGY_SONIFICATION,
-    STRATEGY_ENFORCED_AUDIBLE,
-    STRATEGY_ACCESSIBILITY,
-    STRATEGY_SONIFICATION_RESPECTFUL,
-    STRATEGY_MEDIA,
-    STRATEGY_DTMF,
-    STRATEGY_CALL_ASSISTANT,
-    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
-    STRATEGY_REROUTING,
-    STRATEGY_PATCH,
+    AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 };
 
+inline const char* legacy_strategy_to_string(legacy_strategy t) {
+    switch (t) {
+    AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_STRINGIFY_CASE_V)
+    }
+    return "";
+}
+
+inline bool legacy_strategy_from_string(const char* s, legacy_strategy* t) {
+    AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_PARSE_CASE_V)
+    return false;
+}
+
+namespace audio_policy {
+
+struct legacy_strategy_map { const char *name; legacy_strategy id; };
+
+inline std::vector<legacy_strategy_map> getLegacyStrategyMap() {
+    return std::vector<legacy_strategy_map> {
+    AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_MAP_ENTRY_V)
+    };
+}
+
+}  // namespace audio_policy
+
+#undef AUDIO_LEGACY_STRATEGY_LIST_DEF
+
+#undef AUDIO_DEFINE_MAP_ENTRY_V
+#undef AUDIO_DEFINE_PARSE_CASE_V
+#undef AUDIO_DEFINE_STRINGIFY_CASE_V
+#undef AUDIO_DEFINE_ENUM_SYMBOL_V
+#undef AUDIO_ENUM_STRINGIFY
+#undef AUDIO_ENUM_QUOTE
+
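The X-macro list above also generates the string helpers and the strategy map used by the engines; a small round-trip sketch (the helpers live in namespace android, and the strategy name is chosen only for illustration, assuming policy.h is included):

    static void strategyNameRoundTrip() {
        android::legacy_strategy s;
        if (android::legacy_strategy_from_string("STRATEGY_MEDIA", &s)) {
            const char* name = android::legacy_strategy_to_string(s);  // "STRATEGY_MEDIA"
            (void)name;
        }
    }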
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
 static const std::set<audio_usage_t > gHighPriorityUseCases = {
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index f066c09..b29033e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -147,6 +147,7 @@
 private:
     bool mixMatch(const AudioMix* mix, size_t mixIndex,
                             const audio_attributes_t& attributes,
+                            const audio_output_flags_t outputFlags,
                             const audio_config_base_t& config,
                             uid_t uid,
                             audio_session_t session);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 3430f4b..ea78a5d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -361,7 +361,7 @@
             continue; // Primary output already found
         }
 
-        if(!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
+        if(!mixMatch(policyMix.get(), i, attributes, flags, config, uid, session)) {
             ALOGV("%s: Mix %zu: does not match", __func__, i);
             continue; // skip the mix
         }
@@ -422,8 +422,8 @@
 }
 
 bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
-    const audio_attributes_t& attributes, const audio_config_base_t& config,
-    uid_t uid, audio_session_t session) {
+    const audio_attributes_t& attributes, const audio_output_flags_t outputFlags,
+    const audio_config_base_t& config, uid_t uid, audio_session_t session) {
 
     if (mix->mMixType == MIX_TYPE_PLAYERS) {
         // Loopback render mixes are created from a public API and thus restricted
@@ -451,12 +451,17 @@
         }
 
         // Permit match only if requested format and mix format are PCM and can be format
-        // adapted by the mixer, or are the same (compressed) format.
+        // adapted by the mixer, or are the same format on direct output.
         if (!is_mix_loopback(mix->mRouteFlags) &&
-            !((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
-              (config.format == mix->mFormat.format)) &&
-              config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
-            return false;
+                config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
+            if (!audio_output_is_mixed_output_flags(outputFlags)) {
+                // Direct output must match format exactly.
+                if (config.format != mix->mFormat.format) return false;
+            } else {
+                // If mixable, both requested and mix format must be linear pcm.
+                if (!audio_is_linear_pcm(config.format) ||
+                          !audio_is_linear_pcm(mix->mFormat.format)) return false;
+            }
         }
 
         // if there is an address match, prioritize that match
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index edb2e29..4445b66 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -117,9 +117,10 @@
             AudioDeviceTypeAddrVector &devices) const override;
 
     engineConfig::ParsingResult loadAudioPolicyEngineConfig(
-            const media::audio::common::AudioHalEngineConfig& aidlConfig);
+            const media::audio::common::AudioHalEngineConfig& aidlConfig, bool);
 
-    engineConfig::ParsingResult loadAudioPolicyEngineConfig(const std::string& xmlFilePath = "");
+    engineConfig::ParsingResult loadAudioPolicyEngineConfig(
+            const std::string& xmlFilePath = "", bool isConfigurable = false);
 
     const ProductStrategyMap &getProductStrategies() const { return mProductStrategies; }
 
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index fb8379e..0799399 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -126,7 +126,7 @@
 }
 
 engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
-        const media::audio::common::AudioHalEngineConfig& aidlConfig)
+        const media::audio::common::AudioHalEngineConfig& aidlConfig, bool)
 {
     engineConfig::ParsingResult result = engineConfig::convert(aidlConfig);
     if (result.parsedConfig == nullptr) {
@@ -141,7 +141,8 @@
     return processParsingResult(std::move(result));
 }
 
-engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
+engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
+        const std::string& xmlFilePath, bool isConfigurable)
 {
     auto fileExists = [](const char* path) {
         struct stat fileStat;
@@ -150,7 +151,7 @@
     const std::string filePath = xmlFilePath.empty() ? engineConfig::DEFAULT_PATH : xmlFilePath;
     engineConfig::ParsingResult result =
             fileExists(filePath.c_str()) ?
-            engineConfig::parse(filePath.c_str()) : engineConfig::ParsingResult{};
+            engineConfig::parse(filePath.c_str(), isConfigurable) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
         ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
         engineConfig::Config config = gDefaultEngineConfig;
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 054bdae..8a4fc88 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -116,7 +116,7 @@
 /** Parses the provided audio policy usage configuration.
  * @return audio policy usage @see Config
  */
-ParsingResult parse(const char* path = DEFAULT_PATH);
+ParsingResult parse(const char* path = DEFAULT_PATH, bool isConfigurable = false);
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
 ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig);
 // Exposed for testing.
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 714ab78..b8d95ee 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -52,6 +52,8 @@
 
 namespace {
 
+static bool gIsConfigurableEngine = false;
+
 ConversionResult<std::string> aidl2legacy_AudioHalProductStrategy_ProductStrategyType(int id) {
     using AudioProductStrategyType = media::audio::common::AudioProductStrategyType;
 
@@ -547,9 +549,16 @@
         if (!convertTo(idLiteral, id)) {
             return BAD_VALUE;
         }
-        ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
+    } else {
+        legacy_strategy legacyId;
+        if (legacy_strategy_from_string(name.c_str(), &legacyId)) {
+            id = legacyId;
+        } else if (!gIsConfigurableEngine) {
+            return BAD_VALUE;
+        }
+        // With a configurable engine it can be a vendor-provided strategy name.
     }
-    ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
+    ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
 
     size_t skipped = 0;
     AttributesGroups attrGroups;
@@ -776,7 +785,7 @@
 
 }  // namespace
 
-ParsingResult parse(const char* path) {
+ParsingResult parse(const char* path, bool isConfigurable) {
     XmlErrorHandler errorHandler;
     auto doc = make_xmlUnique(xmlParseFile(path));
     if (doc == NULL) {
@@ -801,6 +810,7 @@
         ALOGE("%s: No version found", __func__);
         return {nullptr, 0};
     }
+    gIsConfigurableEngine = isConfigurable;
     size_t nbSkippedElements = 0;
     auto config = std::make_unique<Config>();
     config->version = std::stof(version);
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 45da7b0..ad49b19 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -112,7 +112,7 @@
 
 template<typename T>
 status_t Engine::loadWithFallback(const T& configSource) {
-    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource, true /*isConfigurable*/);
     ALOGE_IF(result.nbSkippedElement != 0,
              "Policy Engine configuration is partially invalid, skipped %zu elements",
              result.nbSkippedElement);
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 1082d31..b140a9d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -38,22 +38,8 @@
 
 namespace android::audio_policy {
 
-struct legacy_strategy_map { const char *name; legacy_strategy id; };
 static const std::vector<legacy_strategy_map>& getLegacyStrategy() {
-    static const std::vector<legacy_strategy_map> legacyStrategy = {
-        { "STRATEGY_NONE", STRATEGY_NONE },
-        { "STRATEGY_MEDIA", STRATEGY_MEDIA },
-        { "STRATEGY_PHONE", STRATEGY_PHONE },
-        { "STRATEGY_SONIFICATION", STRATEGY_SONIFICATION },
-        { "STRATEGY_SONIFICATION_RESPECTFUL", STRATEGY_SONIFICATION_RESPECTFUL },
-        { "STRATEGY_DTMF", STRATEGY_DTMF },
-        { "STRATEGY_ENFORCED_AUDIBLE", STRATEGY_ENFORCED_AUDIBLE },
-        { "STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER },
-        { "STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY },
-        { "STRATEGY_REROUTING", STRATEGY_REROUTING },
-        { "STRATEGY_PATCH", STRATEGY_PATCH }, // boiler to manage stream patch volume
-        { "STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT },
-    };
+    static const std::vector<legacy_strategy_map> legacyStrategy = getLegacyStrategyMap();
     return legacyStrategy;
 }
 
@@ -68,7 +54,7 @@
 
 template<typename T>
 status_t Engine::loadWithFallback(const T& configSource) {
-    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource, false /*isConfigurable*/);
     ALOGE_IF(result.nbSkippedElement != 0,
              "Policy Engine configuration is partially invalid, skipped %zu elements",
              result.nbSkippedElement);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 802ebeb..21803e0 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -78,6 +78,7 @@
 using com::android::media::permission::PermissionEnum::MODIFY_PHONE_STATE;
 using com::android::media::permission::PermissionEnum::RECORD_AUDIO;
 using com::android::media::permission::PermissionEnum::WRITE_SECURE_SETTINGS;
+using com::android::media::permission::PermissionEnum::BLUETOOTH_CONNECT;
 using com::android::media::permission::PermissionEnum::BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION;
 using content::AttributionSourceState;
 using media::audio::common::AudioConfig;
@@ -98,6 +99,33 @@
 namespace {
 constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
 constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
+
+bool mustAnonymizeBluetoothAddress(const AttributionSourceState& attributionSource,
+                                   const String16& caller,
+                                   const IPermissionProvider& provider) {
+    if (audioserver_permissions()) {
+        switch(multiuser_get_app_id(attributionSource.uid)) {
+            // out of caution, to prevent regression
+            case AID_ROOT:
+            case AID_SYSTEM:
+            case AID_AUDIOSERVER:
+            case AID_RADIO:
+            case AID_BLUETOOTH:
+            case AID_MEDIA:
+                return false;
+        }
+        const auto res = provider.checkPermission(BLUETOOTH_CONNECT, attributionSource.uid);
+        if (res.has_value()) {
+            return !(*res);
+        } else {
+            ALOGE("%s: error: %s", __func__, res.error().toString8().c_str());
+            return true;
+        }
+    } else {
+        return mustAnonymizeBluetoothAddressLegacy(attributionSource, caller);
+    }
+}
+
 }
 
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
@@ -445,9 +473,18 @@
     }
 
     if (strlen(attr.tags) != 0) {
-        if (!(audioserver_permissions() ?
-              CHECK_PERM(MODIFY_AUDIO_SETTINGS_PRIVILEGED, attributionSource.uid)
-              : modifyAudioSettingsPrivilegedAllowed(attributionSource))) {
+        const bool audioAttributesTagsAllowed = audioserver_permissions() ? (
+                CHECK_PERM(MODIFY_AUDIO_SETTINGS_PRIVILEGED, attributionSource.uid) ||
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid) ||
+                CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid) ||
+                CHECK_PERM(CAPTURE_MEDIA_OUTPUT, attributionSource.uid) ||
+                CHECK_PERM(CAPTURE_VOICE_COMMUNICATION_OUTPUT, attributionSource.uid))
+                : (modifyAudioSettingsPrivilegedAllowed(attributionSource) ||
+                   modifyAudioRoutingAllowed() ||
+                   callAudioInterceptionAllowed(attributionSource) ||
+                   captureMediaOutputAllowed(attributionSource) ||
+                   captureVoiceCommunicationOutputAllowed(attributionSource));
+        if (!audioAttributesTagsAllowed) {
             ALOGE("%s: permission denied: audio attributes tags not allowed for uid %d pid %d",
                   __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -1781,7 +1818,8 @@
         numPortsReq = std::min(numPortsReq, num_ports);
     }
 
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__),
+                                      getPermissionProvider())) {
         for (size_t i = 0; i < numPortsReq; ++i) {
             anonymizePortBluetoothAddress(ports[i]);
         }
@@ -1823,7 +1861,8 @@
         RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
     }
 
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__),
+                                      getPermissionProvider())) {
         anonymizePortBluetoothAddress(port);
     }
 
@@ -1903,7 +1942,8 @@
         numPatchesReq = std::min(numPatchesReq, num_patches);
     }
 
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__),
+                                      getPermissionProvider())) {
         for (size_t i = 0; i < numPatchesReq; ++i) {
             for (size_t j = 0; j < patches[i].num_sources; ++j) {
                 anonymizePortBluetoothAddress(patches[i].sources[j]);
@@ -2015,7 +2055,7 @@
 
     if (needCaptureMediaOutput && !(audioserver_permissions() ?
                 CHECK_PERM(CAPTURE_MEDIA_OUTPUT, attributionSource.uid)
-                : modifyAudioRoutingAllowed())) {
+                : captureMediaOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index d177b92..e469b2c 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -418,11 +418,13 @@
                                    &spatializedChannelMasks);
     if (status != NO_ERROR) {
         ALOGW("%s: cannot get SPATIALIZER_PARAM_SPATIALIZED_CHANNEL_MASKS", __func__);
-        return status;
+        // do not return an error, as spatializer implementations may not have been
+        // updated yet to support this parameter
     }
     if (spatializedChannelMasks.empty()) {
         ALOGW("%s: SPATIALIZER_PARAM_SPATIALIZED_CHANNEL_MASKS reports empty", __func__);
-        return BAD_VALUE;
+        // Do not return an error here, as existing spatializer implementations may not
+        // have been updated yet to support this parameter.
     }
     for (const audio_channel_mask_t spatializedMask : spatializedChannelMasks) {
         // spatialized masks must be contained in the supported input masks
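The Spatializer change above downgrades a missing or empty SPATIALIZER_PARAM_SPATIALIZED_CHANNEL_MASKS result from a hard failure to a warning, so older implementations keep working. A minimal standalone sketch of the same graceful-degradation idea; ChannelMask and queryMasks() are stand-ins, not the real spatializer API.

#include <cstdio>
#include <vector>

using ChannelMask = unsigned int;  // stand-in type

// Stand-in for a capability query that older implementations may not support.
static int queryMasks(std::vector<ChannelMask>* out) { out->clear(); return -1; }

// Query an optional capability; warn and continue with an empty list instead of failing.
static std::vector<ChannelMask> getSpatializedMasksOrEmpty() {
    std::vector<ChannelMask> masks;
    if (queryMasks(&masks) != 0 || masks.empty()) {
        std::fprintf(stderr, "spatialized channel masks not reported; continuing without them\n");
        masks.clear();
    }
    return masks;
}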
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a8f79c3..40e99af 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -179,6 +179,7 @@
     void SetUp() override;
     void TearDown() override;
     virtual void SetUpManagerConfig();
+    virtual std::string getEngineConfigFilePath() const { return sTestEngineConfig; }
 
     void dumpToLog();
     // When explicit routing is needed, selectedDeviceId needs to be set as the wanted port
@@ -217,6 +218,7 @@
             const std::string &address, audio_port_v7 *foundPort);
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
     virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
+    void verifyBuiltInStrategyIdsAreValid();
 
     sp<AudioPolicyConfig> mConfig;
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
@@ -231,7 +233,7 @@
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
     ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
-    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), sTestEngineConfig));
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), getEngineConfigFilePath()));
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -397,6 +399,16 @@
     return AUDIO_PORT_HANDLE_NONE;
 }
 
+void AudioPolicyManagerTest::verifyBuiltInStrategyIdsAreValid() {
+    AudioProductStrategyVector strategies;
+    ASSERT_EQ(NO_ERROR, mManager->listAudioProductStrategies(strategies));
+    for (const auto& strategy : strategies) {
+        // Since ids are unsigned, this also covers the case where the id is 'NONE', which is -1.
+        EXPECT_LT(strategy.getId(),
+                  media::audio::common::AudioHalProductStrategy::VENDOR_STRATEGY_ID_START)
+                << strategy.getName();
+    }
+}
 
 TEST_F(AudioPolicyManagerTest, InitSuccess) {
     // SetUp must finish with no assertions.
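The comment in verifyBuiltInStrategyIdsAreValid() above relies on unsigned wrap-around. A small self-contained illustration of why the single upper-bound comparison also rejects a 'NONE' id of -1; the numeric bound is illustrative only, not the real VENDOR_STRATEGY_ID_START value.

#include <cassert>
#include <cstdint>

int main() {
    constexpr uint32_t kVendorStrategyIdStart = 1000;           // illustrative bound
    const uint32_t noneAsUnsigned = static_cast<uint32_t>(-1);  // -1 wraps to 0xFFFFFFFF
    // An id of 'NONE' can therefore never satisfy the "< VENDOR_STRATEGY_ID_START" check.
    assert(!(noneAsUnsigned < kVendorStrategyIdStart));
    return 0;
}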
@@ -454,6 +466,20 @@
 
 // TODO: Add patch creation tests that involve already existing patch
 
+TEST_F(AudioPolicyManagerTest, BuiltInStrategyIdsAreValid) {
+    verifyBuiltInStrategyIdsAreValid();
+}
+
+class AudioPolicyManagerTestWithDefaultEngineConfig : public AudioPolicyManagerTest {
+  protected:
+    // The APM will use the default engine config from EngineDefaultConfig.h.
+    std::string getEngineConfigFilePath() const override { return ""; }
+};
+
+TEST_F(AudioPolicyManagerTestWithDefaultEngineConfig, BuiltInStrategyIdsAreValid) {
+    verifyBuiltInStrategyIdsAreValid();
+}
+
 enum
 {
     MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX = 0,
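The getEngineConfigFilePath() hook introduced above makes the engine configuration pluggable per test fixture; the AudioPolicyManagerTestWithDefaultEngineConfig subclass is one use. A hypothetical further use, pointing the APM at a custom engine configuration file; the class name and path are illustrative and not part of this change.

// Hypothetical fixture, for illustration only: exercise the APM against a custom
// engine configuration file instead of the test default.
class AudioPolicyManagerTestWithCustomEngineConfig : public AudioPolicyManagerTest {
  protected:
    std::string getEngineConfigFilePath() const override {
        return "/data/local/tmp/test_audio_policy_engine_configuration.xml";  // assumed path
    }
};

TEST_F(AudioPolicyManagerTestWithCustomEngineConfig, BuiltInStrategyIdsAreValid) {
    verifyBuiltInStrategyIdsAreValid();
}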
@@ -3432,6 +3458,49 @@
     // SetUp must finish with no assertions.
 }
 
+TEST_F(AudioPolicyManagerPhoneTest, Dump) {
+    dumpToLog();
+}
+
+TEST_F(AudioPolicyManagerPhoneTest, NoPatchChangesDuringAlarmPlayback) {
+    audio_port_handle_t alarmPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t alarmOutput = AUDIO_IO_HANDLE_NONE;
+    {
+        // Uses STRATEGY_SONIFICATION, routed to AUDIO_DEVICE_OUT_SPEAKER_SAFE.
+        audio_attributes_t attr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_ALARM,
+        };
+        DeviceIdVector selectedDeviceIds;
+        ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                        AUDIO_CHANNEL_OUT_STEREO, 48000,
+                        AUDIO_OUTPUT_FLAG_NONE,
+                        &alarmOutput, &alarmPortId, attr));
+        EXPECT_EQ(NO_ERROR, mManager->startOutput(alarmPortId));
+    }
+    const audio_patch lastPatchBefore = *(mClient->getLastAddedPatch());
+
+    {
+        // Uses STRATEGY_MEDIA, routed to AUDIO_DEVICE_OUT_SPEAKER.
+        audio_attributes_t attr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_MEDIA,
+        };
+        DeviceIdVector selectedDeviceIds;
+        audio_port_handle_t mediaPortId = AUDIO_PORT_HANDLE_NONE;
+        audio_io_handle_t mediaOutput = AUDIO_IO_HANDLE_NONE;
+        ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+                        AUDIO_CHANNEL_OUT_STEREO, 48000,
+                        AUDIO_OUTPUT_FLAG_NONE,
+                        &mediaOutput, &mediaPortId, attr));
+        EXPECT_EQ(NO_ERROR, mManager->startOutput(mediaPortId));
+    }
+    dumpToLog();
+    const audio_patch lastPatchAfter = *(mClient->getLastAddedPatch());
+    EXPECT_TRUE(audio_patches_are_equal(&lastPatchBefore, &lastPatchAfter)) <<
+            "Unexpected change in patches detected";
+}
+
 enum {
     MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER,
     MIX_PORT_ATTR_EXPECTED_NAME_WITH_DBFM_PARAMETER,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 5bcb8e8..ebd8eb1 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1019,12 +1019,9 @@
 
         int mirrorMode = outputConfiguration.getMirrorMode(surface);
         sp<Surface> outSurface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
-                isStreamInfoValid, outSurface, surface
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                .graphicBufferProducer
-#endif
-                , mCameraIdStr,
+        res = SessionConfigurationUtils::createConfiguredSurface(streamInfo,
+                isStreamInfoValid, outSurface,
+                flagtools::convertParcelableSurfaceTypeToSurface(surface), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                 streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
@@ -1410,15 +1407,10 @@
         OutputStreamInfo outInfo;
         sp<Surface> outSurface;
         int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
-        res = SessionConfigurationUtils::createSurfaceFromGbp(
+        res = SessionConfigurationUtils::createConfiguredSurface(
                 outInfo,
                 /*isStreamInfoValid*/ false, outSurface,
-                newOutputsMap
-                        .valueAt(i)
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                        .graphicBufferProducer
-#endif
-                ,
+                flagtools::convertParcelableSurfaceTypeToSurface(newOutputsMap.valueAt(i)),
                 mCameraIdStr, mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed,
                 dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
                 /*respectSurfaceSize*/ false);
@@ -1815,15 +1807,11 @@
 
         sp<Surface> outSurface;
         int mirrorMode = outputConfiguration.getMirrorMode(surface);
-        res = SessionConfigurationUtils::createSurfaceFromGbp(
+        res = SessionConfigurationUtils::createConfiguredSurface(
                 mStreamInfoMap[streamId], true /*isStreamInfoValid*/, outSurface,
-                surface
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                .graphicBufferProducer
-#endif
-                , mCameraIdStr, mDevice->infoPhysical(physicalId),
-                sensorPixelModesUsed, dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
-                colorSpace, /*respectSurfaceSize*/ false);
+                flagtools::convertParcelableSurfaceTypeToSurface(surface), mCameraIdStr,
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/ false);
 
         if (!res.isOk()) return res;
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f41cb85..bb2d136 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -444,15 +444,15 @@
     return false;
 }
 
-binder::Status createSurfaceFromGbp(
+binder::Status createConfiguredSurface(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        sp<Surface>& out_surface, const sp<SurfaceType>& surface,
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
         int32_t colorSpace, bool respectSurfaceSize) {
-    // bufferProducer must be non-null
-    if (gbp == nullptr) {
+    // surface must be valid
+    if (!flagtools::isSurfaceTypeValid(surface)) {
         std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
         ALOGW("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
@@ -463,7 +463,7 @@
     bool useAsync = false;
     uint64_t consumerUsage = 0;
     status_t err;
-    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
+    if ((err = surface->getConsumerUsage(&consumerUsage)) != OK) {
         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                 logicalCameraId.c_str(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -483,8 +483,9 @@
     bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
             (consumerUsage & allowedFlags) != 0;
 
-    surface = new Surface(gbp, useAsync);
-    ANativeWindow *anw = surface.get();
+    out_surface = new Surface(flagtools::surfaceTypeToIGBP(surface), useAsync);
+
+    ANativeWindow *anw = out_surface.get();
 
     int width, height, format;
     android_dataspace dataSpace;
@@ -923,15 +924,11 @@
         for (auto& surface_type : surfaces) {
             sp<Surface> surface;
             int mirrorMode = it.getMirrorMode(surface_type);
-            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface,
-                                       surface_type
-#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
-                                       .graphicBufferProducer
-#endif
-                                       , logicalCameraId,
-                                       metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                                       streamUseCase, timestampBase, mirrorMode, colorSpace,
-                                       /*respectSurfaceSize*/ true);
+            res = createConfiguredSurface(streamInfo, isStreamInfoValid, surface,
+                                    flagtools::convertParcelableSurfaceTypeToSurface(surface_type),
+                                    logicalCameraId, metadataChosen, sensorPixelModesUsed,
+                                    dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
+                                    colorSpace, /*respectSurfaceSize*/ true);
 
             if (!res.isOk()) return res;
 
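For context on the 'controlledByApp' wording used in the updated SessionConfigurationUtils.h comment below: the useAsync value computed in createConfiguredSurface() is passed as the second Surface constructor argument, which is libgui's controlledByApp flag. A minimal sketch, assuming libgui's Surface(const sp<IGraphicBufferProducer>&, bool controlledByApp) constructor; the helper name is illustrative and not part of this change.

#include <gui/Surface.h>

// Illustrative helper only: recreate a Surface whose buffer queue is treated as
// app-controlled (async), mirroring what createConfiguredSurface() derives from the
// consumer usage via useAsync.
static android::sp<android::Surface> makeAppControlledSurface(
        const android::sp<android::IGraphicBufferProducer>& gbp) {
    return new android::Surface(gbp, /*controlledByApp*/ true);
}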
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 3c0f109..51b07bd 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -104,11 +104,11 @@
 // check if format is not custom format
 bool isPublicFormat(int32_t format);
 
-// Create a Surface from an IGraphicBufferProducer. Returns error if
-// IGraphicBufferProducer's property doesn't match with streamInfo
-binder::Status createSurfaceFromGbp(
+// Recreates a Surface from another Surface, setting the controlledByApp flag correctly.
+// Returns an error if the previous Surface's properties don't match streamInfo.
+binder::Status createConfiguredSurface(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        sp<Surface>& out_surface, const sp<SurfaceType>& surface,
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,