Merge changes from topic "chartype" into sc-dev

* changes:
  Use char instead of int in Tuner AIDL interface for all the 16-bit data defined in TunerHAL
  Cast HIDL uint16_t to char16_t in AIDL cpp backend service
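For context, the "chartype" topic narrows the Tuner AIDL surface so that 16-bit Tuner HAL values travel as char (char16_t in the C++ backend) instead of int. The Tuner sources themselves are not part of this diff, so the following is only a minimal sketch of the kind of per-element cast the second change describes, using plain standard types rather than the generated HIDL/AIDL ones:

    #include <cstdint>
    #include <vector>

    // char16_t and uint16_t have the same width and value range, so a
    // per-element static_cast converts HIDL-style uint16_t data into the
    // char16_t representation used by the AIDL C++ backend without loss.
    std::vector<char16_t> toAidlChar16(const std::vector<uint16_t>& hidlValues) {
        std::vector<char16_t> out;
        out.reserve(hidlValues.size());
        for (uint16_t v : hidlValues) {
            out.push_back(static_cast<char16_t>(v));
        }
        return out;
    }
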
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 6c1cf33..2b7f040 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -61,6 +61,10 @@
  */
 typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
 
+/**
+ * Capture session state callbacks used in {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters}.
+ */
 typedef struct ACameraCaptureSession_stateCallbacks {
     /// optional application context.
     void*                               context;
@@ -246,6 +250,10 @@
         void* context, ACameraCaptureSession* session,
         ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
 
+/**
+ * ACameraCaptureSession_captureCallbacks structure used in
+ * {@link ACameraCaptureSession_capture} and {@link ACameraCaptureSession_setRepeatingRequest}.
+ */
 typedef struct ACameraCaptureSession_captureCallbacks {
     /// optional application context.
     void*                                               context;
@@ -413,7 +421,10 @@
  */
 void ACameraCaptureSession_close(ACameraCaptureSession* session);
 
-struct ACameraDevice;
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
 typedef struct ACameraDevice ACameraDevice;
 
 /**
@@ -591,6 +602,10 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
+/**
+ * Opaque object for capture session output; use {@link ACaptureSessionOutput_create} or
+ * {@link ACaptureSessionSharedOutput_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -604,9 +619,9 @@
  *
  * <p>Native windows that get removed must not be part of any active repeating or single/burst
  * request or have any pending results. Consider updating repeating requests via
- * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then wait for the last frame number
  * when the sequence completes
- * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.</p>
  *
  * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
  * and must be compatible. Compatible windows must have matching format, rotation and
@@ -713,7 +728,15 @@
      * Same as ACameraCaptureSession_captureCallbacks
      */
     void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}.
+     */
     ACameraCaptureSession_captureCallback_start         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
     ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
 
     /**
@@ -751,10 +774,18 @@
     ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
 
     /**
-     * Same as ACameraCaptureSession_captureCallbacks
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
      */
     ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
     ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
     ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
 } ACameraCaptureSession_logicalCamera_captureCallbacks;
 
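The new documentation above describes ACameraCaptureSession_captureCallbacks as the structure passed to ACameraCaptureSession_capture and ACameraCaptureSession_setRepeatingRequest. A minimal usage sketch follows; it zero-initializes the struct so unused callbacks stay null, and only the callback fields named in this header are assumed, so treat it as illustrative rather than exhaustive:

    #include <camera/NdkCameraCaptureSession.h>

    static void onStarted(void* /*context*/, ACameraCaptureSession* /*session*/,
                          const ACaptureRequest* /*request*/, int64_t timestamp) {
        // Called when capture of the request starts; timestamp is in nanoseconds.
        (void)timestamp;
    }

    camera_status_t submitOneCapture(ACameraCaptureSession* session,
                                     ACaptureRequest* request) {
        ACameraCaptureSession_captureCallbacks cbs = {};  // unused callbacks remain null
        cbs.context = nullptr;
        cbs.onCaptureStarted = onStarted;
        int sequenceId = 0;
        // ACameraCaptureSession_setRepeatingRequest takes the same arguments
        // for a repeating request.
        return ACameraCaptureSession_capture(session, &cbs, 1, &request, &sequenceId);
    }
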
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index f72fe8d..7be4bd3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -124,6 +124,10 @@
  */
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
+/**
+ * Application callbacks for camera device state changes; register them with
+ * {@link ACameraManager_openCamera}.
+ */
 typedef struct ACameraDevice_StateCallbacks {
     /// optional application context.
     void*                             context;
@@ -198,6 +202,10 @@
  */
 const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
+/**
+ * Pre-defined capture request template types, used in {@link ACameraDevice_createCaptureRequest}
+ * and {@link ACameraDevice_createCaptureRequest_withPhysicalIds}.
+ */
 typedef enum {
     /**
      * Create a request suitable for a camera preview window. Specifically, this
@@ -301,10 +309,12 @@
         const ACameraDevice* device, ACameraDevice_request_template templateId,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
-
+/**
+ * Opaque object for the capture session output container; use
+ * {@link ACaptureSessionOutputContainer_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
 
-typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
  * Create a capture session output container.
@@ -844,7 +854,7 @@
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
 /**
- * Check whether a particular {@ACaptureSessionOutputContainer} is supported by
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
  * the camera device.
  *
  * <p>This method performs a runtime check of a given {@link
@@ -875,6 +885,7 @@
  *                                                         device.</li>
  *        <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
  *                                                        supported by the camera device.</li>
+ *        </ul>
  */
 camera_status_t ACameraDevice_isSessionConfigurationSupported(
         const ACameraDevice* device,
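The template documentation added above feeds directly into ACameraDevice_createCaptureRequest. A minimal sketch, using the TEMPLATE_PREVIEW value from this header:

    #include <camera/NdkCameraDevice.h>

    // Create a capture request tuned for a preview surface; the caller owns
    // the returned ACaptureRequest and frees it with ACaptureRequest_free().
    camera_status_t makePreviewRequest(ACameraDevice* device,
                                       /*out*/ ACaptureRequest** request) {
        return ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, request);
    }
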
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 9d77eb4..26db7f2 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,7 +40,13 @@
 
 __BEGIN_DECLS
 
+/**
+ * Camera status enum types.
+ */
 typedef enum {
+    /**
+     * Camera operation has succeeded.
+     */
     ACAMERA_OK = 0,
 
     ACAMERA_ERROR_BASE                  = -10000,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index be32b11..729182e 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -326,7 +326,7 @@
  * @see ACameraManager_registerExtendedAvailabilityCallback
  */
 typedef struct ACameraManager_ExtendedAvailabilityListener {
-    ///
+    /// Called when a camera becomes available or unavailable
     ACameraManager_AvailabilityCallbacks availabilityCallbacks;
 
     /// Called when there is camera access permission change
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 0d5e6c4..b331d50 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -256,10 +256,12 @@
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
- * {@link cameraMetadata}, which is an instance of
- * {@link android.hardware.camera2.CameraMetadata} (e.g., a
- * {@link android.hardware.camera2.CameraCharacteristics} or
- * {@link android.hardware.camera2.CaptureResult}).
+ * <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *     android.hardware.camera2.CameraMetadata</a> from Java API. (e.g., a
+ * <a href="/reference/android/hardware/camera2/CameraCharacteristics">
+ *     android.hardware.camera2.CameraCharacteristics</a>
+ * or <a href="/reference/android/hardware/camera2/CaptureResult">
+ *     android.hardware.camera2.CaptureResult</a>).
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
  * after application is done using it.</p>
@@ -269,11 +271,13 @@
  * the Java metadata is garbage collected.
  *
  * @param env the JNI environment.
- * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * @param cameraMetadata the source <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *                       android.hardware.camera2.CameraMetadata</a> from which the
  *                       returned {@link ACameraMetadata} is a view.
  *
- * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
- *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ * @return a valid ACameraMetadata pointer or NULL if cameraMetadata is null or not a valid
+ *         instance of <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *         android.hardware.camera2.CameraMetadata</a>.
  *
  */
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
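ACameraMetadata_fromCameraMetadata, documented above, returns a native view over a Java CameraMetadata object. A minimal JNI sketch; the Java class and method names are hypothetical, and only the NDK calls come from this header:

    #include <jni.h>
    #include <camera/NdkCameraMetadata.h>

    // Hypothetical native method taking a CameraCharacteristics (a
    // CameraMetadata subclass) passed down from Java.
    extern "C" JNIEXPORT void JNICALL
    Java_com_example_CameraInspector_nativeInspect(JNIEnv* env, jclass /*clazz*/,
                                                   jobject javaMetadata) {
        ACameraMetadata* metadata = ACameraMetadata_fromCameraMetadata(env, javaMetadata);
        if (metadata == nullptr) {
            return;  // null or not a valid CameraMetadata instance
        }
        // ... read entries with ACameraMetadata_getConstEntry() ...
        ACameraMetadata_free(metadata);  // the view must be freed by the application
    }
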
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 90515ab..20ffd48 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1868,7 +1868,7 @@
      * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
      * BURST_CAPTURE must still be met.</li>
      * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
-     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
+     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
      * will have preview bokeh effect applied.</li>
      * </ul>
      * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index a4dc374..d83c5b3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,10 +44,10 @@
 
 __BEGIN_DECLS
 
-// Container for output targets
+/** Container for output targets */
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
-// Container for a single output target
+/** Container for a single output target */
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
 /**
@@ -383,10 +383,10 @@
  * Set/change a camera capture control entry with unsigned 8 bits data type for
  * a physical camera backing a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_u8, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_u8, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -413,10 +413,10 @@
  * Set/change a camera capture control entry with signed 32 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i32, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i32, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -443,10 +443,10 @@
  * Set/change a camera capture control entry with float data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_float, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_float, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -473,10 +473,10 @@
  * Set/change a camera capture control entry with signed 64 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i64, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i64, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -503,10 +503,10 @@
  * Set/change a camera capture control entry with double data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_double, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_double, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -533,10 +533,10 @@
  * Set/change a camera capture control entry with rational data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_rational, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_rational, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
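The edits above drop the stray {@link} markup around the plain tag parameter in the per-physical-camera setters. For reference, a minimal sketch of calling one of these setters; ACAMERA_SENSOR_SENSITIVITY is just an example key and is only honored if it appears in ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS:

    #include <camera/NdkCaptureRequest.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Set a per-physical-camera control on a logical multi-camera request.
    camera_status_t setPhysicalSensitivity(ACaptureRequest* request,
                                           const char* physicalCameraId) {
        const int32_t iso = 400;
        return ACaptureRequest_setEntry_physicalCamera_i32(
                request, physicalCameraId, ACAMERA_SENSOR_SENSITIVITY, 1, &iso);
    }
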
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index fc5b75d..bab651f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -454,19 +454,11 @@
 
 }  // namespace
 
-static IV_COLOR_FORMAT_T GetIvColorFormat() {
-    static IV_COLOR_FORMAT_T sColorFormat =
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
-        IV_YUV_420P;
-    return sColorFormat;
-}
-
 C2SoftAvcEnc::C2SoftAvcEnc(
         const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mIvVideoColorFormat(GetIvColorFormat()),
+      mIvVideoColorFormat(IV_YUV_420P),
       mAVCEncProfile(IV_PROFILE_BASE),
       mAVCEncLevel(41),
       mStarted(false),
@@ -1034,7 +1026,8 @@
     // Assume worst case output buffer size to be equal to number of bytes in input
     mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
 
-    mIvVideoColorFormat = GetIvColorFormat();
+    // TODO
+    mIvVideoColorFormat = IV_YUV_420P;
 
     ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
             height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1332,6 +1325,7 @@
               mSize->width, input->height(), mSize->height);
         return C2_BAD_VALUE;
     }
+    ALOGV("width = %d, height = %d", input->width(), input->height());
     const C2PlanarLayout &layout = input->layout();
     uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
     uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1368,8 +1362,7 @@
                 return C2_BAD_VALUE;
             }
 
-            if (mIvVideoColorFormat == IV_YUV_420P
-                    && layout.planes[layout.PLANE_Y].colInc == 1
+            if (layout.planes[layout.PLANE_Y].colInc == 1
                     && layout.planes[layout.PLANE_U].colInc == 1
                     && layout.planes[layout.PLANE_V].colInc == 1
                     && uStride == vStride
@@ -1377,61 +1370,21 @@
                 // I420 compatible - already set up above
                 break;
             }
-            if (mIvVideoColorFormat == IV_YUV_420SP_UV
-                    && layout.planes[layout.PLANE_Y].colInc == 1
-                    && layout.planes[layout.PLANE_U].colInc == 2
-                    && layout.planes[layout.PLANE_V].colInc == 2
-                    && uStride == vStride
-                    && yStride == vStride
-                    && uPlane + 1 == vPlane) {
-                // NV12 compatible - already set up above
-                break;
-            }
-            if (mIvVideoColorFormat == IV_YUV_420SP_VU
-                    && layout.planes[layout.PLANE_Y].colInc == 1
-                    && layout.planes[layout.PLANE_U].colInc == 2
-                    && layout.planes[layout.PLANE_V].colInc == 2
-                    && uStride == vStride
-                    && yStride == vStride
-                    && uPlane == vPlane + 1) {
-                // NV21 compatible - already set up above
-                break;
-            }
 
             // copy to I420
             yStride = width;
             uStride = vStride = yStride / 2;
             MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
             mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
-            MediaImage2 img;
-            switch (mIvVideoColorFormat) {
-                case IV_YUV_420P:
-                    img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
-                    yPlane = conversionBuffer.data();
-                    uPlane = yPlane + yPlaneSize;
-                    vPlane = uPlane + yPlaneSize / 4;
-                    break;
-                case IV_YUV_420SP_VU:
-                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
-                    img.mPlane[MediaImage2::U].mOffset++;
-                    img.mPlane[MediaImage2::V].mOffset--;
-                    yPlane = conversionBuffer.data();
-                    vPlane = yPlane + yPlaneSize;
-                    uPlane = vPlane + 1;
-                    break;
-                case IV_YUV_420SP_UV:
-                default:
-                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
-                    yPlane = conversionBuffer.data();
-                    uPlane = yPlane + yPlaneSize;
-                    vPlane = uPlane + 1;
-                    break;
-            }
+            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
             status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
             if (err != OK) {
                 ALOGE("Buffer conversion failed: %d", err);
                 return C2_BAD_VALUE;
             }
+            yPlane = conversionBuffer.data();
+            uPlane = yPlane + yPlaneSize;
+            vPlane = uPlane + yPlaneSize / 4;
             break;
 
         }
@@ -1477,17 +1430,15 @@
             break;
         }
 
-        case IV_YUV_420SP_VU:
-            uPlane = vPlane;
-            [[fallthrough]];
         case IV_YUV_420SP_UV:
+        case IV_YUV_420SP_VU:
         default:
         {
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
             ps_inp_raw_buf->au4_wd[0] = mSize->width;
-            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width;
 
             ps_inp_raw_buf->au4_ht[0] = mSize->height;
             ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index fb3fbd0..dfad226 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -110,17 +110,20 @@
         }
         case kWhatStop: {
             int32_t err = thiz->onStop();
+            thiz->mOutputBlockPool.reset();
             Reply(msg, &err);
             break;
         }
         case kWhatReset: {
             thiz->onReset();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
         }
         case kWhatRelease: {
             thiz->onRelease();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
index f701987..5d0284f 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
@@ -35,7 +35,7 @@
 # on ARM is statically loaded at 0xffff 0000. See
 # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
 # for more details.
-mremap: arg3 == 3
+mremap: arg3 == 3 || arg3 == MREMAP_MAYMOVE
 munmap: 1
 prctl: 1
 writev: 1
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 63ae5cd..15d2989 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1798,17 +1798,19 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled && config->mSidebandHandle != nullptr) {
-        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
-        status_t err = native_window_set_sideband_stream(
-                nativeWindow.get(),
-                const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
-        if (err != OK) {
-            ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
-                    nativeWindow.get(), config->mSidebandHandle->handle(), err);
-            return err;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mTunneled && config->mSidebandHandle != nullptr) {
+            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+            status_t err = native_window_set_sideband_stream(
+                    nativeWindow.get(),
+                    const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+            if (err != OK) {
+                ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                        nativeWindow.get(), config->mSidebandHandle->handle(), err);
+                return err;
+            }
         }
     }
     return mChannel->setSurface(surface);
@@ -2149,80 +2151,92 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-            const std::unique_ptr<Config> &config = *configLocked;
-            Config::Watcher<C2StreamInitDataInfo::output> initData =
-                config->watch<C2StreamInitDataInfo::output>();
-            if (!work->worklets.empty()
-                    && (work->worklets.front()->output.flags
-                            & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
+            std::unique_ptr<C2Param> initData;
+            sp<AMessage> outputFormat = nullptr;
+            {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                Config::Watcher<C2StreamInitDataInfo::output> initDataWatcher =
+                    config->watch<C2StreamInitDataInfo::output>();
+                if (!work->worklets.empty()
+                        && (work->worklets.front()->output.flags
+                                & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
 
-                // copy buffer info to config
-                std::vector<std::unique_ptr<C2Param>> updates;
-                for (const std::unique_ptr<C2Param> &param
-                        : work->worklets.front()->output.configUpdate) {
-                    updates.push_back(C2Param::Copy(*param));
-                }
-                unsigned stream = 0;
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    for (const std::shared_ptr<const C2Info> &info : buf->info()) {
-                        // move all info into output-stream #0 domain
-                        updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
+                    // copy buffer info to config
+                    std::vector<std::unique_ptr<C2Param>> updates;
+                    for (const std::unique_ptr<C2Param> &param
+                            : work->worklets.front()->output.configUpdate) {
+                        updates.push_back(C2Param::Copy(*param));
+                    }
+                    unsigned stream = 0;
+                    std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
+                        work->worklets.front()->output.buffers;
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        for (const std::shared_ptr<const C2Info> &info : buf->info()) {
+                            // move all info into output-stream #0 domain
+                            updates.emplace_back(
+                                    C2Param::CopyAsStream(*info, true /* output */, stream));
+                        }
+
+                        const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                        // for now only do the first block
+                        if (!blocks.empty()) {
+                            // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
+                            //      block.crop().left, block.crop().top,
+                            //      block.crop().width, block.crop().height,
+                            //      block.width(), block.height());
+                            const C2ConstGraphicBlock &block = blocks[0];
+                            updates.emplace_back(new C2StreamCropRectInfo::output(
+                                    stream, block.crop()));
+                            updates.emplace_back(new C2StreamPictureSizeInfo::output(
+                                    stream, block.crop().width, block.crop().height));
+                        }
+                        ++stream;
                     }
 
-                    const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
-                    // for now only do the first block
-                    if (!blocks.empty()) {
-                        // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
-                        //      block.crop().left, block.crop().top,
-                        //      block.crop().width, block.crop().height,
-                        //      block.width(), block.height());
-                        const C2ConstGraphicBlock &block = blocks[0];
-                        updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
-                        updates.emplace_back(new C2StreamPictureSizeInfo::output(
-                                stream, block.crop().width, block.crop().height));
-                    }
-                    ++stream;
-                }
+                    sp<AMessage> oldFormat = config->mOutputFormat;
+                    config->updateConfiguration(updates, config->mOutputDomain);
+                    RevertOutputFormatIfNeeded(oldFormat, config->mOutputFormat);
 
-                sp<AMessage> outputFormat = config->mOutputFormat;
-                config->updateConfiguration(updates, config->mOutputDomain);
-                RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
-
-                // copy standard infos to graphic buffers if not already present (otherwise, we
-                // may overwrite the actual intermediate value with a final value)
-                stream = 0;
-                const static C2Param::Index stdGfxInfos[] = {
-                    C2StreamRotationInfo::output::PARAM_TYPE,
-                    C2StreamColorAspectsInfo::output::PARAM_TYPE,
-                    C2StreamDataSpaceInfo::output::PARAM_TYPE,
-                    C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                    C2StreamHdr10PlusInfo::output::PARAM_TYPE,
-                    C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
-                    C2StreamSurfaceScalingInfo::output::PARAM_TYPE
-                };
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    if (buf->data().graphicBlocks().size()) {
-                        for (C2Param::Index ix : stdGfxInfos) {
-                            if (!buf->hasInfo(ix)) {
-                                const C2Param *param =
-                                    config->getConfigParameterValue(ix.withStream(stream));
-                                if (param) {
-                                    std::shared_ptr<C2Param> info(C2Param::Copy(*param));
-                                    buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                    // copy standard infos to graphic buffers if not already present (otherwise, we
+                    // may overwrite the actual intermediate value with a final value)
+                    stream = 0;
+                    const static C2Param::Index stdGfxInfos[] = {
+                        C2StreamRotationInfo::output::PARAM_TYPE,
+                        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+                        C2StreamDataSpaceInfo::output::PARAM_TYPE,
+                        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
+                        C2StreamSurfaceScalingInfo::output::PARAM_TYPE
+                    };
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        if (buf->data().graphicBlocks().size()) {
+                            for (C2Param::Index ix : stdGfxInfos) {
+                                if (!buf->hasInfo(ix)) {
+                                    const C2Param *param =
+                                        config->getConfigParameterValue(ix.withStream(stream));
+                                    if (param) {
+                                        std::shared_ptr<C2Param> info(C2Param::Copy(*param));
+                                        buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                                    }
                                 }
                             }
                         }
+                        ++stream;
                     }
-                    ++stream;
                 }
-            }
-            if (config->mInputSurface) {
-                config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                if (config->mInputSurface) {
+                    config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                }
+                if (initDataWatcher.hasChanged()) {
+                    initData = C2Param::Copy(*initDataWatcher.update().get());
+                }
+                outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), config->mOutputFormat,
-                    initData.hasChanged() ? initData.update().get() : nullptr);
+                    std::move(work), outputFormat,
+                    initData ? (C2StreamInitDataInfo::output *)initData.get() : nullptr);
             break;
         }
         case kWhatWatch: {
@@ -2307,9 +2321,13 @@
             pendingDeadline = true;
         }
     }
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled == false && name.empty()) {
+    bool tunneled = false;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        tunneled = config->mTunneled;
+    }
+    if (!tunneled && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
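The CCodec.cpp changes above all follow the same pattern: hold the config lock only long enough to copy out the values that are needed (the tunneled flag, the output format, the init data), then call into other components without it. A standalone sketch of that pattern with a plain std::mutex; Config and the field names below are placeholders, not the real CCodec types:

    #include <mutex>

    struct Config {                // placeholder for the real CCodec config
        bool tunneled = false;
    };

    std::mutex gConfigLock;
    Config gConfig;

    bool isTunneled() {
        bool tunneled;
        {
            // Hold the lock only while reading the shared state.
            std::lock_guard<std::mutex> lock(gConfigLock);
            tunneled = gConfig.tunneled;
        }
        // Work with the copied value; callees that may themselves take
        // gConfigLock can now be invoked without risking a deadlock.
        return tunneled;
    }
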
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index c4f9d84..d0c1357 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1368,7 +1368,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1468,14 +1468,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1503,8 +1503,10 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
+    std::vector<uint64_t> indices;
     std::list<std::unique_ptr<C2Work>> configs;
     for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
         if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
             continue;
         }
@@ -1517,6 +1519,7 @@
         std::unique_ptr<C2Work> copy(new C2Work);
         copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
         copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
         copy->input.buffers.insert(
                 copy->input.buffers.begin(),
                 work->input.buffers.begin(),
@@ -1545,7 +1548,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index a78d811..0966988 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -346,7 +346,7 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
+            && (img->mPlane[2].mOffset == img->mPlane[1].mOffset + 1));
 }
 
 bool IsNV21(const MediaImage2 *img) {
@@ -355,7 +355,7 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+            && (img->mPlane[1].mOffset == img->mPlane[2].mOffset + 1));
 }
 
 bool IsI420(const MediaImage2 *img) {
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 1bbe443..f4a40a8 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -20,6 +20,7 @@
 #include <algorithm>
 #include <audio_utils/primitives.h>
 #include <aaudio/AAudio.h>
+#include <media/MediaMetricsItem.h>
 
 #include "client/AudioStreamInternalCapture.h"
 #include "utility/AudioClock.h"
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 3f17e6b..71bde90 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,7 @@
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
 #include "client/AudioStreamInternalPlay.h"
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e8f71be..ef83c8e 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -59,6 +59,10 @@
     if (!mMetricsId.empty()) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM)
+                .set(AMEDIAMETRICS_PROP_ENCODINGREQUESTED,
+                     android::toString(mDeviceFormat).c_str())
+                .set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL,
+                     AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
                 .record();
     }
 
@@ -124,7 +128,12 @@
             .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
                 AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
             .set(AMEDIAMETRICS_PROP_SHARINGMODE,
-                AudioGlobal_convertSharingModeToText(getSharingMode()));
+                AudioGlobal_convertSharingModeToText(getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, getBufferCapacity())
+            .set(AMEDIAMETRICS_PROP_BURSTFRAMES, getFramesPerBurst())
+            .set(AMEDIAMETRICS_PROP_DIRECTION,
+                AudioGlobal_convertDirectionToText(getDirection()));
+
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
         }
@@ -338,6 +347,22 @@
     return AAUDIO_OK;
 }
 
+void AudioStream::close_l() {
+    // Releasing the stream will set the state to CLOSING.
+    assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
+    // setState() prevents a transition from CLOSING to any state other than CLOSED.
+    // State is checked by destructor.
+    setState(AAUDIO_STREAM_STATE_CLOSED);
+
+    if (!mMetricsId.empty()) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_FRAMESTRANSFERRED,
+                        getDirection() == AAUDIO_DIRECTION_INPUT ? getFramesWritten()
+                                                                 : getFramesRead())
+                .record();
+    }
+}
+
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
     if (state == mState) {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index abf62f3..3930964 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -146,13 +146,7 @@
      * Free any resources not already freed by release_l().
      * Assume release_l() already called.
      */
-    virtual void close_l() REQUIRES(mStreamLock) {
-        // Releasing the stream will set the state to CLOSING.
-        assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
-        // setState() prevents a transition from CLOSING to any state other than CLOSED.
-        // State is checked by destructor.
-        setState(AAUDIO_STREAM_STATE_CLOSED);
-    }
+    virtual void close_l() REQUIRES(mStreamLock);
 
 public:
     // This is only used to identify a stream in the logs without
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 7733a04..e3ac6ff 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -308,11 +308,19 @@
 }
 
 void AudioStreamRecord::close_l() {
+    // The callbacks are normally joined in the AudioRecord destructor.
+    // But if another object has a reference to the AudioRecord then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+    mStreamLock.unlock();
+    mAudioRecord->stopAndJoinCallbacks();
+    mStreamLock.lock();
+
     mAudioRecord.clear();
-    // Do not close mFixedBlockWriter because a data callback
-    // thread might still be running if someone else has a reference
-    // to mAudioRecord.
-    // It has a unique_ptr to its buffer so it will clean up by itself.
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
     AudioStream::close_l();
 }
 
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 142a85c..df97658 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -259,12 +259,18 @@
 }
 
 void AudioStreamTrack::close_l() {
-    // Stop callbacks before deleting mFixedBlockReader memory.
+    // The callbacks are normally joined in the AudioTrack destructor.
+    // But if another object has a reference to the AudioTrack then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+    mStreamLock.unlock();
+    mAudioTrack->stopAndJoinCallbacks();
+    mStreamLock.lock();
     mAudioTrack.clear();
-    // Do not close mFixedBlockReader because a data callback
-    // thread might still be running if someone else has a reference
-    // to mAudioRecord.
-    // It has a unique_ptr to its buffer so it will clean up by itself.
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
     AudioStream::close_l();
 }
 
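Both close_l() implementations above carry the same new comment: the join must not happen while holding mStreamLock, because the data callback can try to take that lock (for example when it returns AAUDIO_CALLBACK_RESULT_STOP). A standalone sketch of that unlock-around-join pattern with plain standard types; the names are illustrative, not the AAudio classes:

    #include <mutex>
    #include <thread>

    std::mutex gStreamLock;        // stands in for mStreamLock
    std::thread gCallbackThread;   // stands in for the callback thread

    void closeStream() {
        std::unique_lock<std::mutex> lock(gStreamLock);
        // ... tear-down that requires the lock ...

        // Drop the lock before joining: the callback thread may itself try to
        // take gStreamLock (e.g. to request a stop), and joining while holding
        // it would deadlock.
        lock.unlock();
        if (gCallbackThread.joinable()) {
            gCallbackThread.join();
        }
        lock.lock();
        // ... finish tear-down that requires the lock ...
    }
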
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index f9eebd7..98e9727 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -209,9 +209,9 @@
 }
 
 cc_test {
-    name: "test_stop_hang",
+    name: "test_callback_race",
     defaults: ["libaaudio_tests_defaults"],
-    srcs: ["test_stop_hang.cpp"],
+    srcs: ["test_callback_race.cpp"],
     shared_libs: [
         "libaaudio",
         "libbinder",
diff --git a/media/libaaudio/tests/test_callback_race.cpp b/media/libaaudio/tests/test_callback_race.cpp
new file mode 100644
index 0000000..843d5d7
--- /dev/null
+++ b/media/libaaudio/tests/test_callback_race.cpp
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test whether the callback is joined before the close finishes.
+ *
+ * Start a stream with a callback.
+ * The callback just sleeps for a long time.
+ * While the callback is sleeping, close() the stream from the main thread.
+ * Then check to make sure the callback was joined before the close() returns.
+ *
+ * This can hang if there are deadlocks. So make sure you get a PASSED result.
+ */
+
+#include <atomic>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <gtest/gtest.h>
+
+#include <aaudio/AAudio.h>
+
+// Sleep long enough that the foreground has a chance to call close.
+static constexpr int kCallbackSleepMicros = 600 * 1000;
+
+class AudioEngine {
+public:
+
+    // Check for a crash or late callback if we close without stopping.
+    void checkCloseJoins(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode,
+                             aaudio_data_callback_result_t callbackResult) {
+
+        // Make printf print immediately so that debug info is not stuck
+        // in a buffer if we hang or crash.
+        setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+        mCallbackResult = callbackResult;
+        startStreamForStall(direction, perfMode);
+        // When the callback starts it will go to sleep.
+        waitForCallbackToStart();
+
+        printf("call AAudioStream_close()\n");
+        ASSERT_FALSE(mCallbackFinished); // Still sleeping?
+        aaudio_result_t result = AAudioStream_close(mStream); // May hang here!
+        ASSERT_TRUE(mCallbackFinished);
+        ASSERT_EQ(AAUDIO_OK, result);
+        printf("AAudioStream_close() returned %d\n", result);
+
+        ASSERT_EQ(AAUDIO_OK, mError.load());
+        // Did calling stop() from callback fail? It should have.
+        ASSERT_NE(AAUDIO_OK, mStopResult.load());
+    }
+
+private:
+    void startStreamForStall(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode) {
+        AAudioStreamBuilder* builder = nullptr;
+        aaudio_result_t result = AAUDIO_OK;
+
+        // Use an AAudioStreamBuilder to contain requested parameters.
+        result = AAudio_createStreamBuilder(&builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Request stream properties.
+        AAudioStreamBuilder_setDirection(builder, direction);
+        AAudioStreamBuilder_setPerformanceMode(builder, perfMode);
+        AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+        AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+        // Create an AAudioStream using the Builder.
+        result = AAudioStreamBuilder_openStream(builder, &mStream);
+        AAudioStreamBuilder_delete(builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Check to see what kind of stream we actually got.
+        int32_t deviceId = AAudioStream_getDeviceId(mStream);
+        aaudio_performance_mode_t
+            actualPerfMode = AAudioStream_getPerformanceMode(mStream);
+        printf("-------- opened: deviceId = %3d, perfMode = %d\n",
+               deviceId,
+               actualPerfMode);
+
+        // Start stream.
+        result = AAudioStream_requestStart(mStream);
+        ASSERT_EQ(AAUDIO_OK, result);
+    }
+
+    void waitForCallbackToStart() {
+        // Wait for callback to say it has been called.
+        int countDownMillis = 2000;
+        constexpr int countDownPeriodMillis = 50;
+        while (!mCallbackStarted && countDownMillis > 0) {
+            printf("Waiting for callback to start, %d\n", countDownMillis);
+            usleep(countDownPeriodMillis * 1000);
+            countDownMillis -= countDownPeriodMillis;
+        }
+        ASSERT_LT(0, countDownMillis);
+        ASSERT_TRUE(mCallbackStarted);
+    }
+
+// Callback function that fills the audio output buffer.
+    static aaudio_data_callback_result_t s_myDataCallbackProc(
+            AAudioStream *stream,
+            void *userData,
+            void * /*audioData */,
+            int32_t /* numFrames */
+    ) {
+        AudioEngine* engine = (AudioEngine*) userData;
+        engine->mCallbackStarted = true;
+        usleep(kCallbackSleepMicros);
+        // It is illegal to call stop() from the callback. It should
+        // return an error and not hang.
+        engine->mStopResult = AAudioStream_requestStop(stream);
+        engine->mCallbackFinished = true;
+        return engine->mCallbackResult;
+    }
+
+    static void s_myErrorCallbackProc(
+                AAudioStream * /* stream */,
+                void *userData,
+                aaudio_result_t error) {
+        AudioEngine *engine = (AudioEngine *)userData;
+        engine->mError = error;
+    }
+
+    AAudioStream* mStream = nullptr;
+
+    std::atomic<aaudio_result_t> mError{AAUDIO_OK}; // written by error callback
+    std::atomic<bool> mCallbackStarted{false};   // written by data callback
+    std::atomic<bool> mCallbackFinished{false};  // written by data callback
+    std::atomic<aaudio_data_callback_result_t> mCallbackResult{AAUDIO_CALLBACK_RESULT_CONTINUE};
+    std::atomic<aaudio_result_t> mStopResult{AAUDIO_OK};
+};
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_CONTINUE.
+
+TEST(test_close_timing, aaudio_close_joins_input_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_STOP.
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
deleted file mode 100644
index 982ff4a..0000000
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Return stop from the callback
- * and then close the stream immediately.
- */
-
-#include <atomic>
-#include <mutex>
-#include <stdio.h>
-#include <thread>
-#include <unistd.h>
-
-#include <aaudio/AAudio.h>
-
-#define DURATION_SECONDS   5
-
-struct AudioEngine {
-    AAudioStreamBuilder *builder = nullptr;
-    AAudioStream        *stream = nullptr;
-    std::thread         *thread = nullptr;
-
-    std::atomic<bool>   started{false};
-    std::mutex          doneLock; // Use a mutex so we can sleep on it while join()ing.
-    std::atomic<bool>   done{false};
-
-    aaudio_result_t join() {
-        aaudio_result_t result = AAUDIO_ERROR_INVALID_STATE;
-        if (stream != nullptr) {
-            while (true) {
-                {
-                    // Will block if the thread is running.
-                    // This mutex is used to close() immediately after the callback returns
-                    // and before the requestStop_l() is called.
-                    std::lock_guard<std::mutex> lock(doneLock);
-                    if (done) break;
-                }
-                printf("join() got mutex but stream not done!");
-                usleep(10 * 1000); // sleep then check again
-            }
-            result = AAudioStream_close(stream);
-            stream = nullptr;
-        }
-        return result;
-    }
-};
-
-// Callback function that fills the audio output buffer.
-static aaudio_data_callback_result_t s_myDataCallbackProc(
-        AAudioStream *stream,
-        void *userData,
-        void *audioData,
-        int32_t numFrames
-) {
-    (void) stream;
-    (void) audioData;
-    (void) numFrames;
-    AudioEngine *engine = (struct AudioEngine *)userData;
-    std::lock_guard<std::mutex> lock(engine->doneLock);
-    engine->started = true;
-    usleep(DURATION_SECONDS * 1000 * 1000); // Mimic SynthMark procedure.
-    engine->done = true;
-    return AAUDIO_CALLBACK_RESULT_STOP;
-}
-
-static void s_myErrorCallbackProc(
-    AAudioStream *stream __unused,
-    void *userData __unused,
-    aaudio_result_t error) {
-    printf("%s() - error = %d\n", __func__, error);
-}
-
-static aaudio_result_t s_OpenAudioStream(struct AudioEngine *engine) {
-    // Use an AAudioStreamBuilder to contain requested parameters.
-    aaudio_result_t result = AAudio_createStreamBuilder(&engine->builder);
-    if (result != AAUDIO_OK) {
-        printf("AAudio_createStreamBuilder returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    // Request stream properties.
-    AAudioStreamBuilder_setPerformanceMode(engine->builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
-    AAudioStreamBuilder_setDataCallback(engine->builder, s_myDataCallbackProc, engine);
-    AAudioStreamBuilder_setErrorCallback(engine->builder, s_myErrorCallbackProc, engine);
-
-    // Create an AAudioStream using the Builder.
-    result = AAudioStreamBuilder_openStream(engine->builder, &engine->stream);
-    if (result != AAUDIO_OK) {
-        printf("AAudioStreamBuilder_openStream returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    return result;
-}
-
-int main(int argc, char **argv) {
-    (void) argc;
-    (void) argv;
-    struct AudioEngine engine;
-    aaudio_result_t result = AAUDIO_OK;
-    int errorCount = 0;
-
-    // Make printf print immediately so that debug info is not stuck
-    // in a buffer if we hang or crash.
-    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
-
-    printf("Test Return Stop Hang V1.0\n");
-
-    result = s_OpenAudioStream(&engine);
-    if (result != AAUDIO_OK) {
-        printf("s_OpenAudioStream returned %s\n",
-               AAudio_convertResultToText(result));
-        errorCount++;
-    }
-
-    // Check to see what kind of stream we actually got.
-    int32_t deviceId = AAudioStream_getDeviceId(engine.stream);
-    aaudio_performance_mode_t actualPerfMode = AAudioStream_getPerformanceMode(engine.stream);
-    printf("-------- opened: deviceId = %3d, perfMode = %d\n", deviceId, actualPerfMode);
-
-    // Start stream.
-    result = AAudioStream_requestStart(engine.stream);
-    printf("AAudioStream_requestStart() returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
-    if (result != AAUDIO_OK) {
-        errorCount++;
-    } else {
-        int counter = 0;
-        while (!engine.started) {
-            printf("Waiting for stream to start, %d\n", counter++);
-            usleep(5 * 1000);
-        }
-        printf("You should see more messages %d seconds after this. If not then the test failed!\n",
-               DURATION_SECONDS);
-        result = engine.join(); // This might hang!
-        AAudioStreamBuilder_delete(engine.builder);
-        engine.builder = nullptr;
-    }
-
-    printf("aaudio result = %d = %s\n", result, AAudio_convertResultToText(result));
-    printf("test %s\n", errorCount ? "FAILED" : "PASSED");
-
-    return errorCount ? EXIT_FAILURE : EXIT_SUCCESS;
-}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index e15ef3d..90f6f41 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -181,21 +181,9 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer empty condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioRecordThread != 0) {
-            mProxy->interrupt();
-            mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
-            mAudioRecordThread->requestExitAndWait();
-            mAudioRecordThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
-        }
         IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
         mAudioRecord.clear();
         mCblkMemory.clear();
@@ -208,6 +196,27 @@
     }
 }
 
+void AudioRecord::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer empty condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioRecordThread != 0) {
+        mProxy->interrupt();
+        mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
+        mAudioRecordThread->requestExitAndWait();
+        mAudioRecordThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+    }
+}
 status_t AudioRecord::set(
         audio_source_t inputSource,
         uint32_t sampleRate,
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 6c9e85c..1bc3baa 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -327,21 +327,9 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer full condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioTrackThread != 0) {
-            mProxy->interrupt();
-            mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
-            mAudioTrackThread->requestExitAndWait();
-            mAudioTrackThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
-        }
         IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
         mAudioTrack.clear();
         mCblkMemory.clear();
@@ -355,6 +343,29 @@
     }
 }
 
+void AudioTrack::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer full condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioTrackThread != 0) { // not thread safe
+        mProxy->interrupt();
+        mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
+        mAudioTrackThread->requestExitAndWait();
+        mAudioTrackThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+        mDeviceCallback.clear();
+    }
+}
+
 status_t AudioTrack::set(
         audio_stream_type_t streamType,
         uint32_t sampleRate,
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 82a29d4..3467c3a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -303,6 +303,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Call stop() and then wait for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioRecord
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioRecord
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioRecord is being closed.
+     */
+            void        stopAndJoinCallbacks();
+
     /* Return the sink sample rate for this record track in Hz.
      * If specified as zero in constructor or set(), this will be the source sample rate.
      * Unlike AudioTrack, the sample rate is const after initialization, so doesn't need a lock.
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index d167c40..c293343 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -479,6 +479,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Call stop() and then wait for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioTrack
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioTrack
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioTrack is being closed.
+     */
+            void        stopAndJoinCallbacks();
+
     /* Flush a stopped or paused track. All previously buffered data is discarded immediately.
      * This has the effect of draining the buffers without mixing or output.
      * Flush is intended for streaming mode, for example before switching to non-contiguous content.
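For illustration, a minimal sketch of how a client might call the new stopAndJoinCallbacks() when tearing down a callback-mode AudioTrack (the same pattern applies to AudioRecord); the helper name and surrounding setup are hypothetical, not part of this change:

#include <media/AudioTrack.h>

// Hypothetical helper; assumes 'track' was created in callback (transfer) mode.
static void releaseTrack(const android::sp<android::AudioTrack>& track) {
    if (track == nullptr) return;
    // Ensures the data/event callback thread has fully exited before any state the
    // callback touches is released; safe even if stop() or pause() was already called.
    track->stopAndJoinCallbacks();
    // The AudioTrack itself is ref counted, so its destructor may run later, but the
    // callback thread is guaranteed to have finished by this point.
}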
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 5d75055..7998879 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -63,7 +63,6 @@
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
         "Common/src/Copy_16.cpp",
         "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/dB_to_Lin32.cpp",
         "Common/src/Shift_Sat_v16xv16.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
@@ -148,7 +147,6 @@
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
         "Common/src/Copy_16.cpp",
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 9f5f448..12b86f3 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -137,9 +137,9 @@
 
         pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
-        LoadConst_Float(0, /* Clear the input delay buffer */
-                        (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
-                        (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
+        memset(pInstance->pBufferManagement->InDelayBuffer, 0,
+                LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE *
+                sizeof(pInstance->pBufferManagement->InDelayBuffer[0]));
         pInstance->pBufferManagement->InDelaySamples =
                 MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
         pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index 18de85b..10f351e 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,8 +24,6 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
-
 void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                           LVM_INT32 NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index be19fa0..5a67bda 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -68,7 +69,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
@@ -150,7 +151,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, NrFrames * NrChannels);
+            memset(dst, 0, NrFrames * NrChannels * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
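The removed LoadConst_Float(0, ...) calls were only ever used to clear buffers, so memset is an equivalent replacement as long as the byte count is scaled by the element size; a small standalone sketch of the equivalence (buffer and function names invented for illustration):

#include <cstring>

// Zeroing a float buffer: an all-zero bit pattern is exactly 0.0f in IEEE-754,
// so this matches the old element-by-element LoadConst_Float(0, dst, n) loop.
static void zeroFloatBuffer(float* dst, size_t n) {
    memset(dst, 0, n * sizeof(*dst));  // size in bytes, not elements
}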
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
deleted file mode 100644
index df7a558..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_32
-***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--) {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index 8408962..58a9102 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "Mixer_private.h"
 #include "VectorArithmetic.h"
 
@@ -61,7 +62,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else if ((pInstance->Target) == 1.0f) {
             if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
         } else
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index d4b321f..be3505f 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -60,7 +60,8 @@
     pLVREV_Private->pRevLPFBiquad->clear();
     for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
         pLVREV_Private->revLPFBiquad[i]->clear();
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        memset(pLVREV_Private->pDelay_T[i], 0, LVREV_MAX_T_DELAY[i] *
+                sizeof(pLVREV_Private->pDelay_T[i][0]));
     }
     return LVREV_SUCCESS;
 }
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index c5b6598..de23d07 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -81,10 +81,7 @@
         pConfig->DelaySize =
                 (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
         pConfig->DelayOffset = 0;
-        LoadConst_Float(0,                                      /* Value */
-                        (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
+        memset(pConfig->StereoSamples, 0, sizeof(pConfig->StereoSamples));
         /*
          * Setup the filters
          */
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index de4f8d4..383bae8 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -160,6 +160,12 @@
 #define AMEDIAMETRICS_PROP_VOLUME_LEFT    "volume.left"    // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_VOLUME_RIGHT   "volume.right"   // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_WHERE          "where"          // string value
+// EncodingRequested is the encoding format requested by the app
+#define AMEDIAMETRICS_PROP_ENCODINGREQUESTED "encodingRequested" // string
+// PerformanceModeActual is the actually selected performance mode, which could be "none",
+// "lowLatency" or "powerSaving".
+#define AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL "performanceModeActual" // string
+#define AMEDIAMETRICS_PROP_FRAMESTRANSFERRED "framesTransferred" // int64_t, transferred frames
 
 // Timing values: millisecond values are suffixed with MS and the type is double
 // nanosecond values are suffixed with NS and the type is int64.
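A hedged sketch of how the new keys might be recorded, following the same mediametrics LogItem set()/record() pattern visible in the AudioRecord/AudioTrack hunks above; the include path, the metrics id, and the values shown are placeholders:

#include <media/MediaMetricsItem.h>  // include path as used elsewhere in libaudioclient (assumption)

// Placeholder values; mMetricsId is whatever id the stream already logs under.
mediametrics::LogItem(mMetricsId)
        .set(AMEDIAMETRICS_PROP_ENCODINGREQUESTED, "AAUDIO_FORMAT_PCM_FLOAT")
        .set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL, "lowLatency")
        .set(AMEDIAMETRICS_PROP_FRAMESTRANSFERRED, (int64_t)framesTransferred)
        .record();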
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
index 68e2875..9705f3c 100644
--- a/media/libmediatranscoding/TranscodingSessionController.cpp
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -19,6 +19,7 @@
 
 #define VALIDATE_STATE 1
 
+#include <android/permission_manager.h>
 #include <inttypes.h>
 #include <media/TranscodingSessionController.h>
 #include <media/TranscodingUidPolicy.h>
@@ -193,7 +194,7 @@
 
     ~Pacer() = default;
 
-    bool onSessionStarted(uid_t uid);
+    bool onSessionStarted(uid_t uid, uid_t callingUid);
     void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime);
     void onSessionCancelled(uid_t uid);
 
@@ -212,9 +213,49 @@
         std::chrono::steady_clock::time_point lastCompletedTime;
     };
     std::map<uid_t, UidHistoryEntry> mUidHistoryMap;
+    std::unordered_set<uid_t> mMtpUids;
+    std::unordered_set<uid_t> mNonMtpUids;
+
+    bool isSubjectToQuota(uid_t uid, uid_t callingUid);
 };
 
-bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid) {
+bool TranscodingSessionController::Pacer::isSubjectToQuota(uid_t uid, uid_t callingUid) {
+    // Submitting with self uid is not limited (which can only happen if it's used as an
+    // app-facing API). MediaProvider usage always submits on behalf of other uids.
+    if (uid == callingUid) {
+        return false;
+    }
+
+    if (mMtpUids.find(uid) != mMtpUids.end()) {
+        return false;
+    }
+
+    if (mNonMtpUids.find(uid) != mNonMtpUids.end()) {
+        return true;
+    }
+
+    // We don't have MTP permission info about this uid yet, check permission and save the result.
+    int32_t result;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (APermissionManager_checkPermission("android.permission.ACCESS_MTP", -1 /*pid*/, uid,
+                                               &result) == PERMISSION_MANAGER_STATUS_OK &&
+            result == PERMISSION_MANAGER_PERMISSION_GRANTED) {
+            mMtpUids.insert(uid);
+            return false;
+        }
+    }
+
+    mNonMtpUids.insert(uid);
+    return true;
+}
+
+bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid, uid_t callingUid) {
+    if (!isSubjectToQuota(uid, callingUid)) {
+        ALOGI("Pacer::onSessionStarted: uid %d (caling uid: %d): not subject to quota", uid,
+              callingUid);
+        return true;
+    }
+
     // If uid doesn't exist, only insert the entry and mark session active. Skip quota checking.
     if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
         mUidHistoryMap.emplace(uid, UidHistoryEntry{});
@@ -494,7 +535,7 @@
             // Check if at least one client has quota to start the session.
             bool keepForClient = false;
             for (uid_t uid : topSession->allClientUids) {
-                if (mPacer->onSessionStarted(uid)) {
+                if (mPacer->onSessionStarted(uid, topSession->callingUid)) {
                     keepForClient = true;
                     // DO NOT break here, because book-keeping still needs to happen
                     // for the other uids.
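To make the new exemption rules concrete, a hypothetical call sequence (the uids are invented; Pacer is a private nested class, so this is shown as comments rather than compilable code):

// Pacer pacer(...);
// Self-submission (uid == callingUid): never subject to quota, no history bookkeeping.
// pacer.onSessionStarted(/*uid=*/10100, /*callingUid=*/10100);   // -> true
// Submitted on behalf of another uid (e.g. by MediaProvider): subject to quota,
// unless uid 10100 holds android.permission.ACCESS_MTP (the check result is cached per uid).
// pacer.onSessionStarted(/*uid=*/10100, /*callingUid=*/10005);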
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index 9e7fa95..ef9c4f8 100644
--- a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -339,19 +339,37 @@
         EXPECT_EQ(mTranscoder.use_count(), 2);
     }
 
-    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
-                         bool pauseLastSuccessSession = false) {
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
         testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
-                        pauseLastSuccessSession);
+                        false /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithPause(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        true /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithMultipleUids(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                                         const std::shared_ptr<TestClientCallback>& client,
+                                         const std::vector<int>& additionalClientUids) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, client,
+                        additionalClientUids, false /*pauseLastSuccessSession*/,
+                        true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithSelfUid(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        false /*pauseLastSuccessSession*/, false /*useRealCallingUid*/);
     }
 
     void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
                          const std::shared_ptr<TestClientCallback>& client,
-                         const std::vector<int>& additionalClientUids,
-                         bool pauseLastSuccessSession) {
+                         const std::vector<int>& additionalClientUids, bool pauseLastSuccessSession,
+                         bool useRealCallingUid) {
+        uid_t callingUid = useRealCallingUid ? ::getuid() : client->clientUid();
         for (int i = 0; i < numSubmits; i++) {
-            mController->submit(client->clientId(), SESSION(i), client->clientUid(),
-                                client->clientUid(), mRealtimeRequest, client);
+            mController->submit(client->clientId(), SESSION(i), callingUid, client->clientUid(),
+                                mRealtimeRequest, client);
             for (int additionalUid : additionalClientUids) {
                 mController->addClientUid(client->clientId(), SESSION(i), additionalUid);
             }
@@ -1294,8 +1312,7 @@
 
 TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerWithPause) {
     ALOGD("TestTranscoderPacerDuringPause");
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    true /*pauseLastSuccessSession*/);
+    testPacerHelperWithPause(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
 }
 
 /*
@@ -1305,17 +1322,26 @@
 TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerMultipleUids) {
     ALOGD("TestTranscoderPacerMultipleUids");
     // First, run mClientCallback0 to the point of no quota.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    mClientCallback0, {}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback0, {});
     // Make UID(0) block on Client1's sessions too, Client1's quota should not be affected.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    mClientCallback1, {UID(0)}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback1, {UID(0)});
     // Make UID(10) block on Client2's sessions. We expect to see 11 succeeds (instead of 10),
     // because the addClientUid() is called after the submit, and first session is already
     // started by the time UID(10) is added. UID(10) allowed us to run the 11th session,
     // after that both UID(10) and UID(2) are out of quota.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 11 /*expectedSuccess*/,
-                    mClientCallback2, {UID(10)}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    11 /*expectedSuccess*/, mClientCallback2, {UID(10)});
+}
+
+/*
+ * Use same uid for clientUid and callingUid, should not be limited by quota.
+ */
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerSelfUid) {
+    ALOGD("TestTranscoderPacerSelfUid");
+    testPacerHelperWithSelfUid(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                               12 /*expectedSuccess*/);
 }
 
 }  // namespace android
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index 784e802..30d0ae6 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -146,6 +146,10 @@
 int WriteOpusHeader(const OpusHeader &header, int input_sample_rate,
                     uint8_t* output, size_t output_size) {
     // See https://wiki.xiph.org/OggOpus#ID_Header.
+    if (header.channels < 1 || header.channels > kMaxChannels) {
+        ALOGE("Invalid channel count: %d", header.channels);
+        return -1;
+    }
     const size_t total_size = kOpusHeaderStreamMapOffset + header.channels;
     if (output_size < total_size) {
         ALOGE("Output buffer too small for header.");
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 866ebfd..d7eccb8 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -152,12 +152,17 @@
 /**
  * Creates a new media muxer for appending data to an existing MPEG4 file.
  * This is a synchronous API call and could take a while to return if the existing file is large.
- * Works for only MPEG4 files that contain a) a single audio track, b) a single video track,
- * c) a single audio and a single video track.
- * @param(fd): needs to be opened with read and write permission.  Does not take ownership of
+ * Only works for MPEG4 files that contain one of the following track configurations:
+ * <ul>
+ *    <li>a single audio track.</li>
+ *    <li>a single video track.</li>
+ *    <li>a single audio and a single video track.</li>
+ * </ul>
+ * @param fd Must be opened with read and write permission. Does not take ownership of
  * this fd i.e., caller is responsible for closing fd.
- * @param(mode): AppendMode is an enum that specifies one of the modes of appending data.
- * @return : Pointer to AMediaMuxer if the file(fd) has tracks already, otherwise, nullptr.
+ * @param mode Specifies how data will be appended; the AppendMode enum describes
+ *             the possible methods for appending.
+ * @return Pointer to AMediaMuxer if the file (fd) already contains tracks, otherwise nullptr.
  * {@link AMediaMuxer_delete} should be used to free the returned pointer.
  *
  * Available since API level 31.
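A hedged usage sketch for the append entry point this comment documents; the function name AMediaMuxer_append and the AppendMode constant below are assumptions, since the declaration itself is outside this hunk:

#include <fcntl.h>
#include <unistd.h>
#include <media/NdkMediaMuxer.h>

// Returns true if the file could be opened for appending; everything beyond the
// NDK calls named above is illustrative.
static bool appendToExistingMp4(const char* path) {
    int fd = open(path, O_RDWR);                 // must be readable and writable
    if (fd < 0) return false;
    AMediaMuxer* muxer = AMediaMuxer_append(fd, AMEDIAMUXER_APPEND_TO_EXISTING_DATA);
    if (muxer == nullptr) {                      // file has no usable tracks
        close(fd);
        return false;
    }
    // ... add track(s), AMediaMuxer_start(), write samples, AMediaMuxer_stop() ...
    AMediaMuxer_delete(muxer);                   // the caller still owns fd
    close(fd);
    return true;
}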
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 52dc0cf..9e48c1f 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -46,6 +46,7 @@
         "libbinder",
         "libcutils",
         "liblog",
+        "libpermission",
         "libutils",
         "libhidlbase",
         "android.hardware.graphics.bufferqueue@1.0",
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 562c213..84ed656 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -253,6 +253,18 @@
     // Children: ModulesTraits, VolumeTraits, SurroundSoundTraits (optional)
 };
 
+// Deleter using free() for use with std::unique_ptr<>. See also UniqueCPtr<> below.
+struct FreeDelete {
+    // NOTE: Deleting a const object is valid but free() takes a non-const pointer.
+    void operator()(const void* ptr) const {
+        free(const_cast<void*>(ptr));
+    }
+};
+
+// Alias for std::unique_ptr<> that uses the C function free() to delete objects.
+template <typename T>
+using UniqueCPtr = std::unique_ptr<T, FreeDelete>;
+
 template <class T>
 constexpr void (*xmlDeleter)(T* t);
 template <>
@@ -608,7 +620,7 @@
     }
     // Tokenize and Convert Sources name to port pointer
     PolicyAudioPortVector sources;
-    std::unique_ptr<char[]> sourcesLiteral{strndup(
+    UniqueCPtr<char> sourcesLiteral{strndup(
                 sourcesAttr.c_str(), strlen(sourcesAttr.c_str()))};
     char *devTag = strtok(sourcesLiteral.get(), ",");
     while (devTag != NULL) {
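For clarity, a small standalone sketch of the UniqueCPtr pattern introduced above; it exists because strndup()/strdup() allocate with malloc(), so pairing the result with std::unique_ptr<char[]> (which calls delete[]) would mismatch allocator and deleter:

#include <cstdlib>
#include <cstring>
#include <memory>

struct FreeDelete {
    // free() takes a non-const pointer, so cast away constness before freeing.
    void operator()(const void* ptr) const { free(const_cast<void*>(ptr)); }
};
template <typename T>
using UniqueCPtr = std::unique_ptr<T, FreeDelete>;

int main() {
    UniqueCPtr<char> csv{strdup("speaker,earpiece,bt_sco")};  // malloc-backed copy
    for (char* tok = strtok(csv.get(), ","); tok != nullptr; tok = strtok(nullptr, ",")) {
        // use tok ...
    }
    return 0;  // csv is released with free(), matching the allocator
}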
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index dd44c54..ad359ec 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2229,7 +2229,8 @@
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
             device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
-            ALOGV("%s found device type is 0x%X", __FUNCTION__, device->type());
+            ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
+                __FUNCTION__, device->type());
         }
         if (device == nullptr) {
             ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 4ffa9cc..b5eb98f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -537,35 +537,34 @@
 {
 //    Go over all active clients and allow capture (does not force silence) in the
 //    following cases:
-//    The client source is virtual (remote submix, call audio TX or RX...)
-//    OR The user the client is running in has microphone sensor privacy disabled
-//        AND The client is the assistant
-//                AND an accessibility service is on TOP or a RTT call is active
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR uses VOICE_RECOGNITION AND is on TOP
-//                        OR uses HOTWORD
-//                    AND there is no active privacy sensitive capture or call
-//                        OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR The client is an accessibility service
-//                AND Is on TOP
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR The assistant is not on TOP
-//                        AND there is no active privacy sensitive capture or call
-//                            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//                AND is on TOP
+//    The client is the assistant
+//        AND an accessibility service is on TOP or a RTT call is active
 //                AND the source is VOICE_RECOGNITION or HOTWORD
-//            OR the client source is HOTWORD
-//                AND is on TOP
-//                    OR all active clients are using HOTWORD source
-//                AND no call is active
-//                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR the client is the current InputMethodService
-//                AND a RTT call is active AND the source is VOICE_RECOGNITION
-//            OR Any client
-//                AND The assistant is not on TOP
-//                AND is on TOP or latest started
+//            OR uses VOICE_RECOGNITION AND is on TOP
+//                OR uses HOTWORD
+//            AND there is no active privacy sensitive capture or call
+//                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR The client is an accessibility service
+//        AND Is on TOP
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR The assistant is not on TOP
 //                AND there is no active privacy sensitive capture or call
 //                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//        AND is on TOP
+//        AND the source is VOICE_RECOGNITION or HOTWORD
+//    OR the client source is virtual (remote submix, call audio TX or RX...)
+//    OR the client source is HOTWORD
+//        AND is on TOP
+//            OR all active clients are using HOTWORD source
+//        AND no call is active
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR the client is the current InputMethodService
+//        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR Any client
+//        AND The assistant is not on TOP
+//        AND is on TOP or latest started
+//        AND there is no active privacy sensitive capture or call
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 
 
     sp<AudioRecordClient> topActive;
@@ -596,8 +595,7 @@
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
         uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(current->identity.uid));
-        if (!current->active || (!isVirtualSource(current->attributes.source)
-                && isUserSensorPrivacyEnabledForUid(currentUid))) {
+        if (!current->active) {
             continue;
         }
 
@@ -734,9 +732,6 @@
         if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (isUserSensorPrivacyEnabledForUid(currentUid)) {
-            // If sensor privacy is enabled, don't allow capture
-            allowCapture = false;
         } else if (mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
             //     An accessibility service is on TOP or a RTT call is active
@@ -1145,16 +1140,6 @@
     return NO_INIT;
 }
 
-bool AudioPolicyService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mMicrophoneSensorPrivacyPolicies.find(userId) == mMicrophoneSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        userPolicy->registerSelfForMicrophoneOnly(userId);
-        mMicrophoneSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mMicrophoneSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 status_t AudioPolicyService::printHelp(int out) {
     return dprintf(out, "Audio policy service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 145ba06..00d9670 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -346,8 +346,6 @@
     status_t validateUsage(audio_usage_t usage);
     status_t validateUsage(audio_usage_t usage, const media::permission::Identity& identity);
 
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
-
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
 
@@ -908,8 +906,6 @@
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
-
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mMicrophoneSensorPrivacyPolicies;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 32c0267..07c889b 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -114,6 +114,7 @@
         "libutils",
         "libbinder",
         "libactivitymanager_aidl",
+        "libpermission",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -163,6 +164,7 @@
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
+        "libpermission",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 88a1ab6..74d44cc 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -29,6 +29,7 @@
 #include <inttypes.h>
 #include <pthread.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/ICamera.h>
 #include <android/hardware/ICameraClient.h>
 
@@ -145,6 +146,7 @@
 
 void CameraService::onFirstRef()
 {
+
     ALOGI("CameraService process starting");
 
     BnCameraService::onFirstRef();
@@ -242,10 +244,6 @@
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
     mUidPolicy->unregisterSelf();
     mSensorPrivacyPolicy->unregisterSelf();
-
-    for (auto const& [_, policy] : mCameraSensorPrivacyPolicies) {
-        policy->unregisterSelf();
-    }
 }
 
 void CameraService::onNewProviderRegistered() {
@@ -755,6 +753,10 @@
     return Status::ok();
 }
 
+void CameraService::clearCachedVariables() {
+    BasicClient::BasicClient::sCameraService = nullptr;
+}
+
 int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
     ATRACE_CALL();
 
@@ -1711,8 +1713,9 @@
 
         // Set camera muting behavior
         if (client->supportsCameraMute()) {
-            client->setCameraMute(mOverrideCameraMuteMode ||
-                    isUserSensorPrivacyEnabledForUid(clientUid));
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
+            client->setCameraMute(mOverrideCameraMuteMode || isCameraPrivacyEnabled);
         }
 
         if (shimUpdateOnly) {
@@ -2156,10 +2159,15 @@
     return addListenerHelper(listener, cameraStatuses);
 }
 
+binder::Status CameraService::addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            std::vector<hardware::CameraStatus>* cameraStatuses) {
+    return addListenerHelper(listener, cameraStatuses, false, true);
+}
+
 Status CameraService::addListenerHelper(const sp<ICameraServiceListener>& listener,
         /*out*/
         std::vector<hardware::CameraStatus> *cameraStatuses,
-        bool isVendorListener) {
+        bool isVendorListener, bool isProcessLocalTest) {
 
     ATRACE_CALL();
 
@@ -2190,7 +2198,7 @@
         sp<ServiceListener> serviceListener =
                 new ServiceListener(this, listener, clientUid, clientPid, isVendorListener,
                         openCloseCallbackAllowed);
-        auto ret = serviceListener->initialize();
+        auto ret = serviceListener->initialize(isProcessLocalTest);
         if (ret != NO_ERROR) {
             String8 msg = String8::format("Failed to initialize service listener: %s (%d)",
                     strerror(-ret), ret);
@@ -2945,10 +2953,17 @@
         // If the calling Uid is trusted (a native service), the AppOpsManager could
         // return MODE_IGNORED. Do not treat such case as error.
         if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            // Return the same error as for device policy manager rejection
-            return -EACCES;
+            bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
+                    mClientPackageName);
+            bool isCameraPrivacyEnabled =
+                    sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                            multiuser_get_user_id(mClientUid));
+            if (!isUidActive || !isCameraPrivacyEnabled) {
+                ALOGI("Camera %s: Access for \"%s\" has been restricted",
+                        mCameraIdStr.string(), String8(mClientPackageName).string());
+                // Return the same error as for device policy manager rejection
+                return -EACCES;
+            }
         }
     }
 
@@ -3026,15 +3041,22 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                        multiuser_get_user_id(mClientUid));
         ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
                 mCameraIdStr.string(), String8(mClientPackageName).string(),
                 mUidIsTrusted, isUidActive);
         // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
         // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
         // error.
-        if (!mUidIsTrusted && !isUidActive) {
+        if (!mUidIsTrusted && isUidActive && isCameraPrivacyEnabled) {
+            setCameraMute(true);
+        } else if (!mUidIsTrusted && !isUidActive) {
             block();
         }
+    } else if (res == AppOpsManager::MODE_ALLOWED) {
+        setCameraMute(sCameraService->mOverrideCameraMuteMode);
     }
 }
 
@@ -3307,6 +3329,7 @@
     if (mRegistered) {
         return;
     }
+    hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
     status_t res = mSpm.linkToDeath(this);
@@ -3316,39 +3339,6 @@
     }
 }
 
-status_t CameraService::SensorPrivacyPolicy::registerSelfForIndividual(int userId) {
-    Mutex::Autolock _l(mSensorPrivacyLock);
-    if (mRegistered) {
-        return OK;
-    }
-
-    status_t res = mSpm.addIndividualSensorPrivacyListener(userId,
-            SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, this);
-    if (res != OK) {
-        ALOGE("Unable to register camera privacy listener: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.isIndividualSensorPrivacyEnabled(userId,
-        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, mSensorPrivacyEnabled);
-    if (res != OK) {
-        ALOGE("Unable to check camera privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.linkToDeath(this);
-    if (res != OK) {
-        ALOGE("Register link to death failed for sensor privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    mRegistered = true;
-    mIsIndividual = true;
-    mUserId = userId;
-    ALOGV("SensorPrivacyPolicy: Registered with SensorPrivacyManager");
-    return OK;
-}
-
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
@@ -3362,20 +3352,24 @@
     return mSensorPrivacyEnabled;
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(userid_t userId) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isIndividualSensorPrivacyEnabled(userId,
+        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
         mSensorPrivacyEnabled = enabled;
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    sp<CameraService> service = mService.promote();
-    if (service != nullptr) {
-        if (mIsIndividual) {
-            service->setMuteForAllClients(mUserId, enabled);
-        } else {
-            if (enabled) {
-                service->blockAllClients();
-            }
+    if (enabled) {
+        sp<CameraService> service = mService.promote();
+        if (service != nullptr) {
+            service->blockAllClients();
         }
     }
     return binder::Status::ok();
@@ -3387,6 +3381,31 @@
     mRegistered = false;
 }
 
+bool CameraService::SensorPrivacyPolicy::hasCameraPrivacyFeature() {
+    if (!mNeedToCheckCameraPrivacyFeature) {
+        return mHasCameraPrivacyFeature;
+    }
+    bool hasCameraPrivacyFeature = false;
+    sp<IBinder> binder = defaultServiceManager()->getService(String16("package_native"));
+    if (binder != nullptr) {
+        sp<content::pm::IPackageManagerNative> packageManager =
+                interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (packageManager != nullptr) {
+            binder::Status status = packageManager->hasSystemFeature(
+                    String16("android.hardware.camera.toggle"), 0, &hasCameraPrivacyFeature);
+
+            if (status.isOk()) {
+                mNeedToCheckCameraPrivacyFeature = false;
+                mHasCameraPrivacyFeature = hasCameraPrivacyFeature;
+            } else {
+                ALOGE("Unable to check if camera privacy feature is supported");
+            }
+        }
+    }
+
+    return hasCameraPrivacyFeature;
+}
+
 // ----------------------------------------------------------------------------
 //                  CameraState
 // ----------------------------------------------------------------------------
@@ -4006,19 +4025,6 @@
     }
 }
 
-void CameraService::setMuteForAllClients(userid_t userId, bool enabled) {
-    const auto clients = mActiveClientManager.getAll();
-    for (auto& current : clients) {
-        if (current != nullptr) {
-            const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr
-                    && multiuser_get_user_id(basicClient->getClientUid()) == userId) {
-                basicClient->setCameraMute(enabled);
-            }
-        }
-    }
-}
-
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(sManageCameraPermission, nullptr, nullptr)) {
@@ -4225,16 +4231,4 @@
     return mode;
 }
 
-bool CameraService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mCameraSensorPrivacyPolicies.find(userId) == mCameraSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        if (userPolicy->registerSelfForIndividual(userId) != OK) {
-            return false;
-        }
-        mCameraSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mCameraSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 }; // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f962121..771981c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -190,7 +190,8 @@
 
     binder::Status      addListenerHelper(const sp<hardware::ICameraServiceListener>& listener,
             /*out*/
-            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false);
+            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false,
+            bool isProcessLocalTest = false);
 
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
@@ -219,6 +220,19 @@
             int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
+    // Methods to be used in CameraService class tests only
+    //
+    // CameraService class test method only - clear static variables in the
+    // cameraserver process, which otherwise might affect multiple test runs.
+    void                clearCachedVariables();
+
+    // Add test listener, linkToDeath won't be called since this is for process
+    // local testing.
+    binder::Status    addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            /*out*/
+            std::vector<hardware::CameraStatus>* cameraStatuses);
+
+    /////////////////////////////////////////////////////////////////////
     // Shared utilities
     static binder::Status filterGetInfoErrorCode(status_t err);
 
@@ -226,6 +240,7 @@
     // CameraClient functionality
 
     class BasicClient : public virtual RefBase {
+    friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
                 const String8& monitorTags) = 0;
@@ -650,13 +665,13 @@
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service)
                     : mService(service), mSensorPrivacyEnabled(false), mRegistered(false),
-                      mIsIndividual(false), mUserId(0) {}
+                    mHasCameraPrivacyFeature(false), mNeedToCheckCameraPrivacyFeature(true) {}
 
             void registerSelf();
-            status_t registerSelfForIndividual(int userId);
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
+            bool isCameraPrivacyEnabled(userid_t userId);
 
             binder::Status onSensorPrivacyChanged(bool enabled);
 
@@ -669,8 +684,10 @@
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
             bool mRegistered;
-            bool mIsIndividual;
-            userid_t mUserId;
+            bool mHasCameraPrivacyFeature;
+            bool mNeedToCheckCameraPrivacyFeature;
+
+            bool hasCameraPrivacyFeature();
     };
 
     sp<UidPolicy> mUidPolicy;
@@ -926,7 +943,10 @@
                       mIsVendorListener(isVendorClient),
                       mOpenCloseCallbackAllowed(openCloseCallbackAllowed) { }
 
-            status_t initialize() {
+            status_t initialize(bool isProcessLocalTest) {
+                if (isProcessLocalTest) {
+                    return OK;
+                }
                 return IInterface::asBinder(mListener)->linkToDeath(this);
             }
 
@@ -1046,9 +1066,6 @@
     // Blocks all active clients.
     void blockAllClients();
 
-    // Mutes all active clients for a user.
-    void setMuteForAllClients(userid_t userId, bool enabled);
-
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
@@ -1121,12 +1138,6 @@
 
     // Current camera mute mode
     bool mOverrideCameraMuteMode = false;
-
-    // Map from user to sensor privacy policy
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mCameraSensorPrivacyPolicies;
-
-    // Checks if the sensor privacy is enabled for the uid
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
 };
 
 } // namespace android
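A hedged sketch of how the new test-only hooks might be exercised from a process-local test; the listener class mirrors the TestCameraServiceListener defined in the fuzzer changes below, and everything apart from addListenerTest() and clearCachedVariables() is an assumption:

// Inside a hypothetical in-process test body:
sp<CameraService> service = new CameraService();
sp<TestCameraServiceListener> listener = new TestCameraServiceListener();
std::vector<hardware::CameraStatus> statuses;
// addListenerTest() skips linkToDeath(), which cannot be used on a local
// (same-process) binder, unlike the normal listener registration path.
binder::Status status = service->addListenerTest(listener, &statuses);
// ... exercise the service ...
// Clear static state (BasicClient::sCameraService) so later in-process test
// runs are not affected.
service->clearCachedVariables();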
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index c7d7c4b..3d74f0b 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -44,6 +44,7 @@
         "libcutils",
         "libcameraservice",
         "libcamera_client",
+        "liblog",
         "libui",
         "libgui",
         "android.hardware.camera.common@1.0",
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 54550a5..985b2f8 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -18,8 +18,18 @@
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
 
+#define LOG_TAG "CameraServiceFuzzer"
+//#define LOG_NDEBUG 0
+
 #include <CameraService.h>
+#include <device3/Camera3StreamInterface.h>
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/OutputConfiguration.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <private/android_filesystem_config.h>
@@ -30,6 +40,7 @@
 using namespace std;
 
 const int32_t kPreviewThreshold = 8;
+const int32_t kNumRequestsTested = 8;
 const nsecs_t kPreviewTimeout = 5000000000;  // .5 [s.]
 const nsecs_t kEventTimeout = 10000000000;   // 1 [s.]
 const size_t kMaxNumLines = USHRT_MAX;
@@ -39,6 +50,23 @@
                             hardware::ICameraService::CAMERA_TYPE_ALL};
 const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
                                  android::CameraService::API_VERSION_2};
+const uint8_t kSensorPixelModes[] = {ANDROID_SENSOR_PIXEL_MODE_DEFAULT,
+        ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION};
+const int32_t kRequestTemplates[] = {
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_PREVIEW,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_RECORD,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_MANUAL,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG
+};
+
+const int32_t kRotations[] = {
+    camera3::CAMERA_STREAM_ROTATION_0,
+    camera3::CAMERA_STREAM_ROTATION_90,
+    camera3::CAMERA_STREAM_ROTATION_270
+};
+
 const int kLayerMetadata[] = {
     0x00100000 /*GRALLOC_USAGE_RENDERSCRIPT*/, 0x00000003 /*GRALLOC_USAGE_SW_READ_OFTEN*/,
     0x00000100 /*GRALLOC_USAGE_HW_TEXTURE*/,   0x00000800 /*GRALLOC_USAGE_HW_COMPOSER*/,
@@ -69,15 +97,15 @@
 
 class CameraFuzzer : public ::android::hardware::BnCameraClient {
    public:
-    CameraFuzzer() = default;
+    CameraFuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) {};
     ~CameraFuzzer() { deInit(); }
-    bool init();
-    void process(const uint8_t *data, size_t size);
+    void process();
     void deInit();
 
    private:
-    FuzzedDataProvider *mFuzzedDataProvider = nullptr;
     sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
     sp<SurfaceComposerClient> mComposerClient = nullptr;
     int32_t mNumCameras = 0;
     size_t mPreviewBufferCount = 0;
@@ -167,19 +195,7 @@
     return rc;
 }
 
-bool CameraFuzzer::init() {
-    setuid(AID_MEDIA);
-    mCameraService = new CameraService();
-    if (mCameraService) {
-        return true;
-    }
-    return false;
-}
-
 void CameraFuzzer::deInit() {
-    if (mCameraService) {
-        mCameraService = nullptr;
-    }
     if (mComposerClient) {
         mComposerClient->dispose();
     }
@@ -298,12 +314,12 @@
     for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
         getCameraInformation(cameraId);
 
-        const String16 opPackageName("com.fuzzer.poc");
         ::android::binder::Status rc;
         sp<ICamera> cameraDevice;
 
-        rc = mCameraService->connect(this, cameraId, opPackageName, AID_MEDIA, AID_ROOT,
-                                     &cameraDevice);
+        rc = mCameraService->connect(this, cameraId, String16(),
+                android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
+                &cameraDevice);
         if (!rc.isOk()) {
             // camera not connected
             return;
@@ -405,8 +421,7 @@
     }
 }
 
-void CameraFuzzer::process(const uint8_t *data, size_t size) {
-    mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+void CameraFuzzer::process() {
     getNumCameras();
     invokeCameraSound();
     if (mNumCameras > 0) {
@@ -415,19 +430,169 @@
     invokeDump();
     invokeShellCommand();
     invokeNotifyCalls();
-    delete mFuzzedDataProvider;
+}
+
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+class Camera2Fuzzer {
+   public:
+    Camera2Fuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) {}
+    ~Camera2Fuzzer() {}
+    void process();
+   private:
+    sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
+};
+
+void Camera2Fuzzer::process() {
+    sp<TestCameraServiceListener> listener = new TestCameraServiceListener();
+    std::vector<hardware::CameraStatus> statuses;
+    mCameraService->addListenerTest(listener, &statuses);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, &device);
+        if (device == nullptr) {
+            continue;
+        }
+        device->beginConfigure();
+        sp<IGraphicBufferProducer> gbProducer;
+        sp<IGraphicBufferConsumer> gbConsumer;
+        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/8, /*controlledByApp*/true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Default to VGA dimensions, as those are guaranteed to be supported
+        gbConsumer->setDefaultBufferSize(640, 480);
+        gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+
+        String16 noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        OutputConfiguration output(gbProducer,
+                kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+        int streamId;
+        device->createStream(output, &streamId);
+        CameraMetadata sessionParams;
+        std::vector<int> offlineStreamIds;
+        device->endConfigure(/*isConstrainedHighSpeed*/ mFuzzedDataProvider->ConsumeBool(),
+                sessionParams, ns2ms(systemTime()), &offlineStreamIds);
+
+        CameraMetadata requestTemplate;
+        size_t requestTemplatesSize = sizeof(kRequestTemplates) / sizeof(int32_t) - 1;
+        device->createDefaultRequest(kRequestTemplates[
+                mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, requestTemplatesSize)],
+                /*out*/&requestTemplate);
+        hardware::camera2::CaptureRequest request;
+        request.mSurfaceList.add(surface);
+        request.mIsReprocess = false;
+        hardware::camera2::utils::SubmitInfo info;
+        for (int i = 0; i < kNumRequestsTested; i++) {
+            uint8_t sensorPixelMode =
+                    kSensorPixelModes[mFuzzedDataProvider->ConsumeBool() ? 1 : 0];
+            requestTemplate.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1);
+            request.mPhysicalCameraSettings.clear();
+            request.mPhysicalCameraSettings.push_back({s.cameraId.string(), requestTemplate});
+            device->submitRequest(request, /*streaming*/false, /*out*/&info);
+            ALOGV("%s : camera id %s submit request id %d",__FUNCTION__, s.cameraId.string(),
+                    info.mRequestId);
+        }
+        device->disconnect();
+    }
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     if (size < 1) {
         return 0;
     }
-    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer();
+    setuid(AID_CAMERASERVER);
+    std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
+    sp<CameraService> cs = new CameraService();
+    cs->clearCachedVariables();
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(cs, fp);
     if (!camerafuzzer) {
         return 0;
     }
-    if (camerafuzzer->init()) {
-        camerafuzzer->process(data, size);
-    }
+    camerafuzzer->process();
+    Camera2Fuzzer camera2fuzzer(cs, fp);
+    camera2fuzzer.process();
     return 0;
 }
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 2b797b8..dbc68b2 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -245,7 +245,7 @@
                 });
             }));
 
-    // Handle legacy aaudio stream statistics
+    // Handle legacy aaudio playback stream statistics
     mActions.addAction(
         AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_EVENT,
         std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
@@ -254,6 +254,15 @@
                 mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
             }));
 
+    // Handle legacy aaudio capture stream statistics
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
+            }));
+
     // Handle mmap aaudio stream statistics
     mActions.addAction(
         AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*." AMEDIAMETRICS_PROP_EVENT,
@@ -949,7 +958,8 @@
             key, AMEDIAMETRICS_PROP_CHANNELCOUNT, &channelCount);
 
     int64_t totalFramesTransferred = -1;
-    // TODO: log and get total frames transferred
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_FRAMESTRANSFERRED, &totalFramesTransferred);
 
     std::string perfModeRequestedStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -957,8 +967,11 @@
     const auto perfModeRequested =
             types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeRequestedStr);
 
-    int32_t perfModeActual = 0;
-    // TODO: log and get actual performance mode
+    std::string perfModeActualStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL, &perfModeActualStr);
+    const auto perfModeActual =
+            types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeActualStr);
 
     std::string sharingModeStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -972,8 +985,10 @@
     std::string serializedDeviceTypes;
     // TODO: only routed device id is logged, but no device type
 
-    int32_t formatApp = 0;
-    // TODO: log format from app
+    std::string formatAppStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ENCODINGREQUESTED, &formatAppStr);
+    const auto formatApp = types::lookup<types::ENCODING, int32_t>(formatAppStr);
 
     std::string formatDeviceStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -981,7 +996,8 @@
     const auto formatDevice = types::lookup<types::ENCODING, int32_t>(formatDeviceStr);
 
     std::string logSessionId;
-    // TODO: log logSessionId
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
     int32_t sampleRate = 0;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -1001,11 +1017,11 @@
             << " channel_count:" << channelCount
             << " total_frames_transferred:" << totalFramesTransferred
             << " perf_mode_requested:" << perfModeRequested << "(" << perfModeRequestedStr << ")"
-            << " perf_mode_actual:" << perfModeActual
+            << " perf_mode_actual:" << perfModeActual << "(" << perfModeActualStr << ")"
             << " sharing:" << sharingMode << "(" << sharingModeStr << ")"
             << " xrun_count:" << xrunCount
             << " device_type:" << serializedDeviceTypes
-            << " format_app:" << formatApp
+            << " format_app:" << formatApp << "(" << formatAppStr << ")"
             << " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
             << " log_session_id: " << logSessionId
             << " sample_rate: " << sampleRate
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 5e672ee..1d64878 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -545,12 +545,13 @@
         return AStatsManager_PULL_SKIP;
     }
     std::lock_guard _l(mLock);
+    bool dumped = false;
     for (auto &item : mPullableItems[key]) {
         if (const auto sitem = item.lock()) {
-            dump2Statsd(sitem, data, mStatsdLog);
+            dumped |= dump2Statsd(sitem, data, mStatsdLog);
         }
     }
     mPullableItems[key].clear();
-    return AStatsManager_PULL_SUCCESS;
+    return dumped ? AStatsManager_PULL_SUCCESS : AStatsManager_PULL_SKIP;
 }
 } // namespace android
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 8a2158f..381f441 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -33,7 +33,7 @@
 
 #include "cleaner.h"
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
@@ -51,7 +51,7 @@
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::CodecData metrics_proto;
+    ::android::stats::mediametrics_message::CodecData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 27fd089..73b8872 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -188,6 +188,11 @@
     (void) item->getString("plugin_metrics", &plugin_metrics);
     const auto plugin_raw(base64DecodeNoPad(plugin_metrics));
 
+    if (serialized_metrics.size() == 0 && plugin_metrics.size() == 0) {
+        ALOGD("statsd_mediadrm_puller skipping empty entry");
+        return false;
+    }
+
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index faea58f..13dd3d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,6 +38,10 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
+AAudioServiceEndpoint::~AAudioServiceEndpoint() {
+    ALOGD("%s() called", __func__);
+}
+
 std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 72090c2..a7f63d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -43,7 +43,7 @@
         , public AAudioStreamParameters {
 public:
 
-    virtual ~AAudioServiceEndpoint() = default;
+    virtual ~AAudioServiceEndpoint();
 
     virtual std::string dump() const;
 
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 556710d..7294a58 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -51,8 +51,6 @@
         : mMmapStream(nullptr)
         , mAAudioService(audioService) {}
 
-AAudioServiceEndpointMMAP::~AAudioServiceEndpointMMAP() {}
-
 std::string AAudioServiceEndpointMMAP::dump() const {
     std::stringstream result;
 
@@ -357,7 +355,10 @@
 // This is called by AudioFlinger when it wants to destroy a stream.
 void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
     ALOGD("%s(portHandle = %d) called", __func__, portHandle);
-    std::thread asyncTask(&AAudioServiceEndpointMMAP::handleTearDownAsync, this, portHandle);
+    android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+    std::thread asyncTask([holdEndpoint, portHandle]() {
+        holdEndpoint->handleTearDownAsync(portHandle);
+    });
     asyncTask.detach();
 }
 
@@ -378,9 +379,11 @@
     ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
     if (getDeviceId() != deviceId) {
         if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-            std::thread asyncTask([this, deviceId]() {
-                disconnectRegisteredStreams();
-                setDeviceId(deviceId);
+            android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+            std::thread asyncTask([holdEndpoint, deviceId]() {
+                ALOGD("onRoutingChanged() asyncTask launched");
+                holdEndpoint->disconnectRegisteredStreams();
+                holdEndpoint->setDeviceId(deviceId);
             });
             asyncTask.detach();
         } else {
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 24b161d..5a53885 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -44,7 +44,7 @@
 public:
     explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
 
-    virtual ~AAudioServiceEndpointMMAP();
+    virtual ~AAudioServiceEndpointMMAP() = default;
 
     std::string dump() const override;
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 8638f36..dbacd75 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -67,8 +67,7 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                        || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
+                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
                         "service stream %p still open, state = %d",
                         this, getState());
 }
@@ -229,7 +228,7 @@
     aaudio_result_t result = AAUDIO_OK;
 
     if (auto state = getState();
-        state == AAUDIO_STREAM_STATE_CLOSED || state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        state == AAUDIO_STREAM_STATE_CLOSED || isDisconnected_l()) {
         ALOGW("%s() already CLOSED, returns INVALID_STATE, handle = %d",
                 __func__, getHandle());
         return AAUDIO_ERROR_INVALID_STATE;
@@ -261,8 +260,14 @@
     sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
     setState(AAUDIO_STREAM_STATE_STARTED);
     mThreadEnabled.store(true);
+    // Make sure this object does not get deleted before the run() method
+    // can protect it by making a strong pointer.
+    incStrong(nullptr); // See run() method.
     result = mTimestampThread.start(this);
-    if (result != AAUDIO_OK) goto error;
+    if (result != AAUDIO_OK) {
+        decStrong(nullptr); // run() can't do it so we have to do it here.
+        goto error;
+    }
 
     return result;
 
@@ -336,7 +341,12 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
+    // Temporarily unlock because we are joining the timestamp thread and it may try
+    // to acquire mLock.
+    mLock.unlock();
     result = stopTimestampThread();
+    mLock.lock();
+
     if (result != AAUDIO_OK) {
         disconnect_l();
         return result;
@@ -396,7 +406,12 @@
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> TIMESTAMPS", __func__, getTypeText());
+    // Hold onto the ref counted stream until the end.
+    android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
+    // Balance the incStrong from when the thread was launched.
+    holdStream->decStrong(nullptr);
+
     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
     timestampScheduler.start(AudioClock::getNanoseconds());
     int64_t nextTime = timestampScheduler.nextAbsoluteTime();
@@ -432,8 +447,7 @@
 }
 
 void AAudioServiceStreamBase::disconnect_l() {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
-        && getState() != AAUDIO_STREAM_STATE_CLOSED) {
+    if (!isDisconnected_l() && getState() != AAUDIO_STREAM_STATE_CLOSED) {
 
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
@@ -441,7 +455,7 @@
             .record();
 
         sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected_l(true);
     }
 }
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 8e5c8ef..c42df0f 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -265,6 +265,13 @@
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
 
+    bool isDisconnected_l() const REQUIRES(mLock) {
+        return mDisconnected;
+    }
+    void setDisconnected_l(bool flag) REQUIRES(mLock) {
+        mDisconnected = flag;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -322,6 +329,8 @@
     // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
     std::atomic<bool>       mSuspended{false};
 
+    bool                    mDisconnected GUARDED_BY(mLock) {false};
+
 protected:
     // Locking order is important.
     // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index bdf826c..b7ae167 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -111,11 +111,11 @@
     DemuxPid hidlPid;
     switch (pid.getTag()) {
         case TunerDemuxPid::tPid: {
-            hidlPid.tPid((uint16_t)pid.tPid);
+            hidlPid.tPid((uint16_t)pid.get<TunerDemuxPid::tPid>());
             break;
         }
         case TunerDemuxPid::mmtpPid: {
-            hidlPid.mmtpPid((uint16_t)pid.mmtpPid);
+            hidlPid.mmtpPid((uint16_t)pid.get<TunerDemuxPid::mmtpPid>());
             break;
         }
     }
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index efe396a..039fd31 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -57,10 +57,10 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    MQDesc dvrMQDesc;
+    MQDesc filterMQDesc;
     Result res;
     mFilter->getQueueDesc([&](Result r, const MQDesc& desc) {
-        dvrMQDesc = desc;
+        filterMQDesc = desc;
         res = r;
     });
     if (res != Result::SUCCESS) {
@@ -69,7 +69,7 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
-                dvrMQDesc,  &aidlMQDesc);
+                filterMQDesc,  &aidlMQDesc);
     *_aidl_return = move(aidlMQDesc);
     return Status::ok();
 }