Merge "AudioAidlLegacyConversionTest: Added test for Aidl2Legacy2Aidl."
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 913854c..b6f8552 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3486,10 +3486,11 @@
      * </ul></p>
      *
      * <p>This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which contains a list
-     * of keys that can be overridden using <a href="https://developer.android.com/reference/CaptureRequest/Builder.html#setPhysicalCameraKey">Builder#setPhysicalCameraKey</a>.
+     * of keys that can be overridden using
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.Builder.html#setPhysicalCameraKey">Builder#setPhysicalCameraKey</a>.
     * The value of each such request key can be obtained by calling
-     * <a href="https://developer.android.com/reference/CaptureRequest/Builder.html#getPhysicalCameraKey">Builder#getPhysicalCameraKey</a>. Capture requests that contain
-     * individual physical device requests must be built via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.Builder.html#getPhysicalCameraKey">Builder#getPhysicalCameraKey</a>.
+     * Capture requests that contain individual physical device requests must be built via
     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureRequest(int,%20java.util.Set)">CameraDevice#createCaptureRequest(int, Set)</a>.</p>
      *
      * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
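For reference, the key list this tag documents can be read back at runtime through the NDK metadata API. A minimal sketch, assuming "chars" is an ACameraMetadata obtained from ACameraManager_getCameraCharacteristics() and <camera/NdkCameraMetadata.h> is included:

    ACameraMetadata_const_entry entry = {};
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, &entry);
    if (status == ACAMERA_OK) {
        for (uint32_t i = 0; i < entry.count; i++) {
            // Each element is a tag that may be overridden per physical device
            // via Builder#setPhysicalCameraKey.
            int32_t overridableTag = entry.data.i32[i];
            (void)overridableTag;
        }
    }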
@@ -3713,7 +3714,7 @@
      * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
     * <p>For applications targeting SDK version 31 or newer, if the mobile device declares
     * itself to be media performance class 12 or higher by setting
-     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION.html#MEDIA_PERFORMANCE_CLASS">VERSION#MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
      * the primary camera devices (first rear/front camera in the camera ID list) will not
      * support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
      * smaller than 1080p, the camera device will round up the JPEG image size to at least
@@ -3732,7 +3733,7 @@
      * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
     * <p>For applications targeting SDK version 31 or newer, if the mobile device doesn't
     * declare itself to be media performance class 12 or higher by setting
-     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION.html#MEDIA_PERFORMANCE_CLASS">VERSION#MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
      * or if the camera device isn't a primary rear/front camera, the minimum required output
      * stream configurations are the same as for applications targeting SDK version older than
      * 31.</p>
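There is no direct NDK counterpart to Build.VERSION#MEDIA_PERFORMANCE_CLASS. A hedged sketch of a native-side check, assuming the SDK field is backed by the ro.odm.build.media_performance_class system property:

    #include <sys/system_properties.h>
    #include <cstdlib>

    // Returns the declared media performance class, or 0 if none is declared.
    static int mediaPerformanceClass() {
        char value[PROP_VALUE_MAX] = {};
        if (__system_property_get("ro.odm.build.media_performance_class", value) <= 0) {
            return 0;
        }
        return atoi(value);  // 31 or larger: performance class 12 or higher
    }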
@@ -10225,7 +10226,7 @@
      * fire the flash for flash power metering during precapture, and then fire the flash
      * for the final capture, if a flash is available on the device and the AE mode is set to
      * enable the flash.</p>
-     * <p>Devices that initially shipped with Android version <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES.html#Q">Q</a> or newer will not include any LEGACY-level devices.</p>
+     * <p>Devices that initially shipped with Android version <a href="https://developer.android.com/reference/android/os/Build.VERSION_CODES.html#Q">Q</a> or newer will not include any LEGACY-level devices.</p>
      *
      * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
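The metering sequence described here is started from the NDK by writing the trigger into a capture request. A minimal sketch, assuming "request" is an ACaptureRequest and <camera/NdkCaptureRequest.h> is included:

    // Request one run of the AE precapture metering sequence before the
    // final capture; the trigger takes effect for the request it is set in.
    uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
    camera_status_t status = ACaptureRequest_setEntry_u8(
            request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &trigger);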
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c38dbef..055f20a 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -309,7 +309,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
+    if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
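reportFrameworkMetrics(reportPluginMetrics()) calls into the plugin, which only exists after a successful init, so the destructor now skips it when initialization never completed. The shape of the guard, with hypothetical class and member names:

    // Hypothetical sketch: teardown reports through plugin-backed state
    // only when init succeeded, then always resets to the NO_INIT state.
    MyDrmHal::~MyDrmHal() {
        closeOpenSessions();
        Mutex::Autolock autoLock(mLock);
        if (mInitCheck == OK) {
            reportFrameworkMetrics(reportPluginMetrics());
        }
        setListener(NULL);
        mInitCheck = NO_INIT;
    }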
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 923f5c1..691eede 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -344,19 +344,22 @@
     mFrameDecoded = false;
     mFrameMemory.clear();
 
-    mRetriever = new MediaMetadataRetriever();
-    status_t err = mRetriever->setDataSource(mDataSource, "image/heif");
+    sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
+    status_t err = retriever->setDataSource(mDataSource, "image/heif");
     if (err != OK) {
         ALOGE("failed to set data source!");
-
         mRetriever.clear();
         mDataSource.clear();
         return false;
     }
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever = retriever;
+    }
     ALOGV("successfully set data source.");
 
-    const char* hasImage = mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
-    const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
+    const char* hasImage = retriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
+    const char* hasVideo = retriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
 
     mHasImage = hasImage && !strcasecmp(hasImage, "yes");
     mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
@@ -364,7 +367,7 @@
     HeifFrameInfo* defaultInfo = nullptr;
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        sp<IMemory> sharedMem = mRetriever->getImageAtIndex(
+        sp<IMemory> sharedMem = retriever->getImageAtIndex(
                 -1, mOutputColor, true /*metaOnly*/);
 
         if (sharedMem == nullptr || sharedMem->unsecurePointer() == nullptr) {
@@ -399,7 +402,7 @@
     }
 
     if (mHasVideo) {
-        sp<IMemory> sharedMem = mRetriever->getFrameAtTime(0,
+        sp<IMemory> sharedMem = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                 mOutputColor, true /*metaOnly*/);
 
@@ -425,7 +428,7 @@
 
         initFrameInfo(&mSequenceInfo, videoFrame);
 
-        const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+        const char* frameCount = retriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
         if (frameCount == nullptr) {
             android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
             ALOGD("No valid sequence information in metadata");
@@ -511,14 +514,27 @@
 }
 
 bool HeifDecoderImpl::decodeAsync() {
+    wp<MediaMetadataRetriever> weakRetriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        weakRetriever = mRetriever;
+        mRetriever.clear();
+    }
+
     for (size_t i = 1; i < mNumSlices; i++) {
+        sp<MediaMetadataRetriever> retriever = weakRetriever.promote();
+        if (retriever == nullptr) {
+            return false;
+        }
+
         ALOGV("decodeAsync(): decoding slice %zu", i);
         size_t top = i * mSliceHeight;
         size_t bottom = (i + 1) * mSliceHeight;
         if (bottom > mImageInfo.mHeight) {
             bottom = mImageInfo.mHeight;
         }
-        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+
+        sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                 -1, mOutputColor, 0, top, mImageInfo.mWidth, bottom);
         {
             Mutex::Autolock autolock(mLock);
@@ -534,9 +550,6 @@
             mScanlineReady.signal();
         }
     }
-    // Aggressive clear to avoid holding on to resources
-    mRetriever.clear();
-
     // Hold on to mDataSource in case the client wants to redecode.
     return false;
 }
@@ -549,6 +562,17 @@
         return true;
     }
 
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        if (mRetriever == nullptr) {
+            ALOGE("Failed to get MediaMetadataRetriever!");
+            return false;
+        }
+
+        retriever = mRetriever;
+    }
+
     // See if we want to decode in slices to allow client to start
     // scanline processing in parallel with decode. If this fails
     // we fallback to decoding the full frame.
@@ -563,7 +587,7 @@
 
         if (mNumSlices > 1) {
             // get first slice and metadata
-            sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+            sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                     -1, mOutputColor, 0, 0, mImageInfo.mWidth, mSliceHeight);
 
             if (frameMemory == nullptr || frameMemory->unsecurePointer() == nullptr) {
@@ -598,9 +622,9 @@
 
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
+        mFrameMemory = retriever->getImageAtIndex(-1, mOutputColor);
     } else if (mHasVideo) {
-        mFrameMemory = mRetriever->getFrameAtTime(0,
+        mFrameMemory = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
     }
 
@@ -658,7 +682,17 @@
     // set total scanline to sequence height now
     mTotalScanline = mSequenceInfo.mHeight;
 
-    mFrameMemory = mRetriever->getFrameAtIndex(frameIndex, mOutputColor);
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        retriever = mRetriever;
+        if (retriever == nullptr) {
+            ALOGE("Failed to get MediaMetadataRetriever!");
+            return false;
+        }
+    }
+
+    mFrameMemory = retriever->getFrameAtIndex(frameIndex, mOutputColor);
     if (mFrameMemory == nullptr || mFrameMemory->unsecurePointer() == nullptr) {
         ALOGE("decode: videoFrame is a nullptr");
         return false;
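Taken together, the mRetrieverLock/wp<> changes close a race between the slice-decoding worker thread and the paths that replace or drop mRetriever: the worker keeps only a weak reference and re-promotes it per slice, so a concurrent teardown makes the promote fail instead of leaving a dangling pointer. A self-contained sketch of the pattern, using std:: smart pointers in place of android::sp/wp and hypothetical names (simplified: unlike decodeAsync() above, it does not drop the decoder's own strong reference at thread start):

    #include <cstddef>
    #include <memory>
    #include <mutex>

    struct Retriever {};  // stands in for MediaMetadataRetriever

    class Decoder {
        std::mutex mRetrieverLock;
        std::shared_ptr<Retriever> mRetriever;  // guarded by mRetrieverLock

      public:
        // Client-facing teardown; may run while the worker is decoding.
        void reset() {
            std::lock_guard<std::mutex> lock(mRetrieverLock);
            mRetriever.reset();  // drop the strong reference under the lock
        }

        // Worker thread; never dereferences mRetriever directly.
        bool decodeSlices(std::size_t numSlices) {
            std::weak_ptr<Retriever> weak;
            {
                std::lock_guard<std::mutex> lock(mRetrieverLock);
                weak = mRetriever;
            }
            // Slice 0 is assumed decoded synchronously, as in decode() above.
            for (std::size_t i = 1; i < numSlices; i++) {
                // Re-promote per slice: after a concurrent reset(), lock()
                // returns null and the worker bails out cleanly.
                std::shared_ptr<Retriever> strong = weak.lock();
                if (!strong) {
                    return false;
                }
                // ... decode slice i through strong ...
            }
            return true;
        }
    };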
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 86a8628..c1504cd 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -72,6 +72,8 @@
     bool mHasVideo;
     size_t mSequenceLength;
 
+    Mutex mRetrieverLock;
+
     // Slice decoding only
     Mutex mLock;
     Condition mScanlineReady;
diff --git a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
index e7908eb..c3b19c2 100644
--- a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
@@ -47,6 +47,7 @@
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
         <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
+        <devicePort tagName="BLE Broadcast Out" type="AUDIO_DEVICE_OUT_BLE_BROADCAST" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -63,5 +64,7 @@
                sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
+        <route type="mix" sink="BLE Broadcast Out"
+               sources="le audio output"/>
     </routes>
 </module>
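On the native side, the new sink maps to the AUDIO_DEVICE_OUT_BLE_BROADCAST constant. An illustrative check for the BLE output family, assuming <system/audio.h> from the AOSP audio headers:

    #include <system/audio.h>

    // Sketch: true for any of the three BLE output devices routed above.
    static bool isBleOutputDevice(audio_devices_t device) {
        return device == AUDIO_DEVICE_OUT_BLE_HEADSET
            || device == AUDIO_DEVICE_OUT_BLE_SPEAKER
            || device == AUDIO_DEVICE_OUT_BLE_BROADCAST;
    }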