Merge "Fix toDrmMetricGroupHidl conversion"
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index bc83ec1..2a07ffc 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -118,6 +118,12 @@
         return err;
     }
 
+    int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+        ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+        return err;
+    }
+
     mWidth = width;
     mHeight = height;
     mFormat = format;
@@ -132,6 +138,7 @@
     mHistogramBins = std::move(histogramBins);
     mHistogramCounts = std::move(histogramCounts);
     mDynamicRangeProfile = dynamicRangeProfile;
+    mStreamUseCase = streamUseCase;
 
     return OK;
 }
@@ -214,6 +221,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mStreamUseCase)) != OK) {
+        ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
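
The new field is appended after the dynamic range profile on both the read and the write path above, so the parcel layout stays symmetric. A minimal round-trip sketch of how that symmetry could be checked (hypothetical gtest code, not part of this change; it assumes CameraStreamStats is reachable via camera/CameraSessionStats.h in the android::hardware namespace as in this tree):

    #include <binder/Parcel.h>
    #include <camera/CameraSessionStats.h>
    #include <gtest/gtest.h>

    using android::hardware::CameraStreamStats;

    // Hypothetical test: write a CameraStreamStats carrying a non-default
    // stream use case and check that the value survives a parcel round trip.
    TEST(CameraStreamStatsTest, StreamUseCaseRoundTrip) {
        CameraStreamStats stats(/*width*/ 1920, /*height*/ 1080, /*format*/ 0x21,
                /*dataSpace*/ 0, /*usage*/ 0, /*maxHalBuffers*/ 3, /*maxAppBuffers*/ 2,
                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);

        android::Parcel parcel;
        ASSERT_EQ(android::OK, stats.writeToParcel(&parcel));

        parcel.setDataPosition(0);
        CameraStreamStats readBack;
        ASSERT_EQ(android::OK, readBack.readFromParcel(&parcel));
        EXPECT_EQ(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW, readBack.mStreamUseCase);
    }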
 
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 15c9dc9..0982bba 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -81,6 +81,10 @@
     return mDynamicRangeProfile;
 }
 
+int OutputConfiguration::getStreamUseCase() const {
+    return mStreamUseCase;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -90,7 +94,8 @@
         mIsDeferred(false),
         mIsShared(false),
         mIsMultiResolution(false),
-        mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+        mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -177,6 +182,12 @@
         return err;
     }
 
+    int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+        ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -185,6 +196,7 @@
     mIsDeferred = isDeferred != 0;
     mIsShared = isShared != 0;
     mIsMultiResolution = isMultiResolution != 0;
+    mStreamUseCase = streamUseCase;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -196,8 +208,9 @@
     mDynamicRangeProfile = dynamicProfile;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
-          " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
-          mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
+          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d", __FUNCTION__,
+          mRotation, mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(),
+          mIsMultiResolution, mStreamUseCase);
 
     return err;
 }
@@ -213,6 +226,7 @@
     mPhysicalCameraId = physicalId;
     mIsMultiResolution = false;
     mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -222,7 +236,8 @@
   : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
     mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
-    mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) { }
+    mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+    mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -272,6 +287,9 @@
     err = parcel->writeInt32(mDynamicRangeProfile ? 1 : 0);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mStreamUseCase);
+    if (err != OK) return err;
+
     return OK;
 }
 
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 1209a20..ab2d903 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -64,19 +64,24 @@
 
     // Dynamic range profile
     int mDynamicRangeProfile;
+    // Stream use case
+    int mStreamUseCase;
 
     CameraStreamStats() :
             mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
             mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
-            mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {}
+            mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
     CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
-            int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile)
+            int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
+            int streamUseCase)
             : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
               mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
               mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
               mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
-              mDynamicRangeProfile(dynamicRangeProfile) {}
+              mDynamicRangeProfile(dynamicRangeProfile),
+              mStreamUseCase(streamUseCase) {}
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 1631903..f1cb6bd 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -49,6 +49,7 @@
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
     bool                       isMultiResolution() const;
+    int                        getStreamUseCase() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -91,7 +92,8 @@
                 mPhysicalCameraId == other.mPhysicalCameraId &&
                 mIsMultiResolution == other.mIsMultiResolution &&
                 sensorPixelModesUsedEqual(other) &&
-                mDynamicRangeProfile == other.mDynamicRangeProfile);
+                mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mStreamUseCase == other.mStreamUseCase);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -131,6 +133,9 @@
         if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
             return mDynamicRangeProfile < other.mDynamicRangeProfile;
         }
+        if (mStreamUseCase != other.mStreamUseCase) {
+            return mStreamUseCase < other.mStreamUseCase;
+        }
         return gbpsLessThan(other);
     }
 
@@ -156,6 +161,7 @@
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int                        mDynamicRangeProfile;
+    int                        mStreamUseCase;
 };
 } // namespace params
 } // namespace camera2
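
Each new member added here has to be threaded through operator==, through operator< ahead of the gbpsLessThan() tie-breaker, and through the default lists of every constructor, or equality checks and set/map ordering quietly go stale. For readability only, the two trailing comparisons above behave like this lexicographic std::tie sketch (illustrative, not how the class is actually written; the real operator< compares several more members first):

    #include <tuple>

    // Illustrative only: each chained "if (a != b) return a < b;" block is the
    // same strict-weak ordering as a lexicographic std::tie comparison, with
    // the newly added field placed last before the gbpsLessThan() fallback.
    struct OrderingSketch {
        int dynamicRangeProfile;
        int streamUseCase;

        bool operator<(const OrderingSketch& other) const {
            return std::tie(dynamicRangeProfile, streamUseCase)
                 < std::tie(other.dynamicRangeProfile, other.streamUseCase);
        }
    };
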
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index bd281c8..5cc6759 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4206,6 +4206,55 @@
      */
     ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED =          // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
             ACAMERA_SCALER_START + 24,
+    /**
+     * <p>The stream use cases supported by this camera device.</p>
+     *
+     * <p>Type: int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The stream use case indicates the purpose of a particular camera stream from
+     * the end-user perspective. Some examples of camera use cases are: preview stream for
+     * live viewfinder shown to the user, still capture for generating high quality photos,
+     * video record for encoding the camera output for the purpose of future playback,
+     * and video call for live realtime video conferencing.</p>
+     * <p>With this flag, the camera device can optimize the image processing pipeline
+     * parameters, such as tuning, sensor mode, and ISP settings, independent of
+     * the properties of the immediate camera output surface. For example, if the output
+     * surface is a SurfaceTexture, the stream use case flag can be used to indicate whether
+     * the camera frames eventually go to display, video encoder,
+     * still image capture, or all of them combined.</p>
+     * <p>The application sets the use case of a camera stream by calling
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>.</p>
+     * <p>A camera device with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+     * capability must support the following stream use cases:</p>
+     * <ul>
+     * <li>DEFAULT</li>
+     * <li>PREVIEW</li>
+     * <li>STILL_CAPTURE</li>
+     * <li>VIDEO_RECORD</li>
+     * <li>PREVIEW_VIDEO_STILL</li>
+     * <li>VIDEO_CALL</li>
+     * </ul>
+     * <p>The guaranteed stream combinations related to stream use case for a camera device with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+     * capability is documented in the camera device
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
+     * application is strongly recommended to use one of the guaranteed stream combinations.
+     * If the application creates a session with a stream combination not in the guaranteed
+     * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
+     * the camera device may ignore some stream use cases due to hardware constraints
+     * and implementation details.</p>
+     * <p>For stream combinations not covered by the stream use case mandatory lists, such as
+     * reprocessable session, constrained high speed session, or RAW stream combinations, the
+     * application should leave stream use cases within the session as DEFAULT.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+            ACAMERA_SCALER_START + 25,
     ACAMERA_SCALER_END,
 
     /**
@@ -9142,6 +9191,35 @@
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
                                                                       = 16,
 
+    /**
+     * <p>The camera device supports selecting a per-stream use case via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>
+     * so that the device can optimize camera pipeline parameters such as tuning, sensor
+     * mode, or ISP settings for a specific user scenario.
+     * Some sample usages of this capability are:
+     * * Distinguish high quality YUV captures from a regular YUV stream where
+     *   the image quality may not be as good as the JPEG stream, or
+     * * Use one stream to serve multiple purposes: viewfinder, video recording and
+     *   still capture. This is common with applications that wish to apply edits equally
+     *   to preview, saved images, and saved videos.</p>
+     * <p>This capability requires the camera device to support the following
+     * stream use cases:
+     * * DEFAULT for backward compatibility where the application doesn't set
+     *   a stream use case
+     * * PREVIEW for live viewfinder and in-app image analysis
+     * * STILL_CAPTURE for still photo capture
+     * * VIDEO_RECORD for recording video clips
+     * * PREVIEW_VIDEO_STILL for a single stream used for viewfinder, video
+     *   recording, and still capture.
+     * * VIDEO_CALL for long running video calls</p>
+     * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
+     * lists all of the supported stream use cases.</p>
+     * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
+     * mandatory stream combinations involving stream use cases, which can also be queried
+     * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE           = 19,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
 
 // ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
@@ -9420,6 +9498,76 @@
 
 } acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
 
+// ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_use_cases {
+    /**
+     * <p>Default stream use case.</p>
+     * <p>This use case is the same as when the application doesn't set any use case for
+     * the stream. The camera device uses the properties of the output target, such as
+     * format, dataSpace, or surface class type, to optimize the image processing pipeline.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT                = 0x0,
+
+    /**
+     * <p>Live stream shown to the user.</p>
+     * <p>Optimized for performance and usability as a viewfinder, but not necessarily for
+     * image quality. The output is not meant to be persisted as saved images or video.</p>
+     * <p>No stall if android.control.* are set to FAST; may have stall if android.control.*
+     * are set to HIGH_QUALITY. This use case has the same behavior as the default
+     * SurfaceView and SurfaceTexture targets. Additionally, this use case can be used for
+     * in-app image analysis.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW                = 0x1,
+
+    /**
+     * <p>Still photo capture.</p>
+     * <p>Optimized for high-quality high-resolution capture, and not expected to maintain
+     * preview-like frame rates.</p>
+     * <p>The stream may have stalls regardless of whether ACAMERA_CONTROL_* is HIGH_QUALITY.
+     * This use case has the same behavior as the default JPEG and RAW related formats.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE          = 0x2,
+
+    /**
+     * <p>Recording video clips.</p>
+     * <p>Optimized for high-quality video capture, including high-quality image stabilization
+     * if supported by the device and enabled by the application. As a result, may produce
+     * output frames with a substantial lag from real time, to allow for highest-quality
+     * stabilization or other processing. As such, such an output is not suitable for drawing
+     * to screen directly, and is expected to be persisted to disk or similar for later
+     * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
+     * to have video stabilization applied when the video stabilization control is set
+     * to ON, as opposed to PREVIEW_STABILIZATION.</p>
+     * <p>This use case has the same behavior as the default MediaRecorder and MediaCodec
+     * targets.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD           = 0x3,
+
+    /**
+     * <p>A single stream used for the combined purposes of preview, video, and still capture.</p>
+     * <p>For such multi-purpose streams, the camera device aims to make the best tradeoff
+     * between the individual use cases. For example, the STILL_CAPTURE use case by itself
+     * may have stalls for achieving best image quality. But if combined with PREVIEW and
+     * VIDEO_RECORD, the camera device needs to trade off the additional image processing
+     * for speed so that preview and video recording aren't slowed down.</p>
+     * <p>Similarly, VIDEO_RECORD may produce frames with a substantial lag, but
+     * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video
+     * stabilization with this use case, the device must support and the app must select the
+     * PREVIEW_STABILIZATION mode for video stabilization.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL    = 0x4,
+
+    /**
+     * <p>Long-running video call optimized for both power efficiency and video quality.</p>
+     * <p>The camera sensor may run in a lower-resolution mode to reduce power consumption
+     * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL
+     * outputs are expected to work in dark conditions, so are usually accompanied by
+     * variable frame rate settings to allow sufficient exposure time in low light.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL             = 0x5,
+
+} acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
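
For NDK clients, the new characteristics key is read like any other static tag. A sketch of listing the advertised use cases (error handling abbreviated; it assumes cameraId was obtained from ACameraManager_getCameraIdList()):

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <stdio.h>

    // Print every stream use case advertised by the given camera device.
    void printStreamUseCases(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return;
        }
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES, &entry) == ACAMERA_OK) {
            for (uint32_t i = 0; i < entry.count; ++i) {
                // Values map to acamera_metadata_enum_android_scaler_available_stream_use_cases_t.
                printf("supported stream use case: %d\n", entry.data.i32[i]);
            }
        }
        ACameraMetadata_free(chars);
    }
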
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ec16bc2..9783855 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -70,6 +70,10 @@
 
 using namespace android;
 
+namespace {
+    constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
+}
+
 static long gNumRepetitions;
 static long gMaxNumFrames;  // 0 means decode all available.
 static long gReproduceBug;  // if not -1.
@@ -626,7 +630,14 @@
     fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
     fprintf(stderr, "       -b bug to reproduce\n");
     fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
-    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art\n");
+    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
+    fprintf(stderr, "       -P(ixelFormat) pixel format to use for raw thumbnail "
+                    "(/sdcard/out.raw)\n");
+    fprintf(stderr, "          %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
+    fprintf(stderr, "          %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
+    fprintf(stderr, "          %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
+    fprintf(stderr, "          %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
+    fprintf(stderr, "          %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
     fprintf(stderr, "       -s(oftware) prefer software codec\n");
     fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
     fprintf(stderr, "       -o playback audio\n");
@@ -784,6 +795,7 @@
     bool useSurfaceTexAlloc = false;
     bool dumpStream = false;
     bool dumpPCMStream = false;
+    int32_t pixelFormat = 0;        // thumbnail pixel format
     String8 dumpStreamFilename;
     gNumRepetitions = 1;
     gMaxNumFrames = 0;
@@ -797,7 +809,7 @@
     sp<android::ALooper> looper;
 
     int res;
-    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
+    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -838,6 +850,7 @@
                 break;
             }
 
+            case 'P':
             case 'm':
             case 'n':
             case 'b':
@@ -853,6 +866,8 @@
                     gNumRepetitions = x;
                 } else if (res == 'm') {
                     gMaxNumFrames = x;
+                } else if (res == 'P') {
+                    pixelFormat = x;
                 } else {
                     CHECK_EQ(res, 'b');
                     gReproduceBug = x;
@@ -975,24 +990,71 @@
             close(fd);
             fd = -1;
 
+            uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
+            if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
+                retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+            } else if (pixelFormat) {
+                retrieverPixelFormat = pixelFormat;
+            }
             sp<IMemory> mem =
                     retriever->getFrameAtTime(-1,
                             MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
-                            HAL_PIXEL_FORMAT_RGB_565,
-                            false /*metaOnly*/);
+                            retrieverPixelFormat, false /*metaOnly*/);
 
             if (mem != NULL) {
                 failed = false;
-                printf("getFrameAtTime(%s) => OK\n", filename);
+                printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
 
                 VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
 
-                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
-                            frame->getFlattenedData(),
-                            frame->mWidth, frame->mHeight), 0);
+                if (pixelFormat) {
+                    int bpp = 0;
+                    switch (pixelFormat) {
+                    case HAL_PIXEL_FORMAT_RGB_565:
+                        bpp = 2;
+                        break;
+                    case PIXEL_FORMAT_RGBA_1010102_AS_8888:
+                        // convert RGBA_1010102 to RGBA_8888
+                        {
+                            uint32_t *data = (uint32_t *)frame->getFlattenedData();
+                            uint32_t *end = data + frame->mWidth * frame->mHeight;
+                            for (; data < end; ++data) {
+                                *data =
+                                    // pick out 8-bit R, G, B values and move them to the
+                                    // correct position
+                                    ( (*data &      0x3fc) >> 2) | // R
+                                    ( (*data &    0xff000) >> 4) | // G
+                                    ( (*data & 0x3fc00000) >> 6) | // B
+                                    // pick out 2-bit A and expand to 8-bits
+                                    (((*data & 0xc0000000) >> 6) * 0x55);
+                            }
+                        }
+
+                        FALLTHROUGH_INTENDED;
+
+                    case HAL_PIXEL_FORMAT_RGBA_1010102:
+                    case HAL_PIXEL_FORMAT_RGBA_8888:
+                    case HAL_PIXEL_FORMAT_BGRA_8888:
+                        bpp = 4;
+                        break;
+                    }
+                    if (bpp) {
+                        FILE *out = fopen("/sdcard/out.raw", "wb");
+                        fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
+                        fclose(out);
+
+                        printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
+                    } else {
+                        printf("unknown pixel format.\n");
+                    }
+                } else {
+                    CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+                                frame->getFlattenedData(),
+                                frame->mWidth, frame->mHeight), 0);
+                }
             }
 
-            {
+            if (!pixelFormat) {
                 mem = retriever->extractAlbumArt();
 
                 if (mem != NULL) {
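
The in-place loop above keeps the top eight bits of each 10-bit color channel and widens the 2-bit alpha by multiplying with 0x55, so 0..3 maps to 0x00, 0x55, 0xAA, 0xFF. The same transform as a standalone helper, shown here only for readability (a sketch equivalent to the loop in the patch):

    #include <cstdint>

    // RGBA_1010102 packs R in bits 0-9, G in 10-19, B in 20-29, A in 30-31;
    // RGBA_8888 wants one byte per channel with R in the lowest byte.
    static inline uint32_t rgba1010102ToRgba8888(uint32_t p) {
        uint32_t r = (p >> 2)  & 0xff;          // top 8 bits of 10-bit R -> byte 0
        uint32_t g = (p >> 12) & 0xff;          // top 8 bits of 10-bit G -> byte 1
        uint32_t b = (p >> 22) & 0xff;          // top 8 bits of 10-bit B -> byte 2
        uint32_t a = ((p >> 30) & 0x3) * 0x55;  // expand 2-bit A to 8 bits -> byte 3
        return r | (g << 8) | (b << 16) | (a << 24);
    }
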
diff --git a/drm/OWNERS b/drm/OWNERS
index e788754..090c021 100644
--- a/drm/OWNERS
+++ b/drm/OWNERS
@@ -1 +1,3 @@
 jtinker@google.com
+kelzhan@google.com
+robertshih@google.com
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index a688728..03782ef 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -109,7 +109,12 @@
     // skip negative convert check as count of enum elements are 2
     aidldb.type = static_cast<BufferType>((int32_t)buffer.type);
     aidldb.nonsecureMemory = hidlSharedBufferToAidlSharedBuffer(buffer.nonsecureMemory);
-    aidldb.secureMemory = ::android::makeToAidl(buffer.secureMemory.getNativeHandle());
+    auto handle = buffer.secureMemory.getNativeHandle();
+    if (handle) {
+        aidldb.secureMemory = ::android::makeToAidl(handle);
+    } else {
+        aidldb.secureMemory = {.fds = {}, .ints = {}};
+    }
     return aidldb;
 }
 
@@ -130,6 +135,13 @@
     return String8(string.c_str());
 }
 
+static std::vector<uint8_t> toStdVec(const uint8_t* ptr, size_t n) {
+    if (!ptr) {
+        return std::vector<uint8_t>();
+    }
+    return std::vector<uint8_t>(ptr, ptr + n);
+}
+
 // -------Hidl interface related end--------------
 
 CryptoHalAidl::CryptoHalAidl()
@@ -335,8 +347,8 @@
     status_t err = UNKNOWN_ERROR;
     mLock.unlock();
 
-    std::vector<uint8_t> keyIdAidl = std::vector<uint8_t>(keyId, keyId + 16);
-    std::vector<uint8_t> ivAidl = std::vector<uint8_t>(iv, iv + 16);
+    std::vector<uint8_t> keyIdAidl(toStdVec(keyId, 16));
+    std::vector<uint8_t> ivAidl(toStdVec(iv, 16));
     DecryptResult result;
     err = mPlugin->decrypt(secure, keyIdAidl, ivAidl, aMode, aPattern, stdSubSamples,
                            hidlSharedBufferToAidlSharedBuffer(hSource), offset,
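
The toStdVec() helper exists because some callers pass no key ID or IV, and std::vector<uint8_t>(nullptr, nullptr + 16) is an invalid iterator range. A self-contained sketch of the behavior the guard provides:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Same guard as above, condensed: a null input yields an empty vector
    // instead of forming the undefined range (nullptr, nullptr + n).
    static std::vector<uint8_t> toStdVecSketch(const uint8_t* ptr, size_t n) {
        return ptr ? std::vector<uint8_t>(ptr, ptr + n) : std::vector<uint8_t>();
    }

    int main() {
        const uint8_t key[16] = {};
        assert(toStdVecSketch(key, 16).size() == 16);  // normal path copies n bytes
        assert(toStdVecSketch(nullptr, 16).empty());   // null path stays well-defined
        return 0;
    }
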
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
index 168a661..35c2382 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
@@ -72,7 +72,8 @@
     // This should match the in_mimeTypes handed by InitDataParser.
     *_aidl_return = in_mimeType == kIsoBmffVideoMimeType || in_mimeType == kIsoBmffAudioMimeType ||
                     in_mimeType == kCencInitDataFormat || in_mimeType == kWebmVideoMimeType ||
-                    in_mimeType == kWebmAudioMimeType || in_mimeType == kWebmInitDataFormat;
+                    in_mimeType == kWebmAudioMimeType || in_mimeType == kWebmInitDataFormat ||
+                    in_mimeType.empty();
     return ::ndk::ScopedAStatus::ok();
 }
 
@@ -85,7 +86,9 @@
         ALOGD("%s mime type is not supported by crypto scheme", in_mimeType.c_str());
     }
     *_aidl_return = isClearKeyUUID(in_uuid.uuid.data()) && isSupportedMimeType &&
-                    in_securityLevel == SecurityLevel::SW_SECURE_CRYPTO;
+                    (in_securityLevel == SecurityLevel::SW_SECURE_CRYPTO ||
+                     in_securityLevel == SecurityLevel::DEFAULT ||
+                     in_securityLevel == SecurityLevel::UNKNOWN);
     return ::ndk::ScopedAStatus::ok();
 }
 
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 92bea66..5478bcd 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -21,6 +21,7 @@
 #include <inttypes.h>
 #include <stdio.h>
 #include <chrono>
+#include <set>
 
 #include "AidlUtils.h"
 #include "ClearKeyDrmProperties.h"
@@ -188,7 +189,8 @@
 
     const std::vector<uint8_t> scopeId = in_scope;
     ::android::sp<Session> session;
-    if (in_keyType == KeyType::STREAMING || in_keyType == KeyType::OFFLINE) {
+    std::set<KeyType> init_types{KeyType::STREAMING, KeyType::OFFLINE};
+    if (init_types.count(in_keyType)) {
         std::vector<uint8_t> sessionId(scopeId.begin(), scopeId.end());
         session = mSessionLibrary->findSession(sessionId);
         if (!session.get()) {
@@ -402,8 +404,8 @@
     auto itr = mSecureStops.find(in_secureStopId.secureStopId);
     if (itr != mSecureStops.end()) {
         ClearkeySecureStop clearkeyStop = itr->second;
-        stop.assign(clearkeyStop.id.begin(), clearkeyStop.id.end());
-        stop.assign(clearkeyStop.data.begin(), clearkeyStop.data.end());
+        stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+        stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
     }
     mSecureStopLock.unlock();
 
@@ -439,9 +441,9 @@
     std::vector<::aidl::android::hardware::drm::SecureStop> stops;
     for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
         ClearkeySecureStop clearkeyStop = itr->second;
-        std::vector<uint8_t> stop = {};
-        stop.assign(clearkeyStop.id.begin(), clearkeyStop.id.end());
-        stop.assign(clearkeyStop.data.begin(), clearkeyStop.data.end());
+        std::vector<uint8_t> stop{};
+        stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+        stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
 
         SecureStop secureStop;
         secureStop.opaqueData = stop;
@@ -476,7 +478,7 @@
         return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
     }
 
-    *_aidl_return = itr->second;
+    *_aidl_return = SecurityLevel::SW_SECURE_CRYPTO;
     return toNdkScopedAStatus(Status::OK);
 }
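
The secure-stop changes above fix a subtle bug: std::vector::assign replaces the existing contents, so calling it twice kept only the data bytes and silently dropped the id, whereas insert at end() appends both ranges in order. A small self-contained demonstration:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main() {
        const std::vector<uint8_t> id{1, 2};
        const std::vector<uint8_t> data{3, 4, 5};

        std::vector<uint8_t> assigned;
        assigned.assign(id.begin(), id.end());
        assigned.assign(data.begin(), data.end());  // wipes out the id bytes
        assert(assigned.size() == 3);

        std::vector<uint8_t> appended;
        appended.insert(appended.end(), id.begin(), id.end());
        appended.insert(appended.end(), data.begin(), data.end());  // id then data
        assert(appended.size() == 5);
        return 0;
    }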
 
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
index 987e328..17d4a22 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
@@ -23,7 +23,7 @@
 
 namespace clearkeydrm {
 
-class SessionLibrary : public ::android::RefBase {
+class SessionLibrary {
   public:
     static SessionLibrary* get();
 
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index cb9837f..a2a79d5 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -22,8 +22,4 @@
 
     srcs: ["C2SoftAomDec.cpp"],
     static_libs: ["libaom"],
-
-    include_dirs: [
-        "external/libaom/",
-    ],
 }
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index 0be1bed..7f82486 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -18,11 +18,6 @@
     static_libs: ["libavcdec"],
 
     srcs: ["C2SoftAvcDec.cpp"],
-
-    include_dirs: [
-        "external/libavc/decoder",
-        "external/libavc/common",
-    ],
 }
 
 cc_library {
@@ -37,11 +32,6 @@
 
     srcs: ["C2SoftAvcEnc.cpp"],
 
-    include_dirs: [
-        "external/libavc/encoder",
-        "external/libavc/common",
-    ],
-
     cflags: [
         "-Wno-unused-variable",
     ],
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 160e250..f1669fd 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -34,6 +34,12 @@
         "libstagefright_foundation", // for Mutexed
     ],
 
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -75,6 +81,12 @@
     ],
 
     ldflags: ["-Wl,-Bsymbolic"],
+
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 }
 
 // public dependency for software codec implementation
diff --git a/media/codec2/components/cmds/Android.bp b/media/codec2/components/cmds/Android.bp
index d6ffd12..2a11c01 100644
--- a/media/codec2/components/cmds/Android.bp
+++ b/media/codec2/components/cmds/Android.bp
@@ -15,9 +15,6 @@
         "codec2.cpp",
     ],
 
-    include_dirs: [
-    ],
-
     header_libs: [
         "libmediadrm_headers",
     ],
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 7692d37..162339f 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -22,4 +22,10 @@
 
     srcs: ["C2SoftGav1Dec.cpp"],
     static_libs: ["libgav1"],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 }
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index daa10ae..a58044c 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -17,9 +17,4 @@
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
-
-    include_dirs: [
-        "external/libmpeg2/decoder",
-        "external/libmpeg2/common",
-    ],
 }
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 64999b7..7d5740b 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -11,6 +11,10 @@
     name: "libcodec2_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
     export_include_dirs: ["include"],
 }
 
@@ -18,6 +22,10 @@
     name: "libcodec2",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
     vndk: {
         enabled: true,
     },
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 122aacd..db7874d 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -57,6 +57,10 @@
     name: "libcodec2_hidl@1.0",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     defaults: ["hidl_defaults"],
 
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 0eeedb6..ed77a15 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -67,6 +67,11 @@
     name: "libcodec2_hidl@1.1",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 
     defaults: ["hidl_defaults"],
 
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 2f4d6b1..674921e 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -11,6 +11,8 @@
     name: "libsfplugin_ccodec_utils",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [ "//apex_available:platform", "com.android.media.swcodec", ],
+
     double_loadable: true,
 
     srcs: [
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index b858fa5..9c3ba4d 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -15,12 +15,9 @@
         "C2Param_test.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/vndk/include",
-    ],
-
     header_libs: [
         "libcodec2_headers",
+        "libcodec2_vndk_headers",
     ],
 
     // param tests must not depend on any codec2 libraries as all params should be templated
@@ -47,9 +44,6 @@
         "vndk/C2BufferTest.cpp",
     ],
 
-    include_dirs: [
-    ],
-
     shared_libs: [
         "libcodec2",
         "libcodec2_vndk",
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 27cd1f8..598500d 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -18,6 +18,25 @@
     vendor_available: true,
 
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
+
+cc_library_headers {
+    name: "libcodec2_vndk_headers",
+    vendor_available: true,
+    min_sdk_version: "29",
+
+    export_include_dirs: [
+        "include",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 }
 
 // !!!DO NOT DEPEND ON THIS SHARED LIBRARY DIRECTLY!!!
@@ -28,6 +47,11 @@
     min_sdk_version: "29",
     // TODO: b/147147883
     double_loadable: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 
     srcs: [
         "C2AllocatorBlob.cpp",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index be81481..40efb38 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -834,33 +834,18 @@
     aps->onNewAudioModulesAvailable();
 }
 
-status_t AudioSystem::setDeviceConnectionState(audio_devices_t device,
-                                               audio_policy_dev_state_t state,
-                                               const char* device_address,
-                                               const char* device_name,
+status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
+                                               const android::media::audio::common::AudioPort& port,
                                                audio_format_t encodedFormat) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    const char* address = "";
-    const char* name = "";
 
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (device_address != NULL) {
-        address = device_address;
-    }
-    if (device_name != NULL) {
-        name = device_name;
-    }
-
-    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_device_AudioDevice(device, address));
-
     return statusTFromBinderStatus(
             aps->setDeviceConnectionState(
-                    deviceAidl,
                     VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(state)),
-                    name,
+                    port,
                     VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_format_t_AudioFormatDescription(encodedFormat))));
 }
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 88e7396..292d92f 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -803,6 +803,12 @@
     return result.value_or(0);
 }
 
+status_t AudioFlingerClientAdapter::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPort(*port));
+    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
+}
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
@@ -1292,4 +1298,10 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setDeviceConnectedState(
+        const media::AudioPort& port, bool connected) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index c55c66e..6afe023 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -225,4 +225,6 @@
     int getAAudioMixerBurstCount();
 
     int getAAudioHardwareBurstMinUsec();
+
+    void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index c3e8dfb..69328a7 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -50,6 +50,7 @@
 import android.media.audio.common.AudioMode;
 import android.media.audio.common.AudioProfile;
 import android.media.audio.common.AudioOffloadInfo;
+import android.media.audio.common.AudioPort;
 import android.media.audio.common.AudioSource;
 import android.media.audio.common.AudioStreamType;
 import android.media.audio.common.AudioUsage;
@@ -64,9 +65,8 @@
 interface IAudioPolicyService {
     oneway void onNewAudioModulesAvailable();
 
-    void setDeviceConnectionState(in AudioDevice device,
-                                  in AudioPolicyDeviceState state,
-                                  @utf8InCpp String deviceName,
+    void setDeviceConnectionState(in AudioPolicyDeviceState state,
+                                  in android.media.audio.common.AudioPort port,
                                   in AudioFormatDescription encodedFormat);
 
     AudioPolicyDeviceState getDeviceConnectionState(in AudioDevice device);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 4280a6a..e0cb47e 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -30,6 +30,7 @@
 #include <android/media/ISpatializer.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioPort.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -263,8 +264,8 @@
     // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions)
     //
     static void onNewAudioModulesAvailable();
-    static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state,
-                                             const char *device_address, const char *device_name,
+    static status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                                             const android::media::audio::common::AudioPort& port,
                                              audio_format_t encodedFormat);
     static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                 const char *device_address);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index b4ee4dc..e047378 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -358,6 +358,8 @@
     virtual int32_t getAAudioMixerBurstCount() = 0;
 
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
 /**
@@ -454,14 +456,12 @@
     status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
-
     status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType,
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
-
     int32_t getAAudioMixerBurstCount() override;
-
     int32_t getAAudioHardwareBurstMinUsec() override;
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -550,6 +550,7 @@
             GET_MMAP_POLICY_INFOS = media::BnAudioFlingerService::TRANSACTION_getMmapPolicyInfos,
             GET_AAUDIO_MIXER_BURST_COUNT = media::BnAudioFlingerService::TRANSACTION_getAAudioMixerBurstCount,
             GET_AAUDIO_HARDWARE_BURST_MIN_USEC = media::BnAudioFlingerService::TRANSACTION_getAAudioHardwareBurstMinUsec,
+            SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
         };
 
         /**
@@ -669,6 +670,7 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
     Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
     Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
+    Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
 
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 8b09d76..9438191 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -311,13 +311,10 @@
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
-#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
-    Return<void> ret = mDevice->openInputStream_7_1(
-#else
     Return<void> ret = mDevice->openInputStream(
-#endif
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
-            [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& result,
+            [&](Result r,
+                const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& result,
                     const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
@@ -489,6 +486,32 @@
 }
 #endif
 
+status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    if (mDevice == 0) return NO_INIT;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    if (supportsSetConnectedState7_1) {
+        AudioPort hidlPort;
+        if (status_t result = HidlUtils::audioPortFromHal(*port, &hidlPort); result != NO_ERROR) {
+            return result;
+        }
+        Return<Result> ret = mDevice->setConnectedState_7_1(hidlPort, connected);
+        if (!ret.isOk() || ret != Result::NOT_SUPPORTED) {
+            return processReturn("setConnectedState_7_1", ret);
+        } else if (ret == Result::OK) {
+            return NO_ERROR;
+        }
+        supportsSetConnectedState7_1 = false;
+    }
+#endif
+    DeviceAddress hidlAddress;
+    if (status_t result = CoreUtils::deviceAddressFromHal(
+                    port->ext.device.type, port->ext.device.address, &hidlAddress);
+            result != NO_ERROR) {
+        return result;
+    }
+    return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
+}
+
 status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
     if (mDevice == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
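
setConnectedState() above uses a probe-and-remember pattern: try the 7.1 entry point once, and if the HAL answers NOT_SUPPORTED, clear supportsSetConnectedState7_1 so later calls go straight to the legacy DeviceAddress path. The intended control flow, reduced to a sketch with the HIDL types stubbed out (stand-ins only, not the real interfaces):

    #include <functional>

    enum class Result { OK, NOT_SUPPORTED, FAILED };

    struct FallbackSketch {
        bool supportsNewCall = true;
        std::function<Result()> newCall;     // stand-in for setConnectedState_7_1
        std::function<Result()> legacyCall;  // stand-in for setConnectedState

        Result setConnected() {
            if (supportsNewCall) {
                Result r = newCall();
                if (r != Result::NOT_SUPPORTED) {
                    return r;                // the new call handled it, OK or error
                }
                supportsNewCall = false;     // remember, skip the probe next time
            }
            return legacyCall();
        }
    };
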
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 104db40..cd9535e 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -132,13 +132,16 @@
         return INVALID_OPERATION;
     }
 
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
   private:
     friend class DevicesFactoryHalHidl;
     sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
-    sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
     // Null if it's not a primary device.
+    sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
+    bool supportsSetConnectedState7_1 = true;
 
     // Can not be constructed directly by clients.
     explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index 1384c1e..e473e41 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "DeviceHalLocal"
 //#define LOG_NDEBUG 0
 
+#include <media/AudioParameter.h>
 #include <utils/Log.h>
 
 #include "DeviceHalLocal.h"
@@ -232,6 +233,14 @@
     return INVALID_OPERATION;
 }
 
+status_t DeviceHalLocal::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    AudioParameter param(String8(port->ext.device.address));
+    const String8 key(connected ?
+            AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
+    param.addInt(key, port->ext.device.type);
+    return setParameters(param.toString());
+}
+
 status_t DeviceHalLocal::dump(int fd, const Vector<String16>& /* args */) {
     return mDev->dump(mDev, fd);
 }
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index b06e253..3e586cf 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -128,6 +128,8 @@
         return INVALID_OPERATION;
     }
 
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
     void closeOutputStream(struct audio_stream_out *stream_out);
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index b2f1cf3..6916ca1 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -39,7 +39,7 @@
 
 namespace android {
 
-using ReadCommand = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadCommand;
+using ReadCommand = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadCommand;
 
 using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
@@ -907,7 +907,7 @@
 
 
 StreamInHalHidl::StreamInHalHidl(
-        const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream)
+        const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
         : StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
 }
 
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 03342ef..44bf60a 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -20,7 +20,7 @@
 #include <atomic>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamIn.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
@@ -35,8 +35,9 @@
 using ::android::hardware::EventFlag;
 using ::android::hardware::MessageQueue;
 using ::android::hardware::Return;
-using ReadParameters = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadParameters;
-using ReadStatus = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadStatus;
+using ReadParameters =
+        ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadParameters;
+using ReadStatus = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadStatus;
 using WriteCommand = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteCommand;
 using WriteStatus = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteStatus;
 
@@ -262,7 +263,7 @@
     typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
     typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
 
-    const sp<::android::hardware::audio::CPP_VERSION::IStreamIn> mStream;
+    const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -270,7 +271,8 @@
     EventFlag* mEfGroup;
 
     // Can not be constructed directly by clients.
-    StreamInHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream);
+    StreamInHalHidl(
+            const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream);
 
     virtual ~StreamInHalHidl();
 
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 70c3199..9ca7118 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -128,6 +128,9 @@
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
+    // Update the connection status of an external device.
+    virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+
     virtual status_t dump(int fd, const Vector<String16>& args) = 0;
 
   protected:
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index b28ae70..041b427 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,6 +26,7 @@
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <drm/drm_framework_common.h>
+#include <log/log.h>
 #include <media/mediametadataretriever.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -422,7 +423,13 @@
 
         initFrameInfo(&mSequenceInfo, videoFrame);
 
-        mSequenceLength = atoi(mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT));
+        const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+        if (frameCount == nullptr) {
+            android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
+            ALOGD("No valid sequence information in metadata");
+            return false;
+        }
+        mSequenceLength = atoi(frameCount);
 
         if (defaultInfo == nullptr) {
             defaultInfo = &mSequenceInfo;
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index ec52a49..a6f0b60 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -43,10 +43,10 @@
 }
 
 //static
-MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
+MediaResource MediaResource::CodecResource(bool secure, SubType subType, int64_t instanceCount) {
     return MediaResource(
             secure ? Type::kSecureCodec : Type::kNonSecureCodec,
-            video ? SubType::kVideoCodec : SubType::kAudioCodec,
+            subType,
             instanceCount);
 }
 
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index 4712528..3b69d4f 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -37,7 +37,8 @@
     MediaResource(Type type, SubType subType, int64_t value);
     MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
 
-    static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
+    static MediaResource CodecResource(bool secure, MediaResourceSubType subType,
+            int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
     static MediaResource VideoBatteryResource();
@@ -62,6 +63,7 @@
         case MediaResource::SubType::kUnspecifiedSubType: return "unspecified";
         case MediaResource::SubType::kAudioCodec:         return "audio-codec";
         case MediaResource::SubType::kVideoCodec:         return "video-codec";
+        case MediaResource::SubType::kImageCodec:         return "image-codec";
         default:                                 return def;
     }
 }
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
index 7eddbe1..5a0d517 100644
--- a/media/libmediahelper/AudioValidator.cpp
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -47,8 +47,7 @@
         const effect_descriptor_t& desc, std::string_view bugNumber)
 {
     status_t status = NO_ERROR;
-    if (checkStringOverflow(desc.name)
-        | /* always */ checkStringOverflow(desc.implementor)) {
+    if (checkStringOverflow(desc.name) || checkStringOverflow(desc.implementor)) {
         status = BAD_VALUE;
     }
     return safetyNetLog(status, bugNumber);
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 8c86e16..609298f 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -206,6 +206,7 @@
     }
 
     const char *mime;
+    bool isHeif = false;
     if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
         ALOGE("image track has no mime type");
         return NULL;
@@ -215,6 +216,7 @@
         mime = MEDIA_MIMETYPE_VIDEO_HEVC;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+        isHeif = true;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         mime = MEDIA_MIMETYPE_VIDEO_AV1;
         trackMeta = new MetaData(*trackMeta);
@@ -240,6 +242,16 @@
         format->setInt32("width", thumbWidth);
     }
 
+    // If decoding a tiled HEIF, check that the decoder supports the tile dimensions instead.
+    if (!thumbnail && isHeif && format != NULL) {
+        int32_t tileWidth, tileHeight;
+        if (trackMeta->findInt32(kKeyTileWidth, &tileWidth) && tileWidth > 0
+                && trackMeta->findInt32(kKeyTileHeight, &tileHeight) && tileHeight > 0) {
+            format->setInt32("height", tileHeight);
+            format->setInt32("width", tileWidth);
+        }
+    }
+
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a0c8f8a..f81a5eb 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -4448,30 +4448,31 @@
     memset(&aspects, 0, sizeof(aspects));
     // Color metadata may have changed.
     sp<MetaData> meta = mSource->getFormat();
-    // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
-        int32_t primaries, transfer, coeffs;
-        bool fullRange;
-        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
-                asString(aspects.mPrimaries),
-                asString(aspects.mTransfer),
-                asString(aspects.mMatrixCoeffs),
-                asString(aspects.mRange));
-        ColorUtils::convertCodecColorAspectsToIsoAspects(
-                aspects, &primaries, &transfer, &coeffs, &fullRange);
-        mOwner->beginBox("colr");
-        mOwner->writeFourcc("nclx");
-        mOwner->writeInt16(primaries);
-        mOwner->writeInt16(transfer);
-        mOwner->writeInt16(coeffs);
-        mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
-        mOwner->endBox(); // colr
-    } else {
+    bool findPrimaries = meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries);
+    bool findTransfer = meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer);
+    bool findMatrix = meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs);
+    bool findRange = meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange);
+    if (!findPrimaries && !findTransfer && !findMatrix && !findRange) {
         ALOGV("no color information");
+        return;
     }
+
+    int32_t primaries, transfer, coeffs;
+    bool fullRange;
+    ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+            asString(aspects.mPrimaries),
+            asString(aspects.mTransfer),
+            asString(aspects.mMatrixCoeffs),
+            asString(aspects.mRange));
+    ColorUtils::convertCodecColorAspectsToIsoAspects(
+            aspects, &primaries, &transfer, &coeffs, &fullRange);
+    mOwner->beginBox("colr");
+    mOwner->writeFourcc("nclx");
+    mOwner->writeInt16(primaries);
+    mOwner->writeInt16(transfer);
+    mOwner->writeInt16(coeffs);
+    mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
+    mOwner->endBox(); // colr
 }
 
 void MPEG4Writer::Track::writeMdcvAndClliBoxes() {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e9dcb26..c0e69d2 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -102,6 +102,8 @@
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
 static const char *kCodecModeAudio = "audio";
+static const char *kCodecModeImage = "image";
+static const char *kCodecModeUnknown = "unknown";
 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
@@ -656,6 +658,24 @@
     notify->post();
 }
 
+static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
+        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
+        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
+        default:                       return MediaResourceSubType::kUnspecifiedSubType;
+    }
+}
+
+static const char *toCodecMode(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
+        case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
+        case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
+        default:                       return kCodecModeUnknown;
+    }
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -751,9 +771,9 @@
       mFlags(0),
       mStickyError(OK),
       mSoftRenderer(NULL),
-      mIsVideo(false),
-      mVideoWidth(0),
-      mVideoHeight(0),
+      mDomain(DOMAIN_UNKNOWN),
+      mWidth(0),
+      mHeight(0),
       mRotationDegrees(0),
       mHDRMetadataFlags(0),
       mDequeueInputTimeoutGeneration(0),
@@ -1165,7 +1185,7 @@
         });
     }
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1194,7 +1214,7 @@
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -1402,7 +1422,13 @@
         mCodecInfo->getSupportedMediaTypes(&mediaTypes);
         for (size_t i = 0; i < mediaTypes.size(); ++i) {
             if (mediaTypes[i].startsWith("video/")) {
-                mIsVideo = true;
+                mDomain = DOMAIN_VIDEO;
+                break;
+            } else if (mediaTypes[i].startsWith("audio/")) {
+                mDomain = DOMAIN_AUDIO;
+                break;
+            } else if (mediaTypes[i].startsWith("image/")) {
+                mDomain = DOMAIN_IMAGE;
                 break;
             }
         }
@@ -1415,7 +1441,7 @@
         return NAME_NOT_FOUND;
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         // video codec needs dedicated looper
         if (mCodecLooper == NULL) {
             mCodecLooper = new ALooper;
@@ -1448,17 +1474,16 @@
 
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
-        mediametrics_setCString(mMetricsHandle, kCodecMode,
-                                mIsVideo ? kCodecModeVideo : kCodecModeAudio);
+        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
     }
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(secureCodec, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1539,16 +1564,16 @@
         mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
     }
 
-    if (mIsVideo) {
-        format->findInt32("width", &mVideoWidth);
-        format->findInt32("height", &mVideoHeight);
+    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
+        format->findInt32("width", &mWidth);
+        format->findInt32("height", &mHeight);
         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
             mRotationDegrees = 0;
         }
 
         if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
+            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
             mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
@@ -1562,21 +1587,23 @@
             if (format->findInt32("color-format", &colorFormat)) {
                 mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
             }
-            float frameRate = -1.0;
-            if (format->findFloat("frame-rate", &frameRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
-            }
-            float captureRate = -1.0;
-            if (format->findFloat("capture-rate", &captureRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
-            }
-            float operatingRate = -1.0;
-            if (format->findFloat("operating-rate", &operatingRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
-            }
-            int32_t priority = -1;
-            if (format->findInt32("priority", &priority)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+            if (mDomain == DOMAIN_VIDEO) {
+                float frameRate = -1.0;
+                if (format->findFloat("frame-rate", &frameRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                }
+                float captureRate = -1.0;
+                if (format->findFloat("capture-rate", &captureRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                }
+                float operatingRate = -1.0;
+                if (format->findFloat("operating-rate", &operatingRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                }
+                int32_t priority = -1;
+                if (format->findInt32("priority", &priority)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                }
             }
             int32_t colorStandard = -1;
             if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
@@ -1598,9 +1625,9 @@
         }
 
         // Prevent possible integer overflow in downstream code.
-        if (mVideoWidth < 0 || mVideoHeight < 0 ||
-               (uint64_t)mVideoWidth * mVideoHeight > (uint64_t)INT32_MAX / 4) {
-            ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
+        if (mWidth < 0 || mHeight < 0 ||
+               (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
+            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
             return BAD_VALUE;
         }
 
@@ -1633,7 +1660,7 @@
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mIsVideo && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
             mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
@@ -1686,7 +1713,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -2267,7 +2295,7 @@
 }
 
 uint64_t MediaCodec::getGraphicBufferSize() {
-    if (!mIsVideo) {
+    if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
         return 0;
     }
 
@@ -2275,7 +2303,7 @@
     size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
     for (size_t i = 0; i < portNum; ++i) {
         // TODO: this is just an estimation, we should get the real buffer size from ACodec.
-        size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
+        size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
     }
     return size;
 }
@@ -2287,7 +2315,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -3219,8 +3248,8 @@
                             : MediaCodecInfo::Attributes(0);
                     if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
                         // software codec is currently ignored.
-                        mResourceManagerProxy->addResource(
-                                MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+                        mResourceManagerProxy->addResource(MediaResource::CodecResource(
+                            mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
                     }
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
@@ -3386,7 +3415,7 @@
                     }
 
                     CHECK_EQ(mState, STARTING);
-                    if (mIsVideo) {
+                    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
                         mResourceManagerProxy->addResource(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index a5affb9..6dc8157 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -590,9 +590,10 @@
     uint32_t gfxRange = range;
     uint32_t gfxStandard = standard;
     uint32_t gfxTransfer = transfer;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
-            & sGfxTransfers.map(transfer, &gfxTransfer))) {
+    bool mappedRange = sGfxRanges.map(range, &gfxRange);
+    bool mappedStandard = sGfxStandards.map(standard, &gfxStandard);
+    bool mappedTransfer = sGfxTransfers.map(transfer, &gfxTransfer);
+    if (!(mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
               "graphics dataspace (R:%u S:%u T:%u)",
               range, asString(range), standard, asString(standard), transfer, asString(transfer),
@@ -626,9 +627,10 @@
     CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
     CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
     CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
-            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+    bool mappedRange = sGfxRanges.map(gfxRange, &cuRange);
+    bool mappedStandard = sGfxStandards.map(gfxStandard, &cuStandard);
+    bool mappedTransfer = sGfxTransfers.map(gfxTransfer, &cuTransfer);
+    if (!(mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
               "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
               gfxRange, gfxStandard, gfxTransfer,
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index ce3b0d0..29a87e9 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -70,6 +70,13 @@
 using aidl::android::media::MediaResourceParcel;
 
 struct MediaCodec : public AHandler {
+    enum Domain {
+        DOMAIN_UNKNOWN = 0,
+        DOMAIN_VIDEO = 1,
+        DOMAIN_AUDIO = 2,
+        DOMAIN_IMAGE = 3
+    };
+
     enum ConfigureFlags {
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
@@ -438,10 +445,10 @@
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
-    bool mIsVideo;
+    Domain mDomain;
     AString mLogSessionId;
-    int32_t mVideoWidth;
-    int32_t mVideoHeight;
+    int32_t mWidth;
+    int32_t mHeight;
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
@@ -502,7 +509,7 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
+    std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
     bool mIsSurfaceToScreen;
 
     MediaCodec(
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 411c206..5506a73 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -39,7 +39,7 @@
         darwin: {
             enabled: false,
         },
-        linux_glibc: {
+        glibc: {
             cflags: [
                 "-Dsigev_notify_thread_id=_sigev_un._tid",
             ],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 43f79ce..5b53331 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -386,6 +386,24 @@
     return mAAudioHwBurstMinMicros;
 }
 
+status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) {
+    status_t final_result = NO_INIT;
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->setConnectedState(port, connected);
+        // Same logic as with setParameter: it's a success if at least one
+        // HAL module accepts the update.
+        if (final_result != NO_ERROR) {
+            final_result = result;
+        }
+    }
+    mHardwareStatus = AUDIO_HW_IDLE;
+    return final_result;
+}
+
 // getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
 std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
     if (mAudioVibratorInfos.empty()) {
@@ -4334,6 +4352,7 @@
         case TransactionCode::SET_AUDIO_PORT_CONFIG:
         case TransactionCode::SET_RECORD_SILENCED:
         case TransactionCode::AUDIO_POLICY_READY:
+        case TransactionCode::SET_DEVICE_CONNECTED_STATE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8c546cc..59f22eb 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -290,6 +290,8 @@
 
     virtual int32_t getAAudioHardwareBurstMinUsec();
 
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
 
@@ -912,6 +914,7 @@
         AUDIO_HW_SET_MASTER_MUTE,       // set_master_mute
         AUDIO_HW_GET_MASTER_MUTE,       // get_master_mute
         AUDIO_HW_GET_MICROPHONES,       // getMicrophones
+        AUDIO_HW_SET_CONNECTED_STATE,   // setConnectedState
     };
 
     mutable     hardware_call_state                 mHardwareStatus;    // for dump only
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index e8e478b..5b2b87e 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -105,10 +105,8 @@
     virtual void onNewAudioModulesAvailable() = 0;
 
     // indicate a change in device connection status
-    virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                              audio_policy_dev_state_t state,
-                                              const char *device_address,
-                                              const char *device_name,
+    virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                                              const android::media::audio::common::AudioPort& port,
                                               audio_format_t encodedFormat) = 0;
     // retrieve a device connection status
     virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
@@ -546,6 +544,8 @@
 
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 0165dc8..4b4817e 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -36,6 +36,8 @@
         "libaudiopolicyenginedefault",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "android.media.audio.common.types-V1-cpp",
+        "audioclient-types-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index bd295ce..cc36c08 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -38,6 +38,7 @@
 #include <vector>
 
 #include <Serializer.h>
+#include <android/media/audio/common/AudioPort.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioParameter.h>
@@ -53,6 +54,10 @@
 
 namespace android {
 
+using android::media::audio::common::AudioDevice;
+using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioPortDeviceExt;
+using android::media::audio::common::AudioPortExt;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -97,44 +102,80 @@
 // AudioPolicyInterface implementation
 // ----------------------------------------------------------------------------
 
-status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
-                                                      audio_policy_dev_state_t state,
-                                                      const char *device_address,
-                                                      const char *device_name,
-                                                      audio_format_t encodedFormat)
-{
-    status_t status = setDeviceConnectionStateInt(device, state, device_address,
-                                                  device_name, encodedFormat);
+status_t AudioPolicyManager::setDeviceConnectionState(audio_policy_dev_state_t state,
+        const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat) {
+    status_t status = setDeviceConnectionStateInt(state, port, encodedFormat);
     nextAudioPortGeneration();
     return status;
 }
 
+status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
+                                                      audio_policy_dev_state_t state,
+                                                      const char* device_address,
+                                                      const char* device_name,
+                                                      audio_format_t encodedFormat) {
+    media::AudioPort aidlPort;
+    if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
+        status == OK) {
+        return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
+    } else {
+        ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
+        return status;
+    }
+}
+
 void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
                                                         audio_policy_dev_state_t state)
 {
-    AudioParameter param(String8(device->address().c_str()));
-    const String8 key(state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE ?
-                AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
-    param.addInt(key, device->type());
-    mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString());
+    audio_port_v7 devicePort;
+    device->toAudioPort(&devicePort);
+    if (status_t status = mpClientInterface->setDeviceConnectedState(
+                    &devicePort, state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+            status != OK) {
+        ALOGE("Error %d while setting connected state for device %s", status,
+                device->getDeviceTypeAddr().toString(false).c_str());
+    }
+}
+
+status_t AudioPolicyManager::setDeviceConnectionStateInt(
+        audio_policy_dev_state_t state, const android::media::audio::common::AudioPort& port,
+        audio_format_t encodedFormat) {
+    // TODO: b/211601178 Forward 'port' to Audio HAL via mHwModules. For now, only device_type,
+    // device_address and device_name are forwarded.
+    if (port.ext.getTag() != AudioPortExt::device) {
+        return BAD_VALUE;
+    }
+    audio_devices_t device_type;
+    std::string device_address;
+    if (status_t status = aidl2legacy_AudioDevice_audio_device(
+                port.ext.get<AudioPortExt::device>().device, &device_type, &device_address);
+        status != OK) {
+        return status;
+    };
+    const char* device_name = port.name.c_str();
+    // connect/disconnect only 1 device at a time
+    if (!audio_is_output_device(device_type) && !audio_is_input_device(device_type))
+        return BAD_VALUE;
+
+    sp<DeviceDescriptor> device = mHwModules.getDeviceDescriptor(
+            device_type, device_address.c_str(), device_name, encodedFormat,
+            state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+    return device ? setDeviceConnectionStateInt(device, state) : INVALID_OPERATION;
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t deviceType,
                                                          audio_policy_dev_state_t state,
-                                                         const char *device_address,
-                                                         const char *device_name,
-                                                         audio_format_t encodedFormat)
-{
-    ALOGV("setDeviceConnectionStateInt() device: 0x%X, state %d, address %s name %s format 0x%X",
-            deviceType, state, device_address, device_name, encodedFormat);
-
-    // connect/disconnect only 1 device at a time
-    if (!audio_is_output_device(deviceType) && !audio_is_input_device(deviceType)) return BAD_VALUE;
-
-    sp<DeviceDescriptor> device =
-            mHwModules.getDeviceDescriptor(deviceType, device_address, device_name, encodedFormat,
-                                           state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
-    return device ? setDeviceConnectionStateInt(device, state) : INVALID_OPERATION;
+                                                         const char* device_address,
+                                                         const char* device_name,
+                                                         audio_format_t encodedFormat) {
+    media::AudioPort aidlPort;
+    if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
+        status == OK) {
+        return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
+    } else {
+        ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
+        return status;
+    }
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(const sp<DeviceDescriptor> &device,
@@ -399,6 +440,14 @@
     return BAD_VALUE;
 }
 
+status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
+                                               const char* device_name,
+                                               media::AudioPort* aidlPort) {
+    DeviceDescriptorBase devDescr(device, device_address);
+    devDescr.setName(device_name);
+    return devDescr.writeToParcelable(aidlPort);
+}
+
 void AudioPolicyManager::setEngineDeviceConnectionState(const sp<DeviceDescriptor> device,
                                       audio_policy_dev_state_t state) {
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 165ac13..a145c70 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -35,6 +35,7 @@
 #include <media/PatchBuilder.h>
 #include "AudioPolicyInterface.h"
 
+#include <android/media/audio/common/AudioPort.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioPolicyConfig.h>
 #include <PolicyAudioPort.h>
@@ -95,11 +96,8 @@
         virtual ~AudioPolicyManager();
 
         // AudioPolicyInterface
-        virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                                          audio_policy_dev_state_t state,
-                                                          const char *device_address,
-                                                          const char *device_name,
-                                                          audio_format_t encodedFormat);
+        virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                               const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
@@ -911,6 +909,16 @@
         PatchBuilder buildMsdPatch(bool msdIsSource, const sp<DeviceDescriptor> &device) const;
         status_t setMsdOutputPatches(const DeviceVector *outputDevices = nullptr);
         void releaseMsdOutputPatches(const DeviceVector& devices);
+
+        // Overload of setDeviceConnectionState()
+        status_t setDeviceConnectionState(audio_devices_t deviceType,
+                                          audio_policy_dev_state_t state,
+                                          const char* device_address, const char* device_name,
+                                          audio_format_t encodedFormat);
+
+        // Called by setDeviceConnectionState()
+        status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
+                                   const char* device_name, media::AudioPort* aidlPort);
 private:
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
@@ -1030,6 +1038,9 @@
         bool     isValidAttributes(const audio_attributes_t *paa);
 
         // Called by setDeviceConnectionState().
+        status_t setDeviceConnectionStateInt(audio_policy_dev_state_t state,
+                                             const android::media::audio::common::AudioPort& port,
+                                             audio_format_t encodedFormat);
         status_t setDeviceConnectionStateInt(audio_devices_t deviceType,
                                              audio_policy_dev_state_t state,
                                              const char *device_address,
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index aaf6fba..63a1e71 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -307,4 +307,15 @@
     return af->updateSecondaryOutputs(trackSecondaryOutputs);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+    return af->setDeviceConnectedState(port, connected);
+}
+
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 87a350f..582c048 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -119,14 +119,9 @@
 }
 
 Status AudioPolicyService::setDeviceConnectionState(
-        const AudioDevice& deviceAidl,
         media::AudioPolicyDeviceState stateAidl,
-        const std::string& deviceNameAidl,
+        const android::media::audio::common::AudioPort& port,
         const AudioFormatDescription& encodedFormatAidl) {
-    audio_devices_t device;
-    std::string address;
-    RETURN_BINDER_STATUS_IF_ERROR(
-            aidl2legacy_AudioDevice_audio_device(deviceAidl, &device, &address));
     audio_policy_dev_state_t state = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(stateAidl));
     audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
@@ -147,7 +142,7 @@
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->setDeviceConnectionState(
-            device, state, address.c_str(), deviceNameAidl.c_str(), encodedFormat);
+            state, port, encodedFormat);
     if (status == NO_ERROR) {
         onCheckSpatializer_l();
     }
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index ac5af6b..39f2c97 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -82,9 +82,8 @@
     //
     binder::Status onNewAudioModulesAvailable() override;
     binder::Status setDeviceConnectionState(
-            const AudioDevice& device,
             media::AudioPolicyDeviceState state,
-            const std::string& deviceName,
+            const android::media::audio::common::AudioPort& port,
             const AudioFormatDescription& encodedFormat) override;
     binder::Status getDeviceConnectionState(const AudioDevice& device,
                                             media::AudioPolicyDeviceState* _aidl_return) override;
@@ -808,6 +807,9 @@
         status_t updateSecondaryOutputs(
                 const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
 
+        status_t setDeviceConnectedState(
+                const struct audio_port_v7 *port, bool connected) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 8fbe8b2..2e220bc 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -31,7 +31,7 @@
 
     static_libs: [
         "libaudiopolicycomponents",
-        "libgmock"
+        "libgmock",
     ],
 
     header_libs: [
@@ -65,6 +65,12 @@
         "liblog",
         "libmedia_helper",
         "libutils",
+        "android.media.audio.common.types-V1-cpp",
+        "libaudioclient_aidl_conversion",
+        "libstagefright_foundation",
+        "libshmemcompat",
+        "libshmemutil",
+        "audioclient-types-aidl-cpp",
     ],
 
     static_libs: ["libaudiopolicycomponents"],
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 84b40d2..adef8f1 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -103,6 +103,11 @@
         ++mAudioPortListUpdateCount;
     }
 
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_ERROR;
+    }
+
     // Helper methods for tests
     size_t getActivePatchesCount() const { return mActivePatches.size(); }
 
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 4e0735b..da85658 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -96,6 +96,10 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
         return NO_INIT;
     }
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_INIT;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index ff06937..7441f20 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -35,6 +35,8 @@
     using AudioPolicyManager::getAudioPatches;
     using AudioPolicyManager::getDirectPlaybackSupport;
     using AudioPolicyManager::getDirectProfilesForAttributes;
+    using AudioPolicyManager::setDeviceConnectionState;
+    using AudioPolicyManager::deviceToAudioPort;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index df4389b..10f8dc0 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -111,16 +111,17 @@
             continue;
         }
         std::string address = "11:22:33:44:55:66";
+        media::AudioPort aidlPort;
+        ASSERT_EQ(OK, manager.deviceToAudioPort(device->type(), address.c_str(), "" /*name*/,
+                                                 &aidlPort));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                device->type(), AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(),
-                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_AVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                device->type(), AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, address.c_str(),
-                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
     }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 9b0d0e7..11fa991 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -145,9 +145,9 @@
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
     if (entry.count > 0) {
-        const auto it = std::find(entry.data.i32, entry.data.i32 + entry.count,
+        const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
-        if (it != entry.data.i32 + entry.count) {
+        if (it != entry.data.u8 + entry.count) {
             entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
             if (entry.count > 0 || ((entry.count % 2) != 0)) {
                 int standardBitmap = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
@@ -860,6 +860,7 @@
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
     int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int streamUseCase = outputConfiguration.getStreamUseCase();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
@@ -903,7 +904,8 @@
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile);
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase);
 
         if (!res.isOk())
             return res;
@@ -949,7 +951,7 @@
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                 outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
-                streamInfo.dynamicRangeProfile);
+                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
     }
 
     if (err != OK) {
@@ -1044,7 +1046,8 @@
             &surfaceIds,
             outputConfiguration.getSurfaceSetID(), isShared,
             outputConfiguration.isMultiResolution(), consumerUsage,
-            outputConfiguration.getDynamicRangeProfile());
+            outputConfiguration.getDynamicRangeProfile(),
+            outputConfiguration.getStreamUseCase());
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1058,7 +1061,8 @@
         mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
                 std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
                         overriddenSensorPixelModesUsed,
-                        outputConfiguration.getDynamicRangeProfile()));
+                        outputConfiguration.getDynamicRangeProfile(),
+                        outputConfiguration.getStreamUseCase()));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -1246,6 +1250,7 @@
     }
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
+    int streamUseCase = outputConfiguration.getStreamUseCase();
 
     int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
 
@@ -1254,7 +1259,8 @@
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile);
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase);
         if (!res.isOk())
             return res;
 
@@ -1612,6 +1618,7 @@
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
     int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int streamUseCase = outputConfiguration.getStreamUseCase();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1624,7 +1631,8 @@
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile);
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase);
 
         if (!res.isOk())
             return res;
@@ -1763,7 +1771,7 @@
         bool isCompositeStream = false;
         for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
-            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
                 camera3::HeicCompositeStream::isHeicCompositeStream(s);
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index e936cb6..6ddf500 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -183,7 +183,8 @@
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) = 0;
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -201,7 +202,8 @@
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) = 0;
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
 
     /**
      * Create an input stream of width, height, and format.
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 64f5abf..4568209 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -714,6 +714,10 @@
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
+    // After this method is called, no method corresponding to this provider, nor any
+    // member belonging to it, may be used anymore, since doing so leads to invalid
+    // memory access (in particular because removeProvider() is called by ProviderInfo
+    // methods on HAL service death).
     status_t removeProvider(const std::string& provider);
     sp<StatusListener> getStatusListener() const;
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 992027a..c8f6310 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -978,7 +978,7 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-            uint64_t consumerUsage, int dynamicRangeProfile) {
+            uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -991,7 +991,8 @@
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
             format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
-            streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile);
+            streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
+            streamUseCase);
 }
 
 static bool isRawFormat(int format) {
@@ -1011,7 +1012,7 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
         const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-        uint64_t consumerUsage, int dynamicRangeProfile) {
+        uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
@@ -1089,7 +1090,7 @@
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, blobBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution, dynamicRangeProfile);
+                isMultiResolution, dynamicRangeProfile, streamUseCase);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         bool maxResolution =
                 sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1103,22 +1104,22 @@
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution, dynamicRangeProfile);
+                isMultiResolution, dynamicRangeProfile, streamUseCase);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                mUseHalBufManager, dynamicRangeProfile);
+                mUseHalBufManager, dynamicRangeProfile, streamUseCase);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution, dynamicRangeProfile);
+                isMultiResolution, dynamicRangeProfile, streamUseCase);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution, dynamicRangeProfile);
+                isMultiResolution, dynamicRangeProfile, streamUseCase);
     }
 
     size_t consumerCount = consumers.size();
@@ -1812,14 +1813,16 @@
                 streamIds.push_back(stream->getId());
                 Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
                 int64_t usage = 0LL;
+                int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
                 if (camera3Stream != nullptr) {
                     usage = camera3Stream->getUsage();
+                    streamUseCase = camera3Stream->getStreamUseCase();
                 }
                 streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
                     stream->getFormat(), stream->getDataSpace(), usage,
                     stream->getMaxHalBuffers(),
                     stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
-                    stream->getDynamicRangeProfile());
+                    stream->getDynamicRangeProfile(), streamUseCase);
             }
         }
     }
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6c4ba49..d466ae4 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -134,7 +134,8 @@
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
             int dynamicRangeProfile =
-            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) override;
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -146,7 +147,8 @@
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0,
             int dynamicRangeProfile =
-            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) override;
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
 
     status_t createInputStream(
             uint32_t width, uint32_t height, int format, bool isMultiResolution,
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index ba97367..dc3a6f3 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,11 +34,11 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int dynamicRangeProfile) :
+        int setId, bool isMultiResolution, int dynamicRangeProfile, int streamUseCase) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
                 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                dynamicRangeProfile),
+                dynamicRangeProfile, streamUseCase),
         mTotalBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
@@ -89,6 +89,7 @@
         lines.appendFormat("      Physical camera id: %s\n", camera_stream::physical_camera_id);
     }
     lines.appendFormat("      Dynamic Range Profile: 0x%x", camera_stream::dynamic_range_profile);
+    lines.appendFormat("      Stream use case: %d\n", camera_stream::use_case);
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
     lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 518ee42..f2b1536 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -38,7 +38,8 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
 
   public:
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 69723b6..95d19ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -46,11 +46,12 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int dynamicRangeProfile) :
+        int setId, bool isMultiResolution, int dynamicRangeProfile,
+        int streamUseCase) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                            dynamicRangeProfile),
+                            dynamicRangeProfile, streamUseCase),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -75,10 +76,11 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int dynamicRangeProfile) :
+        int setId, bool isMultiResolution, int dynamicRangeProfile,
+        int streamUseCase) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
-                            setId, isMultiResolution, dynamicRangeProfile),
+                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -110,11 +112,12 @@
         camera_stream_rotation_t rotation, nsecs_t timestampOffset,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int dynamicRangeProfile) :
+        int setId, bool isMultiResolution, int dynamicRangeProfile,
+        int streamUseCase) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                            dynamicRangeProfile),
+                            dynamicRangeProfile, streamUseCase),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -152,12 +155,12 @@
                                         const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
                                          int setId, bool isMultiResolution,
-                                         int dynamicRangeProfile) :
+                                         int dynamicRangeProfile, int streamUseCase) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                            dynamicRangeProfile),
+                            dynamicRangeProfile, streamUseCase),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
@@ -581,7 +584,7 @@
     mHandoutTotalBufferCount = 0;
     mFrameCount = 0;
     mLastTimestamp = 0;
-    mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
+    mUseMonoTimestamp = (isConsumedByHWComposer() || isVideoStream());
 
     res = native_window_set_buffer_count(mConsumer.get(),
             mTotalBufferCount);
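
One hunk above also changes the mUseMonoTimestamp computation from a bitwise | to a logical ||. For bool operands the resulting value is the same, but || short-circuits and states the intent. A standalone sketch of the difference (illustration only, not part of the patch):

// Standalone illustration: for bool operands, | and || yield the same value, but ||
// short-circuits, so the right-hand call is skipped once the left side is already true.
#include <iostream>

static int rhsCalls = 0;

bool isConsumedByHWComposer() { return true; }
bool isVideoStream() { ++rhsCalls; return false; }

int main() {
    rhsCalls = 0;
    bool bitwise = isConsumedByHWComposer() | isVideoStream();   // evaluates both sides
    std::cout << bitwise << " rhs calls: " << rhsCalls << "\n";  // prints "1 rhs calls: 1"

    rhsCalls = 0;
    bool logical = isConsumedByHWComposer() || isVideoStream();  // short-circuits
    std::cout << logical << " rhs calls: " << rhsCalls << "\n";  // prints "1 rhs calls: 0"
    return 0;
}
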
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index d9bf62a..7d2d32e 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -90,7 +90,8 @@
             nsecs_t timestampOffset, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -103,7 +104,8 @@
             nsecs_t timestampOffset, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -115,7 +117,8 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
 
     virtual ~Camera3OutputStream();
 
@@ -243,7 +246,8 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
 
     /**
      * Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 0e2671a..fbd1e56 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -33,10 +33,12 @@
         camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool useHalBufManager, int dynamicProfile) :
+        int setId, bool useHalBufManager, int dynamicProfile,
+        int streamUseCase) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
-                            consumerUsage, timestampOffset, setId, dynamicProfile),
+                            consumerUsage, timestampOffset, setId, /*isMultiResolution*/false,
+                            dynamicProfile, streamUseCase),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
     if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index fafa26f..223d52b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -41,7 +41,8 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false,
-            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
 
     virtual ~Camera3SharedOutputStream();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 83f9a98..6b093b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -54,7 +54,8 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution, int dynamicRangeProfile) :
+        int setId, bool isMultiResolution, int dynamicRangeProfile,
+        int streamUseCase) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -91,6 +92,7 @@
     camera_stream::physical_camera_id = mPhysicalCameraId.string();
     camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
     camera_stream::dynamic_range_profile = dynamicRangeProfile;
+    camera_stream::use_case = streamUseCase;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -175,6 +177,10 @@
     return camera_stream::max_buffers;
 }
 
+int Camera3Stream::getStreamUseCase() const {
+    return camera_stream::use_case;
+}
+
 void Camera3Stream::setOfflineProcessingSupport(bool support) {
     mSupportOfflineProcessing = support;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index bbbea8d..ada570b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -178,6 +178,7 @@
     android_dataspace getOriginalDataSpace() const;
     int               getMaxHalBuffers() const;
     const String8&    physicalCameraId() const;
+    int               getStreamUseCase() const;
 
     void              setOfflineProcessingSupport(bool) override;
     bool              getOfflineProcessingSupport() const override;
@@ -505,7 +506,8 @@
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId, bool isMultiResolution, int dynamicRangeProfile);
+            int setId, bool isMultiResolution, int dynamicRangeProfile,
+            int streamUseCase);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index ef10f0d..7b1597b 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -65,6 +65,7 @@
 
     std::unordered_set<int32_t> sensor_pixel_modes_used;
     int dynamic_range_profile;
+    int use_case;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -109,16 +110,19 @@
         bool supportsOffline = false;
         std::unordered_set<int32_t> sensorPixelModesUsed;
         int dynamicRangeProfile;
+        int streamUseCase;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0),
-            dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {}
+            dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+            streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
                 uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
-                int _dynamicRangeProfile) :
+                int _dynamicRangeProfile, int _streamUseCase) :
             width(_width), height(_height), format(_format),
             dataSpace(_dataSpace), consumerUsage(_consumerUsage),
-            sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile){}
+            sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
+            streamUseCase(_streamUseCase) {}
 };
 
 /**
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 7b7a2a2..87cf99a 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -65,6 +65,7 @@
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
 
 namespace android {
 
@@ -969,8 +970,16 @@
                     __FUNCTION__, src->dynamic_range_profile);
             return BAD_VALUE;
         }
+        if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT &&
+                mHidlSession_3_8 == nullptr) {
+            ALOGE("%s: Camera device doesn't support non-default stream use case %d!",
+                    __FUNCTION__, src->use_case);
+            return BAD_VALUE;
+        }
         dst3_8.v3_7 = dst3_7;
         dst3_8.dynamicRangeProfile = mapToHidlDynamicProfile(src->dynamic_range_profile);
+        dst3_8.useCase =
+                static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(src->use_case);
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
         mBufferRecords.tryCreateBufferCache(streamId);
@@ -992,7 +1001,6 @@
     requestedConfiguration3_4.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
             sessionParamSize);
-    requestedConfiguration3_7.operationMode = operationMode;
     requestedConfiguration3_7.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
             sessionParamSize);
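
In the HIDL stream configuration above, a stream with a non-default use case is rejected with BAD_VALUE when mHidlSession_3_8 is null, and is otherwise cast into the V3_8 useCase field. A simplified standalone sketch of that version-gating check (illustration only; the types are stand-ins for the HIDL interfaces):

// Standalone illustration: reject a non-default stream use case when the negotiated HAL
// interface predates the version that carries the field. Types are simplified stand-ins.
#include <iostream>
#include <optional>

enum class StreamUseCase : int { DEFAULT = 0, PREVIEW = 1, VIDEO_RECORD = 3 };

struct HalSession {
    int major;
    int minor;
    bool supportsStreamUseCase() const { return major > 3 || (major == 3 && minor >= 8); }
};

// Returns std::nullopt (analogous to BAD_VALUE) when the use case cannot be expressed.
std::optional<StreamUseCase> mapUseCase(StreamUseCase requested, const HalSession& session) {
    if (requested != StreamUseCase::DEFAULT && !session.supportsStreamUseCase()) {
        std::cerr << "non-default stream use case not supported by HAL "
                  << session.major << "." << session.minor << "\n";
        return std::nullopt;
    }
    return requested;
}

int main() {
    HalSession oldHal{3, 7}, newHal{3, 8};
    std::cout << mapUseCase(StreamUseCase::VIDEO_RECORD, oldHal).has_value() << "\n";  // 0
    std::cout << mapUseCase(StreamUseCase::VIDEO_RECORD, newHal).has_value() << "\n";  // 1
    return 0;
}
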
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f826d83..548fb0b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -31,6 +31,7 @@
 using android::hardware::camera2::ICameraDeviceUser;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap;
+using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
 
 namespace android {
 namespace camera3 {
@@ -310,11 +311,30 @@
     }
 }
 
+bool isStreamUseCaseSupported(int streamUseCase,
+        const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t availableStreamUseCases =
+            deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
+
+    if (availableStreamUseCases.count == 0 &&
+            streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        return true;
+    }
+
+    for (size_t i = 0; i < availableStreamUseCases.count; i++) {
+        if (availableStreamUseCases.data.i32[i] == streamUseCase) {
+            return true;
+        }
+    }
+    return false;
+}
+
 binder::Status createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-        const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile){
+        const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
+        int streamUseCase) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -427,6 +447,13 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
+    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
+            physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: stream use case %d not supported,"
+                " failed to create output stream", logicalCameraId.string(), streamUseCase);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
 
     if (!isStreamInfoValid) {
         streamInfo.width = width;
@@ -436,6 +463,7 @@
         streamInfo.consumerUsage = consumerUsage;
         streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
         streamInfo.dynamicRangeProfile = dynamicRangeProfile;
+        streamInfo.streamUseCase = streamUseCase;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
@@ -506,6 +534,8 @@
     stream->dynamicRangeProfile =
         static_cast<CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap> (
                 streamInfo.dynamicRangeProfile);
+    stream->useCase = static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(
+            streamInfo.streamUseCase);
 }
 
 binder::Status checkPhysicalCameraId(
@@ -666,6 +696,7 @@
             return res;
         }
 
+        int streamUseCase = it.getStreamUseCase();
         if (deferredConsumer) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
@@ -686,6 +717,7 @@
                         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                 "Deferred surface sensor pixel modes not valid");
             }
+            streamInfo.streamUseCase = streamUseCase;
             mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                     &streamConfiguration.streams[streamIdx++]);
             isStreamInfoValid = true;
@@ -698,7 +730,8 @@
         for (auto& bufferProducer : bufferProducers) {
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile);
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
+                    streamUseCase);
 
             if (!res.isOk())
                 return res;
@@ -851,6 +884,11 @@
             // image
             return false;
         }
+        if (static_cast<int32_t>(streamConfigV38.streams[i].useCase) !=
+                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+            // ICameraDevice older than 3.8 doesn't support stream use case
+            return false;
+        }
         streamConfigV37.streams[i] = streamConfigV38.streams[i].v3_7;
     }
     streamConfigV37.operationMode = streamConfigV38.operationMode;
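
The new isStreamUseCaseSupported helper above accepts a use case only if it appears in ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, treating DEFAULT as always supported when the static key is absent. A standalone sketch of the same check over a plain vector (illustration only, not part of the patch):

// Standalone illustration of the support check: DEFAULT is accepted when the device
// advertises no list at all; otherwise the requested value must appear in the list.
#include <cstdint>
#include <iostream>
#include <vector>

constexpr int32_t kUseCaseDefault = 0;  // stand-in for ..._STREAM_USE_CASES_DEFAULT

bool isStreamUseCaseSupported(int32_t streamUseCase,
        const std::vector<int32_t>& availableUseCases) {
    if (availableUseCases.empty() && streamUseCase == kUseCaseDefault) {
        return true;
    }
    for (int32_t supported : availableUseCases) {
        if (supported == streamUseCase) {
            return true;
        }
    }
    return false;
}

int main() {
    std::vector<int32_t> advertised = {kUseCaseDefault, 1 /* PREVIEW */, 3 /* VIDEO_RECORD */};
    std::cout << isStreamUseCaseSupported(3, advertised) << "\n";        // 1
    std::cout << isStreamUseCaseSupported(5, advertised) << "\n";        // 0
    std::cout << isStreamUseCaseSupported(kUseCaseDefault, {}) << "\n";  // 1
    return 0;
}
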
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 9a5dc2c..8dfc11d 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -94,13 +94,15 @@
 // Create a Surface from an IGraphicBufferProducer. Returns error if
 // IGraphicBufferProducer's property doesn't match with streamInfo
 binder::Status createSurfaceFromGbp(
-camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-const std::vector<int32_t> &sensorPixelModesUsed,  int dynamicRangeProfile);
+        camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed,  int dynamicRangeProfile,
+        int streamUseCase);
+
 void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
         camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
-        hardware::camera::device::V3_7::Stream *stream /*out*/);
+        hardware::camera::device::V3_8::Stream *stream /*out*/);
 
 //check if format is 10-bit output compatible
 bool is10bitCompatibleFormat(int32_t format);
@@ -111,6 +113,8 @@
 // Check if the device supports a given dynamicRangeProfile
 bool isDynamicRangeProfileSupported(int dynamicRangeProfile, const CameraMetadata& staticMeta);
 
+bool isStreamUseCaseSupported(int streamUseCase, const CameraMetadata &deviceInfo);
+
 // Check that the physicalCameraId passed in is spported by the camera
 // device.
 binder::Status checkPhysicalCameraId(
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
index f92d557..4dd87e1 100644
--- a/services/mediaresourcemanager/IMediaResourceMonitor.h
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -32,6 +32,7 @@
     enum {
         TYPE_VIDEO_CODEC = 0,
         TYPE_AUDIO_CODEC = 1,
+        TYPE_IMAGE_CODEC = 2,
     };
 
     virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
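
The new TYPE_IMAGE_CODEC value pairs with the exhaustive switch over MediaResource::SubType added further below in ResourceManagerService.cpp, so every subtype either maps to a monitor notification or is deliberately ignored. A standalone sketch of that mapping (illustration only; enums are simplified stand-ins):

// Standalone illustration: an exhaustive switch (no default case) over the codec subtype,
// so adding a new subtype such as ImageCodec forces this mapping to be revisited.
#include <iostream>

enum class SubType { Unspecified, AudioCodec, VideoCodec, ImageCodec };
enum MonitorType { TYPE_VIDEO_CODEC = 0, TYPE_AUDIO_CODEC = 1, TYPE_IMAGE_CODEC = 2 };

// Returns -1 when no notification should be sent.
int toMonitorType(SubType subType) {
    switch (subType) {
        case SubType::AudioCodec:  return TYPE_AUDIO_CODEC;
        case SubType::VideoCodec:  return TYPE_VIDEO_CODEC;
        case SubType::ImageCodec:  return TYPE_IMAGE_CODEC;
        case SubType::Unspecified: return -1;
    }
    return -1;  // unreachable with the cases above
}

int main() {
    std::cout << toMonitorType(SubType::ImageCodec) << "\n";  // prints "2"
    return 0;
}
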
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 953686b..d50f8d5 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -51,8 +51,8 @@
 
 class DeathNotifier : public RefBase {
 public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid, int64_t clientId);
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
+            int64_t clientId);
 
     virtual ~DeathNotifier() {}
 
@@ -130,27 +130,48 @@
     return itemsStr;
 }
 
-static bool hasResourceType(MediaResource::Type type, const ResourceList& resources) {
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        MediaResourceParcel resource) {
+    if (type != resource.type) {
+      return false;
+    }
+    switch (type) {
+        // Codec subtypes (e.g. video vs. audio) are each considered separate resources, so
+        // compare the subtypes as well.
+        case MediaResource::Type::kSecureCodec:
+        case MediaResource::Type::kNonSecureCodec:
+            if (resource.subType == subType) {
+                return true;
+            }
+            break;
+        // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
+        default:
+            return true;
+    }
+    return false;
+}
+
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceList& resources) {
     for (auto it = resources.begin(); it != resources.end(); it++) {
-        if (it->second.type == type) {
+        if (hasResourceType(type, subType, it->second)) {
             return true;
         }
     }
     return false;
 }
 
-static bool hasResourceType(MediaResource::Type type, const ResourceInfos& infos) {
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceInfos& infos) {
     for (size_t i = 0; i < infos.size(); ++i) {
-        if (hasResourceType(type, infos[i].resources)) {
+        if (hasResourceType(type, subType, infos[i].resources)) {
             return true;
         }
     }
     return false;
 }
 
-static ResourceInfos& getResourceInfosForEdit(
-        int pid,
-        PidResourceInfosMap& map) {
+static ResourceInfos& getResourceInfosForEdit(int pid, PidResourceInfosMap& map) {
     ssize_t index = map.indexOfKey(pid);
     if (index < 0) {
         // new pid
@@ -161,11 +182,8 @@
     return map.editValueFor(pid);
 }
 
-static ResourceInfo& getResourceInfoForEdit(
-        uid_t uid,
-        int64_t clientId,
-        const std::shared_ptr<IResourceManagerClient>& client,
-        ResourceInfos& infos) {
+static ResourceInfo& getResourceInfoForEdit(uid_t uid, int64_t clientId,
+        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
     ssize_t index = infos.indexOfKey(clientId);
 
     if (index < 0) {
@@ -188,17 +206,24 @@
     if (binder != NULL) {
         sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
         for (size_t i = 0; i < resources.size(); ++i) {
-            if (resources[i].subType == MediaResource::SubType::kAudioCodec) {
-                service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
-            } else if (resources[i].subType == MediaResource::SubType::kVideoCodec) {
-                service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+            switch (resources[i].subType) {
+                case MediaResource::SubType::kAudioCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
+                    break;
+                case MediaResource::SubType::kVideoCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+                    break;
+                case MediaResource::SubType::kImageCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
+                    break;
+                case MediaResource::SubType::kUnspecifiedSubType:
+                    break;
             }
         }
     }
 }
 
-binder_status_t ResourceManagerService::dump(
-        int fd, const char** /*args*/, uint32_t /*numArgs*/) {
+binder_status_t ResourceManagerService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
     String8 result;
 
     if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
@@ -275,8 +300,7 @@
     return OK;
 }
 
-struct SystemCallbackImpl :
-        public ResourceManagerService::SystemCallbackInterface {
+struct SystemCallbackImpl : public ResourceManagerService::SystemCallbackInterface {
     SystemCallbackImpl() : mClientToken(new BBinder()) {}
 
     virtual void noteStartVideo(int uid) override {
@@ -303,8 +327,7 @@
 ResourceManagerService::ResourceManagerService()
     : ResourceManagerService(new ProcessInfo(), new SystemCallbackImpl()) {}
 
-ResourceManagerService::ResourceManagerService(
-        const sp<ProcessInfoInterface> &processInfo,
+ResourceManagerService::ResourceManagerService(const sp<ProcessInfoInterface> &processInfo,
         const sp<SystemCallbackInterface> &systemResource)
     : mProcessInfo(processInfo),
       mSystemCB(systemResource),
@@ -362,8 +385,8 @@
     return Status::ok();
 }
 
-void ResourceManagerService::onFirstAdded(
-        const MediaResourceParcel& resource, const ResourceInfo& clientInfo) {
+void ResourceManagerService::onFirstAdded(const MediaResourceParcel& resource,
+        const ResourceInfo& clientInfo) {
     // first time added
     if (resource.type == MediaResource::Type::kCpuBoost
      && resource.subType == MediaResource::SubType::kUnspecifiedSubType) {
@@ -380,8 +403,8 @@
     }
 }
 
-void ResourceManagerService::onLastRemoved(
-        const MediaResourceParcel& resource, const ResourceInfo& clientInfo) {
+void ResourceManagerService::onLastRemoved(const MediaResourceParcel& resource,
+        const ResourceInfo& clientInfo) {
     if (resource.type == MediaResource::Type::kCpuBoost
             && resource.subType == MediaResource::SubType::kUnspecifiedSubType
             && mCpuBoostCount > 0) {
@@ -394,8 +417,8 @@
     }
 }
 
-void ResourceManagerService::mergeResources(
-        MediaResourceParcel& r1, const MediaResourceParcel& r2) {
+void ResourceManagerService::mergeResources(MediaResourceParcel& r1,
+        const MediaResourceParcel& r2) {
     // The resource entry on record is maintained to be in [0,INT64_MAX].
     // Clamp if merging in the new resource value causes it to go out of bound.
     // Note that the new resource value could be negative, eg.DrmSession, the
@@ -411,10 +434,7 @@
     }
 }
 
-Status ResourceManagerService::addResource(
-        int32_t pid,
-        int32_t uid,
-        int64_t clientId,
+Status ResourceManagerService::addResource(int32_t pid, int32_t uid, int64_t clientId,
         const std::shared_ptr<IResourceManagerClient>& client,
         const std::vector<MediaResourceParcel>& resources) {
     String8 log = String8::format("addResource(pid %d, clientId %lld, resources %s)",
@@ -473,8 +493,7 @@
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(
-        int32_t pid, int64_t clientId,
+Status ResourceManagerService::removeResource(int32_t pid, int64_t clientId,
         const std::vector<MediaResourceParcel>& resources) {
     String8 log = String8::format("removeResource(pid %d, clientId %lld, resources %s)",
             pid, (long long) clientId, getString(resources).string());
@@ -583,22 +602,19 @@
     return Status::ok();
 }
 
-void ResourceManagerService::getClientForResource_l(
-        int callingPid, const MediaResourceParcel *res,
+void ResourceManagerService::getClientForResource_l(int callingPid, const MediaResourceParcel *res,
         Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     if (res == NULL) {
         return;
     }
     std::shared_ptr<IResourceManagerClient> client;
-    if (getLowestPriorityBiggestClient_l(callingPid, res->type, &client)) {
+    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, &client)) {
         clients->push_back(client);
     }
 }
 
-Status ResourceManagerService::reclaimResource(
-        int32_t callingPid,
-        const std::vector<MediaResourceParcel>& resources,
-        bool* _aidl_return) {
+Status ResourceManagerService::reclaimResource(int32_t callingPid,
+        const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
     String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
             callingPid, getString(resources).string());
     mServiceLog->add(log);
@@ -618,34 +634,43 @@
         const MediaResourceParcel *graphicMemory = NULL;
         const MediaResourceParcel *drmSession = NULL;
         for (size_t i = 0; i < resources.size(); ++i) {
-            MediaResource::Type type = resources[i].type;
-            if (resources[i].type == MediaResource::Type::kSecureCodec) {
-                secureCodec = &resources[i];
-            } else if (type == MediaResource::Type::kNonSecureCodec) {
-                nonSecureCodec = &resources[i];
-            } else if (type == MediaResource::Type::kGraphicMemory) {
-                graphicMemory = &resources[i];
-            } else if (type == MediaResource::Type::kDrmSession) {
-                drmSession = &resources[i];
+            switch (resources[i].type) {
+                case MediaResource::Type::kSecureCodec:
+                    secureCodec = &resources[i];
+                    break;
+                case MediaResource::Type::kNonSecureCodec:
+                    nonSecureCodec = &resources[i];
+                    break;
+                case MediaResource::Type::kGraphicMemory:
+                    graphicMemory = &resources[i];
+                    break;
+                case MediaResource::Type::kDrmSession:
+                    drmSession = &resources[i];
+                    break;
+                default:
+                    break;
             }
         }
 
         // first pass to handle secure/non-secure codec conflict
         if (secureCodec != NULL) {
             if (!mSupportsMultipleSecureCodecs) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
+                            secureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec,
+                            secureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
         }
         if (nonSecureCodec != NULL) {
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
+                        nonSecureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
@@ -681,11 +706,11 @@
         }
     }
 
-    *_aidl_return = reclaimInternal(clients);
+    *_aidl_return = reclaimUnconditionallyFrom(clients);
     return Status::ok();
 }
 
-bool ResourceManagerService::reclaimInternal(
+bool ResourceManagerService::reclaimUnconditionallyFrom(
         const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
         return false;
@@ -732,9 +757,7 @@
     return false;
 }
 
-Status ResourceManagerService::overridePid(
-        int originalPid,
-        int newPid) {
+Status ResourceManagerService::overridePid(int originalPid, int newPid) {
     String8 log = String8::format("overridePid(originalPid %d, newPid %d)",
             originalPid, newPid);
     mServiceLog->add(log);
@@ -763,9 +786,7 @@
 }
 
 Status ResourceManagerService::overrideProcessInfo(
-        const std::shared_ptr<IResourceManagerClient>& client,
-        int pid,
-        int procState,
+        const std::shared_ptr<IResourceManagerClient>& client, int pid, int procState,
         int oomScore) {
     String8 log = String8::format("overrideProcessInfo(pid %d, procState %d, oomScore %d)",
             pid, procState, oomScore);
@@ -799,8 +820,8 @@
     return Status::ok();
 }
 
-uintptr_t ResourceManagerService::addCookieAndLink_l(
-        ::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier) {
+uintptr_t ResourceManagerService::addCookieAndLink_l(::ndk::SpAIBinder binder,
+        const sp<DeathNotifier>& notifier) {
     std::scoped_lock lock{sCookieLock};
 
     uintptr_t cookie;
@@ -813,8 +834,7 @@
     return cookie;
 }
 
-void ResourceManagerService::removeCookieAndUnlink_l(
-        ::ndk::SpAIBinder binder, uintptr_t cookie) {
+void ResourceManagerService::removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie) {
     std::scoped_lock lock{sCookieLock};
     AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
     sCookieToDeathNotifierMap.erase(cookie);
@@ -889,16 +909,34 @@
                                          MediaResource::Type::kNonSecureCodec,
                                          MediaResource::Type::kGraphicMemory,
                                          MediaResource::Type::kDrmSession}) {
-            std::shared_ptr<IResourceManagerClient> client;
-            if (getBiggestClient_l(pid, type, &client, true /* pendingRemovalOnly */)) {
-                clients.add(client);
-                break;
+            switch (type) {
+                // Codec resources are segregated by audio, video and image domains.
+                case MediaResource::Type::kSecureCodec:
+                case MediaResource::Type::kNonSecureCodec:
+                    for (MediaResource::SubType subType : {MediaResource::SubType::kAudioCodec,
+                                                           MediaResource::SubType::kVideoCodec,
+                                                           MediaResource::SubType::kImageCodec}) {
+                        std::shared_ptr<IResourceManagerClient> client;
+                        if (getBiggestClientPendingRemoval_l(pid, type, subType, &client)) {
+                            clients.add(client);
+                            continue;
+                        }
+                    }
+                    break;
+                // Non-codec resources are shared by audio, video and image codecs (no subtype).
+                default:
+                    std::shared_ptr<IResourceManagerClient> client;
+                    if (getBiggestClientPendingRemoval_l(pid, type,
+                            MediaResource::SubType::kUnspecifiedSubType, &client)) {
+                        clients.add(client);
+                    }
+                    break;
             }
         }
     }
 
     if (!clients.empty()) {
-        reclaimInternal(clients);
+        reclaimUnconditionallyFrom(clients);
     }
     return Status::ok();
 }
@@ -915,14 +953,13 @@
     return mProcessInfo->getPriority(newPid, priority);
 }
 
-bool ResourceManagerService::getAllClients_l(
-        int callingPid, MediaResource::Type type,
-        Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
+bool ResourceManagerService::getAllClients_l(int callingPid, MediaResource::Type type,
+        MediaResource::SubType subType, Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     Vector<std::shared_ptr<IResourceManagerClient>> temp;
     for (size_t i = 0; i < mMap.size(); ++i) {
         ResourceInfos &infos = mMap.editValueAt(i);
         for (size_t j = 0; j < infos.size(); ++j) {
-            if (hasResourceType(type, infos[j].resources)) {
+            if (hasResourceType(type, subType, infos[j].resources)) {
                 if (!isCallingPriorityHigher_l(callingPid, mMap.keyAt(i))) {
                     // some higher/equal priority process owns the resource,
                     // this request can't be fulfilled.
@@ -942,8 +979,8 @@
     return true;
 }
 
-bool ResourceManagerService::getLowestPriorityBiggestClient_l(
-        int callingPid, MediaResource::Type type,
+bool ResourceManagerService::getLowestPriorityBiggestClient_l(int callingPid,
+        MediaResource::Type type, MediaResource::SubType subType,
         std::shared_ptr<IResourceManagerClient> *client) {
     int lowestPriorityPid;
     int lowestPriority;
@@ -951,7 +988,7 @@
 
     // Before looking into other processes, check if we have clients marked for
     // pending removal in the same process.
-    if (getBiggestClient_l(callingPid, type, client, true /* pendingRemovalOnly */)) {
+    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, client)) {
         return true;
     }
     if (!getPriority_l(callingPid, &callingPriority)) {
@@ -959,7 +996,7 @@
                 callingPid);
         return false;
     }
-    if (!getLowestPriorityPid_l(type, &lowestPriorityPid, &lowestPriority)) {
+    if (!getLowestPriorityPid_l(type, subType, &lowestPriorityPid, &lowestPriority)) {
         return false;
     }
     if (lowestPriority <= callingPriority) {
@@ -968,14 +1005,14 @@
         return false;
     }
 
-    if (!getBiggestClient_l(lowestPriorityPid, type, client)) {
+    if (!getBiggestClient_l(lowestPriorityPid, type, subType, client)) {
         return false;
     }
     return true;
 }
 
-bool ResourceManagerService::getLowestPriorityPid_l(
-        MediaResource::Type type, int *lowestPriorityPid, int *lowestPriority) {
+bool ResourceManagerService::getLowestPriorityPid_l(MediaResource::Type type,
+        MediaResource::SubType subType, int *lowestPriorityPid, int *lowestPriority) {
     int pid = -1;
     int priority = -1;
     for (size_t i = 0; i < mMap.size(); ++i) {
@@ -983,7 +1020,7 @@
             // no client on this process.
             continue;
         }
-        if (!hasResourceType(type, mMap.valueAt(i))) {
+        if (!hasResourceType(type, subType, mMap.valueAt(i))) {
             // doesn't have the requested resource type
             continue;
         }
@@ -1021,8 +1058,13 @@
     return (callingPidPriority < priority);
 }
 
-bool ResourceManagerService::getBiggestClient_l(
-        int pid, MediaResource::Type type, std::shared_ptr<IResourceManagerClient> *client,
+bool ResourceManagerService::getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
+        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client) {
+    return getBiggestClient_l(pid, type, subType, client, true /* pendingRemovalOnly */);
+}
+
+bool ResourceManagerService::getBiggestClient_l(int pid, MediaResource::Type type,
+        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client,
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -1041,7 +1083,7 @@
         }
         for (auto it = resources.begin(); it != resources.end(); it++) {
             const MediaResourceParcel &resource = it->second;
-            if (resource.type == type) {
+            if (hasResourceType(type, subType, resource)) {
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
                     clientTemp = infos[i].client;
@@ -1052,8 +1094,8 @@
 
     if (clientTemp == NULL) {
         ALOGE_IF(!pendingRemovalOnly,
-                 "getBiggestClient_l: can't find resource type %s for pid %d",
-                 asString(type), pid);
+                 "getBiggestClient_l: can't find resource type %s and subtype %s for pid %d",
+                 asString(type), asString(subType), pid);
         return false;
     }
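
The refactor above makes resource matching subtype-aware: secure and non-secure codec entries only match when the subtype (audio, video, or image) also matches, while non-codec resources ignore the subtype. A standalone sketch of that matching rule (illustration only; Resource is a stand-in for MediaResourceParcel):

// Standalone illustration of subtype-aware matching: codec resources must match on both
// type and subtype; memory, DRM sessions, etc. are shared across subtypes.
#include <cstdint>
#include <iostream>
#include <vector>

enum class Type { SecureCodec, NonSecureCodec, GraphicMemory, DrmSession };
enum class SubType { Unspecified, AudioCodec, VideoCodec, ImageCodec };

struct Resource {
    Type type;
    SubType subType;
    int64_t value;
};

bool matches(Type type, SubType subType, const Resource& r) {
    if (r.type != type) {
        return false;
    }
    switch (type) {
        case Type::SecureCodec:
        case Type::NonSecureCodec:
            return r.subType == subType;   // audio/video/image codecs reclaimed separately
        default:
            return true;                   // non-codec resources are not segregated
    }
}

bool hasResourceType(Type type, SubType subType, const std::vector<Resource>& held) {
    for (const Resource& r : held) {
        if (matches(type, subType, r)) {
            return true;
        }
    }
    return false;
}

int main() {
    std::vector<Resource> held = {
        {Type::NonSecureCodec, SubType::VideoCodec, 1},
        {Type::GraphicMemory, SubType::Unspecified, 100},
    };
    std::cout << hasResourceType(Type::NonSecureCodec, SubType::AudioCodec, held) << "\n";  // 0
    std::cout << hasResourceType(Type::NonSecureCodec, SubType::VideoCodec, held) << "\n";  // 1
    std::cout << hasResourceType(Type::GraphicMemory, SubType::AudioCodec, held) << "\n";   // 1
    return 0;
}
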
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 9c2636e..6551371 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -77,26 +77,19 @@
             int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
     ResourceManagerService();
-    explicit ResourceManagerService(
-            const sp<ProcessInfoInterface> &processInfo,
+    explicit ResourceManagerService(const sp<ProcessInfoInterface> &processInfo,
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
-    void setObserverService(
-            const std::shared_ptr<ResourceObserverService>& observerService);
+    void setObserverService(const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
 
-    Status addResource(
-            int32_t pid,
-            int32_t uid,
-            int64_t clientId,
+    Status addResource(int32_t pid, int32_t uid, int64_t clientId,
             const std::shared_ptr<IResourceManagerClient>& client,
             const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeResource(
-            int32_t pid,
-            int64_t clientId,
+    Status removeResource(int32_t pid, int64_t clientId,
             const std::vector<MediaResourceParcel>& resources) override;
 
     Status removeClient(int32_t pid, int64_t clientId) override;
@@ -104,20 +97,13 @@
     // Tries to reclaim resource from processes with lower priority than the calling process
     // according to the requested resources.
     // Returns true if any resource has been reclaimed, otherwise returns false.
-    Status reclaimResource(
-            int32_t callingPid,
-            const std::vector<MediaResourceParcel>& resources,
+    Status reclaimResource(int32_t callingPid, const std::vector<MediaResourceParcel>& resources,
             bool* _aidl_return) override;
 
-    Status overridePid(
-            int originalPid,
-            int newPid) override;
+    Status overridePid(int originalPid, int newPid) override;
 
-    Status overrideProcessInfo(
-            const std::shared_ptr<IResourceManagerClient>& client,
-            int pid,
-            int procState,
-            int oomScore) override;
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client, int pid,
+            int procState, int oomScore) override;
 
     Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
 
@@ -132,30 +118,34 @@
 
     // Reclaims resources from |clients|. Returns true if reclaim succeeded
     // for all clients.
-    bool reclaimInternal(
-            const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
+    bool reclaimUnconditionallyFrom(const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
 
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
-    bool getAllClients_l(int callingPid, MediaResource::Type type,
+    bool getAllClients_l(int callingPid, MediaResource::Type type, MediaResource::SubType subType,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
     bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
-            std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
-    bool getLowestPriorityPid_l(MediaResource::Type type, int *pid, int *priority);
+    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType, int *pid,
+            int *priority);
 
     // Gets the client who owns biggest piece of specified resource type from pid.
-    // Returns false if failed. The client will remain unchanged if failed.
-    bool getBiggestClient_l(int pid, MediaResource::Type type,
+    // Returns false with no change to client if there are no clients holding resources of this
+    // type.
+    bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
             std::shared_ptr<IResourceManagerClient> *client,
             bool pendingRemovalOnly = false);
+
+    // Same as getBiggestClient_l above, but with pendingRemovalOnly fixed to true.
+    bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
+            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
 
     bool isCallingPriorityHigher_l(int callingPid, int pid);
 
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
index af2ba68..72a0551 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
@@ -26,4 +26,5 @@
     kUnspecifiedSubType = 0,
     kAudioCodec = 1,
     kVideoCodec = 2,
+    kImageCodec = 3,
 }
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 8e29312..1624477 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -119,11 +119,11 @@
 
 struct TestClient : public BnResourceManagerClient {
     TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
+        : mPid(pid), mService(service) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         mService->removeClient(mPid, getId(ref<TestClient>()));
-        mReclaimed = true;
+        mWasReclaimResourceCalled = true;
         *_aidl_return = true;
         return Status::ok();
     }
@@ -133,18 +133,16 @@
         return Status::ok();
     }
 
-    bool reclaimed() const {
-        return mReclaimed;
-    }
-
-    void reset() {
-        mReclaimed = false;
+    bool checkIfReclaimedAndReset() {
+        bool wasReclaimResourceCalled = mWasReclaimResourceCalled;
+        mWasReclaimResourceCalled = false;
+        return wasReclaimResourceCalled;
     }
 
     virtual ~TestClient() {}
 
 private:
-    bool mReclaimed;
+    bool mWasReclaimResourceCalled = false;
     int mPid;
     std::shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
@@ -166,14 +164,30 @@
     return lhs.type == rhs.type && lhs.arg == rhs.arg;
 }
 
-#define CHECK_STATUS_TRUE(condition) \
-    EXPECT_TRUE((condition).isOk() && (result))
+// The condition is expected to return a status but also update the local
+// result variable.
+#define CHECK_STATUS_TRUE(conditionThatUpdatesResult) \
+    do { \
+        bool result = false; \
+        EXPECT_TRUE((conditionThatUpdatesResult).isOk()); \
+        EXPECT_TRUE(result); \
+    } while(false)
 
-#define CHECK_STATUS_FALSE(condition) \
-    EXPECT_TRUE((condition).isOk() && !(result))
+// The condition is expected to return a status but also update the local
+// result variable.
+#define CHECK_STATUS_FALSE(conditionThatUpdatesResult) \
+    do { \
+        bool result = true; \
+        EXPECT_TRUE((conditionThatUpdatesResult).isOk()); \
+        EXPECT_FALSE(result); \
+    } while(false)
 
 class ResourceManagerServiceTestBase : public ::testing::Test {
 public:
+    static TestClient* toTestClient(std::shared_ptr<IResourceManagerClient> testClient) {
+        return static_cast<TestClient*>(testClient.get());
+    }
+
     ResourceManagerServiceTestBase()
         : mSystemCB(new TestSystemCallback()),
           mService(::ndk::SharedRefBase::make<ResourceManagerService>(
@@ -183,6 +197,10 @@
           mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
     }
 
+    std::shared_ptr<IResourceManagerClient> createTestClient(int pid) {
+        return ::ndk::SharedRefBase::make<TestClient>(pid, mService);
+    }
+
     sp<TestSystemCallback> mSystemCB;
     std::shared_ptr<ResourceManagerService> mService;
     std::shared_ptr<IResourceManagerClient> mTestClient1;
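
The reworked CHECK_STATUS_TRUE/CHECK_STATUS_FALSE macros wrap their assertions in do { ... } while (false) and declare the bool result locally, so the expression under test can write into it through its out-parameter while the macro still behaves like a single statement after if/else. A standalone sketch of the pattern (illustration only; the service call and the printed checks are stand-ins for reclaimResource and the gtest EXPECT macros):

// Standalone illustration of the do { ... } while (false) macro pattern: the block scopes
// a local `result` that the tested expression fills in via an out-parameter.
#include <iostream>

struct Status {
    bool ok;
    bool isOk() const { return ok; }
};

// Stand-in for a call like reclaimResource(..., bool* _aidl_return).
Status reclaim(bool shouldSucceed, bool* outReclaimed) {
    *outReclaimed = shouldSucceed;
    return Status{true};
}

#define CHECK_STATUS_TRUE(conditionThatUpdatesResult)            \
    do {                                                         \
        bool result = false;                                     \
        bool statusOk = (conditionThatUpdatesResult).isOk();     \
        std::cout << "status ok: " << statusOk                   \
                  << ", result: " << result << "\n";             \
    } while (false)

int main() {
    CHECK_STATUS_TRUE(reclaim(true, &result));   // the macro supplies the local `result`
    CHECK_STATUS_TRUE(reclaim(false, &result));
    return 0;
}
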
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index a029d45..8739c3b 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -25,22 +25,60 @@
 namespace android {
 
 class ResourceManagerServiceTest : public ResourceManagerServiceTestBase {
+private:
+    static MediaResource createSecureVideoCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kVideoCodec, amount);
+    }
+
+    static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kVideoCodec, amount);
+    }
+
+    static MediaResource createSecureAudioCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kAudioCodec, amount);
+    }
+
+    static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kAudioCodec, amount);
+    }
+
+    static MediaResource createSecureImageCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kImageCodec, amount);
+    }
+
+    static MediaResource createNonSecureImageCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kImageCodec, amount);
+    }
+
+    static MediaResource createGraphicMemoryResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kGraphicMemory,
+            MediaResource::SubType::kUnspecifiedSubType, amount);
+    }
+
+    static MediaResource createDrmSessionResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kDrmSession,
+            MediaResource::SubType::kUnspecifiedSubType, amount);
+    }
+
+    static MediaResource createBatteryResource() {
+        return MediaResource(MediaResource::Type::kBattery,
+            MediaResource::SubType::kUnspecifiedSubType, 1);
+    }
+
+    static MediaResource createCpuBoostResource() {
+        return MediaResource(MediaResource::Type::kCpuBoost,
+            MediaResource::SubType::kUnspecifiedSubType, 1);
+    }
+
 public:
     ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
 
-    void verifyClients(bool c1, bool c2, bool c3) {
-        TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
-        TestClient *client2 = static_cast<TestClient*>(mTestClient2.get());
-        TestClient *client3 = static_cast<TestClient*>(mTestClient3.get());
-
-        EXPECT_EQ(c1, client1->reclaimed());
-        EXPECT_EQ(c2, client2->reclaimed());
-        EXPECT_EQ(c3, client3->reclaimed());
-
-        client1->reset();
-        client2->reset();
-        client3->reset();
-    }
 
     // test set up
     // ---------------------------------------------------------------------------------
@@ -268,7 +306,6 @@
 
     void testOverridePid() {
 
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -293,8 +330,6 @@
     }
 
     void testMarkClientForPendingRemoval() {
-        bool result;
-
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
@@ -307,13 +342,17 @@
 
             // no lower priority client
             CHECK_STATUS_FALSE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
 
             // client marked for pending removal from the same process got reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -331,11 +370,15 @@
             // client marked for pending removal from the same process got reclaimed
             // first, even though there are lower priority process
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // lower priority client got reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -349,17 +392,23 @@
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // No more clients marked for removal
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 1 which still left
             mService->removeClient(kTestPid1, getId(mTestClient1));
@@ -384,14 +433,15 @@
 
     void testGetAllClients() {
         addResource();
-
         MediaResource::Type type = MediaResource::Type::kSecureCodec;
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+
         Vector<std::shared_ptr<IResourceManagerClient> > clients;
-        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &clients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
-        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &clients));
+        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &clients));
 
         EXPECT_EQ(2u, clients.size());
         // (OK to require ordering in clients[], as the pid map is sorted)
@@ -400,7 +450,6 @@
     }
 
     void testReclaimResourceSecure() {
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -417,11 +466,15 @@
 
             // reclaim all secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -439,7 +492,9 @@
 
             // reclaim all secure and non-secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, true /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -458,15 +513,21 @@
 
             // reclaim all non-secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -483,15 +544,21 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -508,20 +575,25 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // secure codec from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another secure codec from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more secure codec, non-secure codec will be reclaimed.
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
         }
     }
 
     void testReclaimResourceNonSecure() {
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -537,11 +609,15 @@
 
             // reclaim all secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -558,15 +634,21 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -582,11 +664,15 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one non secure codec from lowest process got reclaimed
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more non-secure codec, secure codec from lowest priority process will be reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -595,13 +681,17 @@
 
     void testGetLowestPriorityBiggestClient() {
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
         std::shared_ptr<IResourceManagerClient> client;
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
+                &client));
 
         addResource();
 
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, &client));
-        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, subType,
+                &client));
+        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
+                &client));
 
         // kTestPid1 is the lowest priority process with MediaResource::Type::kGraphicMemory.
         // mTestClient1 has the largest MediaResource::Type::kGraphicMemory within kTestPid1.
@@ -614,35 +704,25 @@
         TestProcessInfo processInfo;
 
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
-        EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+        EXPECT_FALSE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
 
         addResource();
 
-        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
         EXPECT_EQ(kTestPid1, pid);
         int priority1;
         processInfo.getPriority(kTestPid1, &priority1);
         EXPECT_EQ(priority1, priority);
 
         type = MediaResource::Type::kNonSecureCodec;
-        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
         EXPECT_EQ(kTestPid2, pid);
         int priority2;
         processInfo.getPriority(kTestPid2, &priority2);
         EXPECT_EQ(priority2, priority);
     }
 
-    void testGetBiggestClient() {
-        MediaResource::Type type = MediaResource::Type::kGraphicMemory;
-        std::shared_ptr<IResourceManagerClient> client;
-        EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client));
-
-        addResource();
-
-        EXPECT_TRUE(mService->getBiggestClient_l(kTestPid2, type, &client));
-        EXPECT_EQ(mTestClient2, client);
-    }
-
     void testIsCallingPriorityHigher() {
         EXPECT_FALSE(mService->isCallingPriorityHigher_l(101, 100));
         EXPECT_FALSE(mService->isCallingPriorityHigher_l(100, 100));
@@ -725,6 +805,361 @@
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
+
+    void testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec() {
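+        // A video codec request must not reclaim clients holding only audio/image codecs.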
+        const std::shared_ptr<IResourceManagerClient>& audioImageTestClient = mTestClient1;
+        const std::shared_ptr<IResourceManagerClient>& videoTestClient = mTestClient2;
+
+        // Create audio and image codec resources
+        std::vector<MediaResourceParcel> audioImageResources;
+        audioImageResources.push_back(createNonSecureAudioCodecResource());
+        audioImageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(audioImageTestClient),
+                audioImageTestClient, audioImageResources);
+
+        // Fail to reclaim a video codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureVideoCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add a video codec resource
+        std::vector<MediaResourceParcel> videoResources;
+        videoResources.push_back(createNonSecureVideoCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoTestClient), videoTestClient,
+                videoResources);
+
+        // Verify that the newly-created video codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the audio and image resources are untouched
+        EXPECT_FALSE(toTestClient(audioImageTestClient)->checkIfReclaimedAndReset());
+        // But the video resource was reclaimed
+        EXPECT_TRUE(toTestClient(videoTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec() {
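+        // An audio codec request must not reclaim clients holding only video/image codecs.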
+        const auto& videoImageTestClient = mTestClient1;
+        const auto& audioTestClient = mTestClient2;
+
+        // Create video and image codec resources
+        std::vector<MediaResourceParcel> videoImageResources;
+        videoImageResources.push_back(createNonSecureVideoCodecResource());
+        videoImageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoImageTestClient),
+                videoImageTestClient, videoImageResources);
+
+        // Fail to reclaim an audio codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureAudioCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add an audio codec resource
+        std::vector<MediaResourceParcel> audioResources;
+        audioResources.push_back(createNonSecureAudioCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid2, getId(audioTestClient), audioTestClient,
+                audioResources);
+
+        // Verify that the newly-created audio codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video and image resources are untouched
+        EXPECT_FALSE(toTestClient(videoImageTestClient)->checkIfReclaimedAndReset());
+        // But the audio resource was reclaimed
+        EXPECT_TRUE(toTestClient(audioTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_withImageCodec_reclaimsOnlyImageCodec() {
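+        // An image codec request must not reclaim clients holding only video/audio codecs.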
+        const auto& videoAudioTestClient = mTestClient1;
+        const auto& imageTestClient = mTestClient2;
+
+        // Create video and audio codec resources
+        std::vector<MediaResourceParcel> videoAudioResources;
+        videoAudioResources.push_back(createNonSecureVideoCodecResource());
+        videoAudioResources.push_back(createNonSecureAudioCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoAudioTestClient),
+                videoAudioTestClient, videoAudioResources);
+
+        // Fail to reclaim an image codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureImageCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add an image codec resource
+        std::vector<MediaResourceParcel> imageResources;
+        imageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid2, getId(imageTestClient), imageTestClient,
+                imageResources);
+
+        // Verify that the newly-created image codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video and audio resources are untouched
+        EXPECT_FALSE(toTestClient(videoAudioTestClient)->checkIfReclaimedAndReset());
+        // But the image resource was reclaimed
+        EXPECT_TRUE(toTestClient(imageTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_whenPartialResourceMatch_reclaims() {
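+        // A reclaim request only needs to match part of a client's resources (here the graphic
+        // memory) for that client to be reclaimed.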
+        const int onlyUid = kTestUid1;
+        const auto onlyClient = createTestClient(kLowPriorityPid);
+
+        std::vector<MediaResourceParcel> ownedResources;
+        ownedResources.push_back(createNonSecureVideoCodecResource());
+        ownedResources.push_back(createGraphicMemoryResource(100));
+        mService->addResource(kLowPriorityPid, onlyUid, getId(onlyClient), onlyClient,
+                ownedResources);
+
+        // Reclaim an image codec instead of the video codec that is owned, but also reclaim
+        // graphics memory, which will trigger the reclaim.
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureImageCodecResource());
+        reclaimResources.push_back(createGraphicMemoryResource(100));
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video codec resources (including the needed graphic memory) are reclaimed
+        EXPECT_TRUE(toTestClient(onlyClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources() {
+        // this test only uses one pid and one uid
+        const int onlyPid = kTestPid1;
+        const int onlyUid = kTestUid1;
+
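+        // For each resource type below, three clients hold amounts 1, 2 and 3; the two smaller
+        // ones are marked for pending removal, the largest stays active. Only the biggest
+        // *marked* client of each type should be reclaimed.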
+        // secure video codec
+        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureVideoActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureVideoCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureVideoMarkedClient),
+                    smallSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureVideoCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureVideoMarkedClient),
+                    largeSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureVideoCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
+                    largestSecureVideoActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureVideoMarkedClient));
+        // don't mark the largest client
+
+        // non-secure video codec
+        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureVideoCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureVideoMarkedClient),
+                    smallNonSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureVideoCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureVideoMarkedClient),
+                    largeNonSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureVideoCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureVideoActiveClient),
+                    largestNonSecureVideoActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureVideoMarkedClient));
+        // don't mark the largest client
+
+        // secure audio codec
+        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureAudioActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureAudioCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureAudioMarkedClient),
+                    smallSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureAudioCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureAudioMarkedClient),
+                    largeSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureAudioCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureAudioActiveClient),
+                    largestSecureAudioActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureAudioMarkedClient));
+        // don't mark the largest client
+
+        // non-secure audio codec
+        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureAudioCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureAudioMarkedClient),
+                    smallNonSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureAudioCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureAudioMarkedClient),
+                    largeNonSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureAudioCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureAudioActiveClient),
+                    largestNonSecureAudioActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureAudioMarkedClient));
+        // don't mark the largest client
+
+        // secure image codec
+        const auto smallSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureImageActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureImageCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureImageMarkedClient),
+                    smallSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureImageCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureImageMarkedClient),
+                    largeSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureImageCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureImageActiveClient),
+                    largestSecureImageActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureImageMarkedClient));
+        // don't mark the largest client
+
+        // non-secure image codec
+        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureImageCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureImageMarkedClient),
+                    smallNonSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureImageCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureImageMarkedClient),
+                    largeNonSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureImageCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureImageActiveClient),
+                    largestNonSecureImageActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureImageMarkedClient));
+        // don't mark the largest client
+
+        // graphic memory
+        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid);
+        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid);
+        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createGraphicMemoryResource(100));
+            mService->addResource(onlyPid, onlyUid, getId(smallGraphicMemoryMarkedClient),
+                    smallGraphicMemoryMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createGraphicMemoryResource(200));
+            mService->addResource(onlyPid, onlyUid, getId(largeGraphicMemoryMarkedClient),
+                    largeGraphicMemoryMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createGraphicMemoryResource(300));
+            mService->addResource(onlyPid, onlyUid, getId(largestGraphicMemoryActiveClient),
+                    largestGraphicMemoryActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallGraphicMemoryMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeGraphicMemoryMarkedClient));
+        // don't mark the largest client
+
+        // DRM session
+        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid);
+        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid);
+        const auto largestDrmSessionActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createDrmSessionResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallDrmSessionMarkedClient),
+                    smallDrmSessionMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createDrmSessionResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeDrmSessionMarkedClient),
+                    largeDrmSessionMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createDrmSessionResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestDrmSessionActiveClient),
+                    largestDrmSessionActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallDrmSessionMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeDrmSessionMarkedClient));
+        // don't mark the largest client
+
+        // battery
+        const auto batteryMarkedClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createBatteryResource());
+            mService->addResource(onlyPid, onlyUid, getId(batteryMarkedClient),
+                    batteryMarkedClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(batteryMarkedClient));
+
+        // CPU boost
+        const auto cpuBoostMarkedClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createCpuBoostResource());
+            mService->addResource(onlyPid, onlyUid, getId(cpuBoostMarkedClient),
+                    cpuBoostMarkedClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(cpuBoostMarkedClient));
+
+        // now we expect that we only reclaim resources from the biggest marked client
+        EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(onlyPid).isOk());
+        // secure video codec
+        EXPECT_FALSE(toTestClient(smallSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureVideoActiveClient)->checkIfReclaimedAndReset());
+        // non-secure video codec
+        EXPECT_FALSE(toTestClient(smallNonSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureVideoActiveClient)->checkIfReclaimedAndReset());
+        // secure audio codec
+        EXPECT_FALSE(toTestClient(smallSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureAudioActiveClient)->checkIfReclaimedAndReset());
+        // non-secure audio codec
+        EXPECT_FALSE(toTestClient(smallNonSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureAudioActiveClient)->checkIfReclaimedAndReset());
+        // secure image codec
+        EXPECT_FALSE(toTestClient(smallSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureImageActiveClient)->checkIfReclaimedAndReset());
+        // non-secure image codec
+        EXPECT_FALSE(toTestClient(smallNonSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureImageActiveClient)->checkIfReclaimedAndReset());
+        // graphic memory
+        EXPECT_FALSE(toTestClient(smallGraphicMemoryMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeGraphicMemoryMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestGraphicMemoryActiveClient)->checkIfReclaimedAndReset());
+        // DRM session
+        EXPECT_FALSE(toTestClient(smallDrmSessionMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeDrmSessionMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestDrmSessionActiveClient)->checkIfReclaimedAndReset());
+        // battery is not expected to be reclaimed when marked as pending removal
+        EXPECT_FALSE(toTestClient(batteryMarkedClient)->checkIfReclaimedAndReset());
+        // CPU boost is not expected to be reclaimed when marked as pending removal
+        EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
+    }
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
@@ -768,19 +1203,15 @@
     testGetLowestPriorityPid();
 }
 
-TEST_F(ResourceManagerServiceTest, getBiggestClient_l) {
-    testGetBiggestClient();
-}
-
 TEST_F(ResourceManagerServiceTest, isCallingPriorityHigher_l) {
     testIsCallingPriorityHigher();
 }
 
-TEST_F(ResourceManagerServiceTest, testBatteryStats) {
+TEST_F(ResourceManagerServiceTest, batteryStats) {
     testBatteryStats();
 }
 
-TEST_F(ResourceManagerServiceTest, testCpusetBoost) {
+TEST_F(ResourceManagerServiceTest, cpusetBoost) {
     testCpusetBoost();
 }
 
@@ -792,4 +1223,25 @@
     testMarkClientForPendingRemoval();
 }
 
+TEST_F(ResourceManagerServiceTest, reclaimResources_withVideoCodec_reclaimsOnlyVideoCodec) {
+    testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_withAudioCodec_reclaimsOnlyAudioCodec) {
+    testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_withImageCodec_reclaimsOnlyImageCodec) {
+    testReclaimResources_withImageCodec_reclaimsOnlyImageCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_whenPartialResourceMatch_reclaims) {
+    testReclaimResources_whenPartialResourceMatch_reclaims();
+}
+
+TEST_F(ResourceManagerServiceTest,
+        reclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources) {
+    testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index acd9df1..003569d 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -116,6 +116,26 @@
 
 const EventTracker::Event EventTracker::NoEvent;
 
+static MediaResource createSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
+static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
 // Operators for GTest macros.
 bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
     return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
@@ -233,30 +253,30 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
 
     // Add non-secure video codec.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
 
     // Add additional audio codecs, should be ignored.
-    resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
-    resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+    resources.push_back(createSecureAudioCodecResource());
+    resources.push_back(createNonSecureAudioCodecResource());
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
@@ -276,9 +296,9 @@
 
     // Add multiple secure & non-secure video codecs.
     // Multiple entries of the same type should be merged, count should be propagated correctly.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(3)};
     observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
@@ -300,7 +320,7 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec to client1.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -322,7 +342,7 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add non-secure video codec to client2.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
@@ -344,24 +364,24 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add secure & non-secure video codecs, plus audio codecs (that's ignored).
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, should have no event.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+    resources = {createSecureAudioCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove the other audio codec and the secure video codec, only secure video codec
     // removal should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -386,10 +406,10 @@
 
     // Add multiple secure & non-secure video codecs, plus audio codecs (that's ignored).
     // (ResourceManager will merge these internally.)
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(4),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
@@ -400,10 +420,10 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
     // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(2)};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
@@ -443,8 +463,8 @@
     std::vector<MediaResourceParcel> resources;
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);