Merge "IOMX: add INTERNAL_OPTION_COLOR_ASPECTS to setParamaters" into nyc-dev
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index d7035a7..a1d3bf7 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -647,6 +647,8 @@
     ACAMERA_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO,
     ACAMERA_CONTROL_SCENE_MODE_HDR,
     ACAMERA_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT,
+    ACAMERA_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START              = 100,
+    ACAMERA_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END                = 127,
 } acamera_metadata_enum_android_control_scene_mode_t;
 
 // ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
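
The two new constants reserve values 100 through 127 for device-specific scene modes, keeping vendor extensions clear of future platform additions. As a minimal sketch (the helper below is hypothetical, not part of the NDK), a client could classify a scene-mode value reported in the static metadata like this:

    #include <stdint.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Hypothetical helper: true if the scene mode is a device-specific extension.
    static bool isDeviceCustomSceneMode(uint8_t sceneMode) {
        return sceneMode >= ACAMERA_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START &&
               sceneMode <= ACAMERA_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END;
    }
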
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index b0bc2d0..b034c8e 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -596,7 +596,13 @@
     size_t                  mFrameSize;         // app-level frame size == AudioFlinger frame size
     uint32_t                mLatency;           // in ms
     audio_channel_mask_t    mChannelMask;
-    audio_input_flags_t     mFlags;
+
+    audio_input_flags_t     mFlags;                 // same as mOrigFlags, except for bits that may
+                                                    // be denied by client or server, such as
+                                                    // AUDIO_INPUT_FLAG_FAST.  mLock must be
+                                                    // held to read or write those bits reliably.
+    audio_input_flags_t     mOrigFlags;             // as specified in constructor or set(), const
+
     int                     mSessionId;
     transfer_type           mTransfer;
 
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index e0fb603..69dc062 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -949,9 +949,11 @@
 
     uint32_t                mUnderrunCountOffset;   // updated when restoring tracks
 
-    audio_output_flags_t    mFlags;
-        // const after set(), except for bits AUDIO_OUTPUT_FLAG_FAST and AUDIO_OUTPUT_FLAG_OFFLOAD.
-        // mLock must be held to read or write those bits reliably.
+    audio_output_flags_t    mFlags;                 // same as mOrigFlags, except for bits that may
+                                                    // be denied by client or server, such as
+                                                    // AUDIO_OUTPUT_FLAG_FAST.  mLock must be
+                                                    // held to read or write those bits reliably.
+    audio_output_flags_t    mOrigFlags;             // as specified in constructor or set(), const
 
     bool                    mDoNotReconnect;
 
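
AudioTrack and AudioRecord now share the same flag bookkeeping: mOrigFlags holds exactly what the caller requested in set(), mFlags reflects what was actually granted, and restoring a dead track resets mFlags to mOrigFlags so the original request is retried. A minimal sketch of the pattern, using the output flag type (illustrative only, not the actual AudioTrack code):

    #include <system/audio.h>

    struct FlagStateSketch {
        audio_output_flags_t mOrigFlags;  // as requested in set(); never modified afterwards
        audio_output_flags_t mFlags;      // may lose bits denied by the client or the server

        void set(audio_output_flags_t flags) { mOrigFlags = mFlags = flags; }

        void onFastDenied() {
            // strip only the denied bit; any other requested flags are kept
            mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
        }

        void onTrackRestored() {
            // new behavior: the full original request is retried when the
            // IAudioTrack (or IAudioRecord) is re-created
            mFlags = mOrigFlags;
        }
    };
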
diff --git a/include/media/MediaResource.h b/include/media/MediaResource.h
index 20f2cad..1957a45 100644
--- a/include/media/MediaResource.h
+++ b/include/media/MediaResource.h
@@ -23,17 +23,24 @@
 
 namespace android {
 
-extern const char kResourceSecureCodec[];
-extern const char kResourceNonSecureCodec[];
-extern const char kResourceAudioCodec[];
-extern const char kResourceVideoCodec[];
-extern const char kResourceGraphicMemory[];
-
 class MediaResource {
 public:
+    enum Type {
+        kUnspecified = 0,
+        kSecureCodec,
+        kNonSecureCodec,
+        kGraphicMemory
+    };
+
+    enum SubType {
+        kUnspecifiedSubType = 0,
+        kAudioCodec,
+        kVideoCodec
+    };
+
     MediaResource();
-    MediaResource(String8 type, uint64_t value);
-    MediaResource(String8 type, String8 subType, uint64_t value);
+    MediaResource(Type type, uint64_t value);
+    MediaResource(Type type, SubType subType, uint64_t value);
 
     void readFromParcel(const Parcel &parcel);
     void writeToParcel(Parcel *parcel) const;
@@ -43,11 +50,30 @@
     bool operator==(const MediaResource &other) const;
     bool operator!=(const MediaResource &other) const;
 
-    String8 mType;
-    String8 mSubType;
+    Type mType;
+    SubType mSubType;
     uint64_t mValue;
 };
 
+inline static const char *asString(MediaResource::Type i, const char *def = "??") {
+    switch (i) {
+        case MediaResource::kUnspecified:    return "unspecified";
+        case MediaResource::kSecureCodec:    return "secure-codec";
+        case MediaResource::kNonSecureCodec: return "non-secure-codec";
+        case MediaResource::kGraphicMemory:  return "graphic-memory";
+        default:                             return def;
+    }
+}
+
+inline static const char *asString(MediaResource::SubType i, const char *def = "??") {
+    switch (i) {
+        case MediaResource::kUnspecifiedSubType: return "unspecified";
+        case MediaResource::kAudioCodec:         return "audio-codec";
+        case MediaResource::kVideoCodec:         return "video-codec";
+        default:                                 return def;
+    }
+}
+
 }; // namespace android
 
 #endif  // ANDROID_MEDIA_RESOURCE_H
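
With the String8 constants gone, callers construct resources from the typed enums and can render them with the new asString() helpers. A minimal usage sketch (the function below is illustrative, not part of the API):

    #include <stdio.h>
    #include <media/MediaResource.h>

    using namespace android;

    static void logCodecResource(bool secure, bool isVideo) {
        MediaResource res(
                secure ? MediaResource::kSecureCodec : MediaResource::kNonSecureCodec,
                isVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec,
                1 /* one codec instance */);
        // toString() now formats the enums via asString(), e.g. "secure-codec/video-codec:1"
        printf("%s\n", res.toString().string());
    }
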
diff --git a/include/media/stagefright/CodecBase.h b/include/media/stagefright/CodecBase.h
index 3de0d21..2d28432 100644
--- a/include/media/stagefright/CodecBase.h
+++ b/include/media/stagefright/CodecBase.h
@@ -20,9 +20,12 @@
 
 #include <stdint.h>
 
+#define STRINGIFY_ENUMS
+
 #include <media/IOMX.h>
 #include <media/MediaCodecInfo.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/hardware/HardwareAPI.h>
 
 #include <utils/NativeHandle.h>
@@ -34,7 +37,7 @@
 struct ABuffer;
 struct PersistentSurface;
 
-struct CodecBase : public AHandler {
+struct CodecBase : public AHandler, /* static */ ColorUtils {
     enum {
         kWhatFillThisBuffer      = 'fill',
         kWhatDrainThisBuffer     = 'drai',
@@ -97,113 +100,6 @@
      * Codec-related defines
      */
 
-    /**********************************************************************************************/
-
-    /*
-     * Media-platform color constants. MediaCodec uses (an extended version of) platform-defined
-     * constants that are derived from HAL_DATASPACE, since these are directly exposed to the user.
-     * We extend the values to maintain the richer set of information defined inside media
-     * containers and bitstreams that are not supported by the platform. We also expect vendors
-     * to extend some of these values with vendor-specific values. These are separated into a
-     * vendor-extension section so they won't collide with future platform values.
-     */
-
-    enum ColorStandard : uint32_t {
-        kColorStandardUnspecified =
-                HAL_DATASPACE_STANDARD_UNSPECIFIED >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT709 =     HAL_DATASPACE_STANDARD_BT709 >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT601_625 = HAL_DATASPACE_STANDARD_BT601_625 >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT601_625_Unadjusted =
-                HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT601_525 = HAL_DATASPACE_STANDARD_BT601_525 >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT601_525_Unadjusted =
-                HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT2020 =    HAL_DATASPACE_STANDARD_BT2020 >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT2020Constant =
-                HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardBT470M =    HAL_DATASPACE_STANDARD_BT470M >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardFilm =      HAL_DATASPACE_STANDARD_FILM >> HAL_DATASPACE_STANDARD_SHIFT,
-        kColorStandardMax =       HAL_DATASPACE_STANDARD_MASK >> HAL_DATASPACE_STANDARD_SHIFT,
-
-        /* This marks a section of color-standard values that are not supported by graphics HAL,
-           but track defined color primaries-matrix coefficient combinations in media.
-           These are stable for a given release. */
-        kColorStandardExtendedStart = kColorStandardMax + 1,
-
-        /* This marks a section of color-standard values that are not supported by graphics HAL
-           nor using media defined color primaries or matrix coefficients. These may differ per
-           device. */
-        kColorStandardVendorStart = 0x10000,
-    };
-
-    enum ColorTransfer : uint32_t  {
-        kColorTransferUnspecified =
-                HAL_DATASPACE_TRANSFER_UNSPECIFIED >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferLinear =      HAL_DATASPACE_TRANSFER_LINEAR >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferSRGB =        HAL_DATASPACE_TRANSFER_SRGB >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferSMPTE_170M =
-                HAL_DATASPACE_TRANSFER_SMPTE_170M >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferGamma22 =     HAL_DATASPACE_TRANSFER_GAMMA2_2 >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferGamma28 =     HAL_DATASPACE_TRANSFER_GAMMA2_8 >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferST2084 =      HAL_DATASPACE_TRANSFER_ST2084 >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferHLG =         HAL_DATASPACE_TRANSFER_HLG >> HAL_DATASPACE_TRANSFER_SHIFT,
-        kColorTransferMax =         HAL_DATASPACE_TRANSFER_MASK >> HAL_DATASPACE_TRANSFER_SHIFT,
-
-        /* This marks a section of color-transfer values that are not supported by graphics HAL,
-           but track media-defined color-transfer. These are stable for a given release. */
-        kColorTransferExtendedStart = kColorTransferMax + 1,
-
-        /* This marks a section of color-transfer values that are not supported by graphics HAL
-           nor defined by media. These may differ per device. */
-        kColorTransferVendorStart = 0x10000,
-    };
-
-    enum ColorRange : uint32_t  {
-        kColorRangeUnspecified = HAL_DATASPACE_RANGE_UNSPECIFIED >> HAL_DATASPACE_RANGE_SHIFT,
-        kColorRangeFull =        HAL_DATASPACE_RANGE_FULL >> HAL_DATASPACE_RANGE_SHIFT,
-        kColorRangeLimited =     HAL_DATASPACE_RANGE_LIMITED >> HAL_DATASPACE_RANGE_SHIFT,
-        kColorRangeMax =         HAL_DATASPACE_RANGE_MASK >> HAL_DATASPACE_RANGE_SHIFT,
-
-        /* This marks a section of color-transfer values that are not supported by graphics HAL,
-           but track media-defined color-transfer. These are stable for a given release. */
-        kColorRangeExtendedStart = kColorRangeMax + 1,
-
-        /* This marks a section of color-transfer values that are not supported by graphics HAL
-           nor defined by media. These may differ per device. */
-        kColorRangeVendorStart = 0x10000,
-    };
-
-    /*
-     * Static utilities for codec support
-     */
-
-    // using int32_t for media range/standard/transfers to denote extended ranges
-    static int32_t wrapColorAspectsIntoColorStandard(
-            ColorAspects::Primaries primaries, ColorAspects::MatrixCoeffs coeffs);
-    static int32_t wrapColorAspectsIntoColorRange(ColorAspects::Range range);
-    static int32_t wrapColorAspectsIntoColorTransfer(ColorAspects::Transfer transfer);
-
-    static status_t unwrapColorAspectsFromColorRange(
-            int32_t range, ColorAspects::Range *aspect);
-    static status_t unwrapColorAspectsFromColorTransfer(
-            int32_t transfer, ColorAspects::Transfer *aspect);
-    static status_t unwrapColorAspectsFromColorStandard(
-            int32_t standard,
-            ColorAspects::Primaries *primaries, ColorAspects::MatrixCoeffs *coeffs);
-
-    static status_t convertPlatformColorAspectsToCodecAspects(
-            int32_t range, int32_t standard, int32_t transfer, ColorAspects &aspects);
-    static status_t convertCodecColorAspectsToPlatformAspects(
-            const ColorAspects &aspects,
-            int32_t *range, int32_t *standard, int32_t *transfer);
-
-    // updates unspecified range, standard and transfer values to their defaults
-    static void setDefaultPlatformColorAspectsIfNeeded(
-            int32_t &range, int32_t &standard, int32_t &transfer,
-            int32_t width, int32_t height);
-    static void setDefaultCodecColorAspectsIfNeeded(
-            ColorAspects &aspects, int32_t width, int32_t height);
-
 protected:
     CodecBase();
     virtual ~CodecBase();
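
Even though the color enums and helpers move to ColorUtils (added below), CodecBase keeps deriving from it so that subclasses such as ACodec can continue to use the constants and static methods unqualified. An illustrative (hypothetical) subclass:

    #include <media/stagefright/CodecBase.h>

    namespace android {

    struct MyCodecSketch : public CodecBase {   // hypothetical; pure virtuals omitted
        int32_t defaultStandard() const {
            // resolves to ColorUtils::kColorStandardBT709 through the base class,
            // so code written against the old CodecBase constants still compiles
            return kColorStandardBT709;
        }
    };

    }  // namespace android
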
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index 2bb1291..fe579b7 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -394,7 +394,7 @@
     bool isExecuting() const;
 
     uint64_t getGraphicBufferSize();
-    void addResource(const String8 &type, const String8 &subtype, uint64_t value);
+    void addResource(MediaResource::Type type, MediaResource::SubType subtype, uint64_t value);
 
     bool hasPendingBuffer(int portIndex);
     bool hasPendingBuffer();
diff --git a/include/media/stagefright/foundation/ColorUtils.h b/include/media/stagefright/foundation/ColorUtils.h
new file mode 100644
index 0000000..b95c80b
--- /dev/null
+++ b/include/media/stagefright/foundation/ColorUtils.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef COLOR_UTILS_H_
+
+#define COLOR_UTILS_H_
+
+#include <stdint.h>
+
+#define STRINGIFY_ENUMS
+
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <system/graphics.h>
+
+namespace android {
+
+struct ColorUtils {
+    /*
+     * Media-platform color constants. MediaCodec uses (an extended version of) platform-defined
+     * constants that are derived from HAL_DATASPACE, since these are directly exposed to the user.
+     * We extend the values to maintain the richer set of information defined inside media
+     * containers and bitstreams that are not supported by the platform. We also expect vendors
+     * to extend some of these values with vendor-specific values. These are separated into a
+     * vendor-extension section so they won't collide with future platform values.
+     */
+
+#define GET_HAL_ENUM(class, name) HAL_DATASPACE_##class##name
+#define GET_HAL_BITFIELD(class, name) (GET_HAL_ENUM(class, _##name) >> GET_HAL_ENUM(class, _SHIFT))
+
+    enum ColorStandard : uint32_t {
+        kColorStandardUnspecified =          GET_HAL_BITFIELD(STANDARD, UNSPECIFIED),
+        kColorStandardBT709 =                GET_HAL_BITFIELD(STANDARD, BT709),
+        kColorStandardBT601_625 =            GET_HAL_BITFIELD(STANDARD, BT601_625),
+        kColorStandardBT601_625_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_625_UNADJUSTED),
+        kColorStandardBT601_525 =            GET_HAL_BITFIELD(STANDARD, BT601_525),
+        kColorStandardBT601_525_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_525_UNADJUSTED),
+        kColorStandardBT2020 =               GET_HAL_BITFIELD(STANDARD, BT2020),
+        kColorStandardBT2020Constant =       GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE),
+        kColorStandardBT470M =               GET_HAL_BITFIELD(STANDARD, BT470M),
+        kColorStandardFilm =                 GET_HAL_BITFIELD(STANDARD, FILM),
+        kColorStandardMax =                  GET_HAL_BITFIELD(STANDARD, MASK),
+
+        /* This marks a section of color-standard values that are not supported by graphics HAL,
+           but track defined color primaries-matrix coefficient combinations in media.
+           These are stable for a given release. */
+        kColorStandardExtendedStart = kColorStandardMax + 1,
+
+        /* This marks a section of color-standard values that are not supported by graphics HAL
+           nor using media defined color primaries or matrix coefficients. These may differ per
+           device. */
+        kColorStandardVendorStart = 0x10000,
+    };
+
+    enum ColorTransfer : uint32_t  {
+        kColorTransferUnspecified = GET_HAL_BITFIELD(TRANSFER, UNSPECIFIED),
+        kColorTransferLinear =      GET_HAL_BITFIELD(TRANSFER, LINEAR),
+        kColorTransferSRGB =        GET_HAL_BITFIELD(TRANSFER, SRGB),
+        kColorTransferSMPTE_170M =  GET_HAL_BITFIELD(TRANSFER, SMPTE_170M),
+        kColorTransferGamma22 =     GET_HAL_BITFIELD(TRANSFER, GAMMA2_2),
+        kColorTransferGamma28 =     GET_HAL_BITFIELD(TRANSFER, GAMMA2_8),
+        kColorTransferST2084 =      GET_HAL_BITFIELD(TRANSFER, ST2084),
+        kColorTransferHLG =         GET_HAL_BITFIELD(TRANSFER, HLG),
+        kColorTransferMax =         GET_HAL_BITFIELD(TRANSFER, MASK),
+
+        /* This marks a section of color-transfer values that are not supported by graphics HAL,
+           but track media-defined color-transfer. These are stable for a given release. */
+        kColorTransferExtendedStart = kColorTransferMax + 1,
+
+        /* This marks a section of color-transfer values that are not supported by graphics HAL
+           nor defined by media. These may differ per device. */
+        kColorTransferVendorStart = 0x10000,
+    };
+
+    enum ColorRange : uint32_t  {
+        kColorRangeUnspecified = GET_HAL_BITFIELD(RANGE, UNSPECIFIED),
+        kColorRangeFull =        GET_HAL_BITFIELD(RANGE, FULL),
+        kColorRangeLimited =     GET_HAL_BITFIELD(RANGE, LIMITED),
+        kColorRangeMax =         GET_HAL_BITFIELD(RANGE, MASK),
+
+        /* This marks a section of color-range values that are not supported by graphics HAL,
+           but track media-defined color-range. These are stable for a given release. */
+        kColorRangeExtendedStart = kColorRangeMax + 1,
+
+        /* This marks a section of color-range values that are not supported by graphics HAL
+           nor defined by media. These may differ per device. */
+        kColorRangeVendorStart = 0x10000,
+    };
+
+#undef GET_HAL_BITFIELD
+#undef GET_HAL_ENUM
+
+    /*
+     * Static utilities for codec support
+     */
+
+    // using int32_t for media range/standard/transfers to denote extended ranges
+    // wrap methods change invalid aspects to the Unspecified value
+    static int32_t wrapColorAspectsIntoColorStandard(
+            ColorAspects::Primaries primaries, ColorAspects::MatrixCoeffs coeffs);
+    static int32_t wrapColorAspectsIntoColorRange(ColorAspects::Range range);
+    static int32_t wrapColorAspectsIntoColorTransfer(ColorAspects::Transfer transfer);
+
+    // unwrap methods change invalid aspects to the Other value
+    static status_t unwrapColorAspectsFromColorRange(
+            int32_t range, ColorAspects::Range *aspect);
+    static status_t unwrapColorAspectsFromColorTransfer(
+            int32_t transfer, ColorAspects::Transfer *aspect);
+    static status_t unwrapColorAspectsFromColorStandard(
+            int32_t standard,
+            ColorAspects::Primaries *primaries, ColorAspects::MatrixCoeffs *coeffs);
+
+    static status_t convertPlatformColorAspectsToCodecAspects(
+            int32_t range, int32_t standard, int32_t transfer, ColorAspects &aspects);
+    static status_t convertCodecColorAspectsToPlatformAspects(
+            const ColorAspects &aspects, int32_t *range, int32_t *standard, int32_t *transfer);
+
+    // updates unspecified range, standard and transfer values to their defaults
+    static void setDefaultPlatformColorAspectsIfNeeded(
+            int32_t &range, int32_t &standard, int32_t &transfer, int32_t width, int32_t height);
+    static void setDefaultCodecColorAspectsIfNeeded(
+            ColorAspects &aspects, int32_t width, int32_t height);
+};
+
+inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
+    using namespace android;
+    switch (i) {
+        case ColorUtils::kColorStandardUnspecified:          return "Unspecified";
+        case ColorUtils::kColorStandardBT709:                return "BT709";
+        case ColorUtils::kColorStandardBT601_625:            return "BT601_625";
+        case ColorUtils::kColorStandardBT601_625_Unadjusted: return "BT601_625_Unadjusted";
+        case ColorUtils::kColorStandardBT601_525:            return "BT601_525";
+        case ColorUtils::kColorStandardBT601_525_Unadjusted: return "BT601_525_Unadjusted";
+        case ColorUtils::kColorStandardBT2020:               return "BT2020";
+        case ColorUtils::kColorStandardBT2020Constant:       return "BT2020Constant";
+        case ColorUtils::kColorStandardBT470M:               return "BT470M";
+        case ColorUtils::kColorStandardFilm:                 return "Film";
+        default:                                            return def;
+    }
+}
+
+inline static const char *asString(android::ColorUtils::ColorTransfer i, const char *def = "??") {
+    using namespace android;
+    switch (i) {
+        case ColorUtils::kColorTransferUnspecified: return "Unspecified";
+        case ColorUtils::kColorTransferLinear:      return "Linear";
+        case ColorUtils::kColorTransferSRGB:        return "SRGB";
+        case ColorUtils::kColorTransferSMPTE_170M:  return "SMPTE_170M";
+        case ColorUtils::kColorTransferGamma22:     return "Gamma22";
+        case ColorUtils::kColorTransferGamma28:     return "Gamma28";
+        case ColorUtils::kColorTransferST2084:      return "ST2084";
+        case ColorUtils::kColorTransferHLG:         return "HLG";
+        default:                                   return def;
+    }
+}
+
+inline static const char *asString(android::ColorUtils::ColorRange i, const char *def = "??") {
+    using namespace android;
+    switch (i) {
+        case ColorUtils::kColorRangeUnspecified: return "Unspecified";
+        case ColorUtils::kColorRangeFull:        return "Full";
+        case ColorUtils::kColorRangeLimited:     return "Limited";
+        default:                                return def;
+    }
+}
+
+}  // namespace android
+
+#endif  // COLOR_UTILS_H_
+
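
The GET_HAL_* macros only abbreviate the hand-written shifts that the old CodecBase.h spelled out, so every constant keeps its previous numeric value. An illustrative compile-time check (not part of the patch):

    #include <media/stagefright/foundation/ColorUtils.h>
    #include <system/graphics.h>

    // GET_HAL_BITFIELD(STANDARD, BT709)
    //   -> (GET_HAL_ENUM(STANDARD, _BT709) >> GET_HAL_ENUM(STANDARD, _SHIFT))
    //   -> (HAL_DATASPACE_STANDARD_BT709 >> HAL_DATASPACE_STANDARD_SHIFT)
    static_assert(android::ColorUtils::kColorStandardBT709
            == (HAL_DATASPACE_STANDARD_BT709 >> HAL_DATASPACE_STANDARD_SHIFT),
            "macro expansion must match the old hand-written value");
    static_assert(android::ColorUtils::kColorTransferHLG
            == (HAL_DATASPACE_TRANSFER_HLG >> HAL_DATASPACE_TRANSFER_SHIFT),
            "macro expansion must match the old hand-written value");
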
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index fef20d9..9eb5ec7 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -246,7 +246,7 @@
         mClientPid = pid;
     }
 
-    mFlags = flags;
+    mOrigFlags = mFlags = flags;
     mCbf = cbf;
 
     if (cbf != NULL) {
@@ -518,6 +518,9 @@
     }
     audio_io_handle_t input;
 
+    // mFlags (not mOrigFlags) is modified depending on whether the FAST request is accepted.
+    // If the FAST request is denied, it will be requested again when IAudioRecord is re-created.
+
     status_t status;
     status = AudioSystem::getInputForAttr(&mAttributes, &input,
                                         (audio_session_t)mSessionId,
@@ -569,7 +572,6 @@
             ALOGW("AUDIO_INPUT_FLAG_FAST denied by client; transfer %d, "
                 "track %u Hz, input %u Hz",
                 mTransfer, mSampleRate, afSampleRate);
-            // once denied, do not request again if IAudioRecord is re-created
             mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
         }
     }
@@ -669,8 +671,7 @@
             ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu", frameCount);
             mAwaitBoost = true;
         } else {
-            ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
-            // once denied, do not request again if IAudioRecord is re-created
+            ALOGW("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
             mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
         }
     }
@@ -1140,6 +1141,8 @@
     ALOGW("dead IAudioRecord, creating a new one from %s()", from);
     ++mSequence;
 
+    mFlags = mOrigFlags;
+
     // if the new IAudioRecord is created, openRecord_l() will modify the
     // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
     // It will also delete the strong references on previous IAudioRecord and IMemory
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 8e02900..423273d 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -455,7 +455,7 @@
         mClientPid = pid;
     }
     mAuxEffectId = 0;
-    mFlags = flags;
+    mOrigFlags = mFlags = flags;
     mCbf = cbf;
 
     if (cbf != NULL) {
@@ -1153,6 +1153,9 @@
     audio_stream_type_t streamType = mStreamType;
     audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
 
+    // mFlags (not mOrigFlags) is modified depending on whether the FAST request is accepted.
+    // If the FAST request is denied, it will be requested again when IAudioTrack is re-created.
+
     status_t status;
     status = AudioSystem::getOutputForAttr(attr, &output,
                                            (audio_session_t)mSessionId, &streamType, mClientUid,
@@ -1211,7 +1214,6 @@
             ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client; transfer %d, "
                 "track %u Hz, output %u Hz",
                 mTransfer, mSampleRate, mAfSampleRate);
-            // once denied, do not request again if IAudioTrack is re-created
             mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
         }
     }
@@ -1353,31 +1355,10 @@
                 mAwaitBoost = true;
             }
         } else {
-            ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
-            // once denied, do not request again if IAudioTrack is re-created
+            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
             mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
         }
     }
-    if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
-        if (trackFlags & IAudioFlinger::TRACK_OFFLOAD) {
-            ALOGV("AUDIO_OUTPUT_FLAG_OFFLOAD successful");
-        } else {
-            ALOGW("AUDIO_OUTPUT_FLAG_OFFLOAD denied by server");
-            mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
-            // FIXME This is a warning, not an error, so don't return error status
-            //return NO_INIT;
-        }
-    }
-    if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
-        if (trackFlags & IAudioFlinger::TRACK_DIRECT) {
-            ALOGV("AUDIO_OUTPUT_FLAG_DIRECT successful");
-        } else {
-            ALOGW("AUDIO_OUTPUT_FLAG_DIRECT denied by server");
-            mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-            // FIXME This is a warning, not an error, so don't return error status
-            //return NO_INIT;
-        }
-    }
 
     // Make sure that application is notified with sufficient margin before underrun.
     // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
@@ -2089,6 +2070,8 @@
         mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount);
     }
 
+    mFlags = mOrigFlags;
+
     // If a new IAudioTrack is successfully created, createTrack_l() will modify the
     // following member variables: mAudioTrack, mCblkMemory and mCblk.
     // It will also delete the strong references on previous IAudioTrack and IMemory.
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index 40ec0cb..e636a50 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -21,38 +21,36 @@
 
 namespace android {
 
-const char kResourceSecureCodec[] = "secure-codec";
-const char kResourceNonSecureCodec[] = "non-secure-codec";
-const char kResourceAudioCodec[] = "audio-codec";
-const char kResourceVideoCodec[] = "video-codec";
-const char kResourceGraphicMemory[] = "graphic-memory";
+MediaResource::MediaResource()
+        : mType(kUnspecified),
+          mSubType(kUnspecifiedSubType),
+          mValue(0) {}
 
-MediaResource::MediaResource() : mValue(0) {}
-
-MediaResource::MediaResource(String8 type, uint64_t value)
+MediaResource::MediaResource(Type type, uint64_t value)
         : mType(type),
+          mSubType(kUnspecifiedSubType),
           mValue(value) {}
 
-MediaResource::MediaResource(String8 type, String8 subType, uint64_t value)
+MediaResource::MediaResource(Type type, SubType subType, uint64_t value)
         : mType(type),
           mSubType(subType),
           mValue(value) {}
 
 void MediaResource::readFromParcel(const Parcel &parcel) {
-    mType = parcel.readString8();
-    mSubType = parcel.readString8();
+    mType = static_cast<Type>(parcel.readInt32());
+    mSubType = static_cast<SubType>(parcel.readInt32());
     mValue = parcel.readUint64();
 }
 
 void MediaResource::writeToParcel(Parcel *parcel) const {
-    parcel->writeString8(mType);
-    parcel->writeString8(mSubType);
+    parcel->writeInt32(static_cast<int32_t>(mType));
+    parcel->writeInt32(static_cast<int32_t>(mSubType));
     parcel->writeUint64(mValue);
 }
 
 String8 MediaResource::toString() const {
     String8 str;
-    str.appendFormat("%s/%s:%llu", mType.string(), mSubType.string(), (unsigned long long)mValue);
+    str.appendFormat("%s/%s:%llu", asString(mType), asString(mSubType), (unsigned long long)mValue);
     return str;
 }
 
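
Since the parcel layout changes from two String8 fields to two int32 enum values plus the uint64 value, both sides of the binder call have to be updated together. A minimal round-trip sketch (illustrative; roundTrip() is not part of the API):

    #include <binder/Parcel.h>
    #include <media/MediaResource.h>

    using namespace android;

    static MediaResource roundTrip(const MediaResource &in) {
        Parcel parcel;
        in.writeToParcel(&parcel);   // int32(type), int32(subType), uint64(value)
        parcel.setDataPosition(0);   // rewind before reading back
        MediaResource out;
        out.readFromParcel(parcel);
        return out;                  // equal to 'in' under the existing operator==
    }
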
diff --git a/media/libstagefright/CodecBase.cpp b/media/libstagefright/CodecBase.cpp
index c9de2b0..f729d4d 100644
--- a/media/libstagefright/CodecBase.cpp
+++ b/media/libstagefright/CodecBase.cpp
@@ -20,8 +20,6 @@
 #include <inttypes.h>
 
 #include <media/stagefright/CodecBase.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/ALookup.h>
 
 namespace android {
 
@@ -37,277 +35,4 @@
 CodecBase::PortDescription::~PortDescription() {
 }
 
-/***************************************** COLOR SUPPORT *****************************************/
-
-// shortcut names for brevity in the following tables
-typedef ColorAspects CA;
-typedef CodecBase CB;
-
-ALookup<CB::ColorRange, CA::Range> sRanges{
-    {
-        { CB::kColorRangeLimited, CA::RangeLimited },
-        { CB::kColorRangeFull, CA::RangeFull },
-        { CB::kColorRangeUnspecified, CA::RangeUnspecified },
-    }
-};
-
-ALookup<CB::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>> sStandards {
-    {
-        { CB::kColorStandardUnspecified,    { CA::PrimariesUnspecified, CA::MatrixUnspecified } },
-        { CB::kColorStandardBT709,          { CA::PrimariesBT709_5, CA::MatrixBT709_5 } },
-        { CB::kColorStandardBT601_625,      { CA::PrimariesBT601_6_625, CA::MatrixBT601_6 } },
-        { CB::kColorStandardBT601_625_Unadjusted,
-                                            // this is a really close match
-                                            { CA::PrimariesBT601_6_625, CA::MatrixBT709_5 } },
-        { CB::kColorStandardBT601_525,      { CA::PrimariesBT601_6_525, CA::MatrixBT601_6 } },
-        { CB::kColorStandardBT601_525_Unadjusted,
-                                            { CA::PrimariesBT601_6_525, CA::MatrixSMPTE240M } },
-        { CB::kColorStandardBT2020,         { CA::PrimariesBT2020, CA::MatrixBT2020 } },
-        { CB::kColorStandardBT2020Constant, { CA::PrimariesBT2020, CA::MatrixBT2020Constant } },
-        { CB::kColorStandardBT470M,         { CA::PrimariesBT470_6M, CA::MatrixBT470_6M } },
-        // NOTE: there is no close match to the matrix used by standard film, chose closest
-        { CB::kColorStandardFilm,           { CA::PrimariesGenericFilm, CA::MatrixBT2020 } },
-    }
-};
-
-ALookup<CB::ColorTransfer, CA::Transfer> sTransfers{
-    {
-        { CB::kColorTransferUnspecified,    CA::TransferUnspecified },
-        { CB::kColorTransferLinear,         CA::TransferLinear },
-        { CB::kColorTransferSRGB,           CA::TransferSRGB },
-        { CB::kColorTransferSMPTE_170M,     CA::TransferSMPTE170M },
-        { CB::kColorTransferGamma22,        CA::TransferGamma22 },
-        { CB::kColorTransferGamma28,        CA::TransferGamma28 },
-        { CB::kColorTransferST2084,         CA::TransferST2084 },
-        { CB::kColorTransferHLG,            CA::TransferHLG },
-    }
-};
-
-static bool isValid(ColorAspects::Primaries p) {
-    return p <= ColorAspects::PrimariesOther;
-}
-
-static bool isDefined(ColorAspects::Primaries p) {
-    return p <= ColorAspects::PrimariesBT2020;
-}
-
-static bool isValid(ColorAspects::MatrixCoeffs c) {
-    return c <= ColorAspects::MatrixOther;
-}
-
-static bool isDefined(ColorAspects::MatrixCoeffs c) {
-    return c <= ColorAspects::MatrixBT2020Constant;
-}
-
-//static
-int32_t CodecBase::wrapColorAspectsIntoColorStandard(
-        ColorAspects::Primaries primaries, ColorAspects::MatrixCoeffs coeffs) {
-    ColorStandard res;
-    if (sStandards.map(std::make_pair(primaries, coeffs), &res)) {
-        return res;
-    } else if (!isValid(primaries) || !isValid(coeffs)) {
-        return kColorStandardUnspecified;
-    }
-
-    // check platform media limits
-    uint32_t numPrimaries = ColorAspects::PrimariesBT2020 + 1;
-    if (isDefined(primaries) && isDefined(coeffs)) {
-        return kColorStandardExtendedStart + primaries + coeffs * numPrimaries;
-    } else {
-        return kColorStandardVendorStart + primaries + coeffs * 0x100;
-    }
-}
-
-//static
-status_t CodecBase::unwrapColorAspectsFromColorStandard(
-        int32_t standard,
-        ColorAspects::Primaries *primaries, ColorAspects::MatrixCoeffs *coeffs) {
-    std::pair<ColorAspects::Primaries, ColorAspects::MatrixCoeffs> res;
-    if (sStandards.map((ColorStandard)standard, &res)) {
-        *primaries = res.first;
-        *coeffs = res.second;
-        return OK;
-    }
-
-    int32_t start = kColorStandardExtendedStart;
-    int32_t numPrimaries = ColorAspects::PrimariesBT2020 + 1;
-    int32_t numCoeffs = ColorAspects::MatrixBT2020Constant + 1;
-    if (standard >= (int32_t)kColorStandardVendorStart) {
-        start = kColorStandardVendorStart;
-        numPrimaries = ColorAspects::PrimariesOther + 1; // 0x100
-        numCoeffs = ColorAspects::MatrixOther + 1; // 0x100;
-    }
-    if (standard >= start && standard < start + numPrimaries * numCoeffs) {
-        int32_t product = standard - start;
-        *primaries = (ColorAspects::Primaries)(product % numPrimaries);
-        *coeffs = (ColorAspects::MatrixCoeffs)(product / numPrimaries);
-        return OK;
-    }
-    *primaries = ColorAspects::PrimariesOther;
-    *coeffs = ColorAspects::MatrixOther;
-    return BAD_VALUE;
-}
-
-static bool isValid(ColorAspects::Range r) {
-    return r <= ColorAspects::RangeOther;
-}
-
-static bool isDefined(ColorAspects::Range r) {
-    return r <= ColorAspects::RangeLimited;
-}
-
-//  static
-int32_t CodecBase::wrapColorAspectsIntoColorRange(ColorAspects::Range range) {
-    ColorRange res;
-    if (sRanges.map(range, &res)) {
-        return res;
-    } else if (!isValid(range)) {
-        return kColorRangeUnspecified;
-    } else {
-        CHECK(!isDefined(range));
-        // all platform values are in sRanges
-        return kColorRangeVendorStart + range;
-    }
-}
-
-//static
-status_t CodecBase::unwrapColorAspectsFromColorRange(
-        int32_t range, ColorAspects::Range *aspect) {
-    if (sRanges.map((ColorRange)range, aspect)) {
-        return OK;
-    }
-
-    int32_t start = kColorRangeVendorStart;
-    int32_t numRanges = ColorAspects::RangeOther + 1; // 0x100
-    if (range >= start && range < start + numRanges) {
-        *aspect = (ColorAspects::Range)(range - start);
-        return OK;
-    }
-    *aspect = ColorAspects::RangeOther;
-    return BAD_VALUE;
-}
-
-static bool isValid(ColorAspects::Transfer t) {
-    return t <= ColorAspects::TransferOther;
-}
-
-static bool isDefined(ColorAspects::Transfer t) {
-    return t <= ColorAspects::TransferHLG
-            || (t >= ColorAspects::TransferSMPTE240M && t <= ColorAspects::TransferST428);
-}
-
-//  static
-int32_t CodecBase::wrapColorAspectsIntoColorTransfer(
-        ColorAspects::Transfer transfer) {
-    ColorTransfer res;
-    if (sTransfers.map(transfer, &res)) {
-        return res;
-    } else if (!isValid(transfer)) {
-        return kColorTransferUnspecified;
-    } else if (isDefined(transfer)) {
-        return kColorTransferExtendedStart + transfer;
-    } else {
-        // all platform values are in sRanges
-        return kColorTransferVendorStart + transfer;
-    }
-}
-
-//static
-status_t CodecBase::unwrapColorAspectsFromColorTransfer(
-        int32_t transfer, ColorAspects::Transfer *aspect) {
-    if (sTransfers.map((ColorTransfer)transfer, aspect)) {
-        return OK;
-    }
-
-    int32_t start = kColorTransferExtendedStart;
-    int32_t numTransfers = ColorAspects::TransferST428 + 1;
-    if (transfer >= (int32_t)kColorTransferVendorStart) {
-        start = kColorTransferVendorStart;
-        numTransfers = ColorAspects::TransferOther + 1; // 0x100
-    }
-    if (transfer >= start && transfer < start + numTransfers) {
-        *aspect = (ColorAspects::Transfer)(transfer - start);
-        return OK;
-    }
-    *aspect = ColorAspects::TransferOther;
-    return BAD_VALUE;
-}
-
-// static
-status_t CodecBase::convertPlatformColorAspectsToCodecAspects(
-    int32_t range, int32_t standard, int32_t transfer, ColorAspects &aspects) {
-    status_t res1 = unwrapColorAspectsFromColorRange(range, &aspects.mRange);
-    status_t res2 = unwrapColorAspectsFromColorStandard(
-            standard, &aspects.mPrimaries, &aspects.mMatrixCoeffs);
-    status_t res3 = unwrapColorAspectsFromColorTransfer(transfer, &aspects.mTransfer);
-    return res1 != OK ? res1 : (res2 != OK ? res2 : res3);
-}
-
-// static
-status_t CodecBase::convertCodecColorAspectsToPlatformAspects(
-    const ColorAspects &aspects, int32_t *range, int32_t *standard, int32_t *transfer) {
-    *range = wrapColorAspectsIntoColorRange(aspects.mRange);
-    *standard = wrapColorAspectsIntoColorStandard(aspects.mPrimaries, aspects.mMatrixCoeffs);
-    *transfer = wrapColorAspectsIntoColorTransfer(aspects.mTransfer);
-    if (isValid(aspects.mRange) && isValid(aspects.mPrimaries)
-            && isValid(aspects.mMatrixCoeffs) && isValid(aspects.mTransfer)) {
-        return OK;
-    } else {
-        return BAD_VALUE;
-    }
-}
-
-// static
-void CodecBase::setDefaultPlatformColorAspectsIfNeeded(
-        int32_t &range, int32_t &standard, int32_t &transfer,
-        int32_t width, int32_t height) {
-    if (range == CodecBase::kColorRangeUnspecified) {
-        range = CodecBase::kColorRangeLimited;
-    }
-
-    if (standard == CodecBase::kColorStandardUnspecified) {
-        // Default to BT2020, BT709 or BT601 based on size. Allow 2.35:1 aspect ratio. Limit BT601
-        // to PAL or smaller, BT2020 to 4K or larger, leaving BT709 for all resolutions in between.
-        if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
-            standard = CodecBase::kColorStandardBT2020;
-        } else if ((width <= 720 && height > 480) || (height <= 720 && width > 480)) {
-            standard = CodecBase::kColorStandardBT601_625;
-        } else if ((width <= 720 && height <= 480) || (height <= 720 && width <= 480)) {
-            standard = CodecBase::kColorStandardBT601_525;
-        } else {
-            standard = CodecBase::kColorStandardBT709;
-        }
-    }
-
-    if (transfer == CodecBase::kColorTransferUnspecified) {
-        transfer = CodecBase::kColorTransferSMPTE_170M;
-    }
-}
-
-// static
-void CodecBase::setDefaultCodecColorAspectsIfNeeded(
-        ColorAspects &aspects, int32_t width, int32_t height) {
-    // reuse other method to get default aspects
-    int32_t range = 0, standard = 0, transfer = 0;
-    setDefaultPlatformColorAspectsIfNeeded(range, standard, transfer, width, height);
-    ColorAspects defaultAspects;
-    memset(&defaultAspects, 0, sizeof(defaultAspects));
-    convertPlatformColorAspectsToCodecAspects(range, standard, transfer, defaultAspects);
-
-    if (aspects.mRange == ColorAspects::RangeUnspecified) {
-        aspects.mRange = defaultAspects.mRange;
-    }
-    if (aspects.mPrimaries == ColorAspects::PrimariesUnspecified) {
-        aspects.mPrimaries = defaultAspects.mPrimaries;
-    }
-    if (aspects.mMatrixCoeffs == ColorAspects::MatrixUnspecified) {
-        aspects.mMatrixCoeffs = defaultAspects.mMatrixCoeffs;
-    }
-    if (aspects.mTransfer == ColorAspects::TransferUnspecified) {
-        aspects.mTransfer = defaultAspects.mTransfer;
-    }
-}
-
-/**************************************************************************************************/
-
 }  // namespace android
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index fbdf56f..9a5be29 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -399,9 +399,11 @@
 
     status_t err;
     Vector<MediaResource> resources;
-    const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec;
-    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
-    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+    MediaResource::Type type =
+            secureCodec ? MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+    MediaResource::SubType subtype =
+            mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+    resources.push_back(MediaResource(type, subtype, 1));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -468,13 +470,14 @@
 
     status_t err;
     Vector<MediaResource> resources;
-    const char *type = (mFlags & kFlagIsSecure) ?
-            kResourceSecureCodec : kResourceNonSecureCodec;
-    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
-    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+    MediaResource::Type type = (mFlags & kFlagIsSecure) ?
+            MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+    MediaResource::SubType subtype =
+            mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+    resources.push_back(MediaResource(type, subtype, 1));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
-    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+    resources.push_back(MediaResource(MediaResource::kGraphicMemory, 1));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -553,7 +556,8 @@
     return size;
 }
 
-void MediaCodec::addResource(const String8 &type, const String8 &subtype, uint64_t value) {
+void MediaCodec::addResource(
+        MediaResource::Type type, MediaResource::SubType subtype, uint64_t value) {
     Vector<MediaResource> resources;
     resources.push_back(MediaResource(type, subtype, value));
     mResourceManagerService->addResource(
@@ -565,13 +569,14 @@
 
     status_t err;
     Vector<MediaResource> resources;
-    const char *type = (mFlags & kFlagIsSecure) ?
-            kResourceSecureCodec : kResourceNonSecureCodec;
-    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
-    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+    MediaResource::Type type = (mFlags & kFlagIsSecure) ?
+            MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+    MediaResource::SubType subtype =
+            mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+    resources.push_back(MediaResource(type, subtype, 1));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
-    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+    resources.push_back(MediaResource(MediaResource::kGraphicMemory, 1));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1228,18 +1233,18 @@
                         mFlags &= ~kFlagUsesSoftwareRenderer;
                     }
 
-                    String8 resourceType;
+                    MediaResource::Type resourceType;
                     if (mComponentName.endsWith(".secure")) {
                         mFlags |= kFlagIsSecure;
-                        resourceType = String8(kResourceSecureCodec);
+                        resourceType = MediaResource::kSecureCodec;
                     } else {
                         mFlags &= ~kFlagIsSecure;
-                        resourceType = String8(kResourceNonSecureCodec);
+                        resourceType = MediaResource::kNonSecureCodec;
                     }
 
                     if (mIsVideo) {
                         // audio codec is currently ignored.
-                        addResource(resourceType, String8(kResourceVideoCodec), 1);
+                        addResource(resourceType, MediaResource::kVideoCodec, 1);
                     }
 
                     (new AMessage)->postReply(mReplyID);
@@ -1376,10 +1381,9 @@
                             // allocating input buffers, so this is a good
                             // indication that now all buffers are allocated.
                             if (mIsVideo) {
-                                String8 subtype;
                                 addResource(
-                                        String8(kResourceGraphicMemory),
-                                        subtype,
+                                        MediaResource::kGraphicMemory,
+                                        MediaResource::kUnspecifiedSubType,
                                         getGraphicBufferSize());
                             }
                             setState(STARTED);
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index 711601f..3c3ed59 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -15,6 +15,7 @@
     AString.cpp                   \
     AStringUtils.cpp              \
     AWakeLock.cpp                 \
+    ColorUtils.cpp                \
     MediaBuffer.cpp               \
     MediaBufferGroup.cpp          \
     MetaData.cpp                  \
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
new file mode 100644
index 0000000..2b86b0e
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -0,0 +1,312 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALookup.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+namespace android {
+
+// shortcut names for brevity in the following tables
+typedef ColorAspects CA;
+typedef ColorUtils CU;
+
+ALookup<CU::ColorRange, CA::Range> sRanges{
+    {
+        { CU::kColorRangeLimited, CA::RangeLimited },
+        { CU::kColorRangeFull, CA::RangeFull },
+        { CU::kColorRangeUnspecified, CA::RangeUnspecified },
+    }
+};
+
+ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>> sStandards {
+    {
+        { CU::kColorStandardUnspecified,    { CA::PrimariesUnspecified, CA::MatrixUnspecified } },
+        { CU::kColorStandardBT709,          { CA::PrimariesBT709_5, CA::MatrixBT709_5 } },
+        { CU::kColorStandardBT601_625,      { CA::PrimariesBT601_6_625, CA::MatrixBT601_6 } },
+        { CU::kColorStandardBT601_625_Unadjusted,
+                                            // this is a really close match
+                                            { CA::PrimariesBT601_6_625, CA::MatrixBT709_5 } },
+        { CU::kColorStandardBT601_525,      { CA::PrimariesBT601_6_525, CA::MatrixBT601_6 } },
+        { CU::kColorStandardBT601_525_Unadjusted,
+                                            { CA::PrimariesBT601_6_525, CA::MatrixSMPTE240M } },
+        { CU::kColorStandardBT2020,         { CA::PrimariesBT2020, CA::MatrixBT2020 } },
+        { CU::kColorStandardBT2020Constant, { CA::PrimariesBT2020, CA::MatrixBT2020Constant } },
+        { CU::kColorStandardBT470M,         { CA::PrimariesBT470_6M, CA::MatrixBT470_6M } },
+        // NOTE: there is no close match to the matrix used by standard film; chose the closest
+        { CU::kColorStandardFilm,           { CA::PrimariesGenericFilm, CA::MatrixBT2020 } },
+    }
+};
+
+ALookup<CU::ColorTransfer, CA::Transfer> sTransfers{
+    {
+        { CU::kColorTransferUnspecified,    CA::TransferUnspecified },
+        { CU::kColorTransferLinear,         CA::TransferLinear },
+        { CU::kColorTransferSRGB,           CA::TransferSRGB },
+        { CU::kColorTransferSMPTE_170M,     CA::TransferSMPTE170M },
+        { CU::kColorTransferGamma22,        CA::TransferGamma22 },
+        { CU::kColorTransferGamma28,        CA::TransferGamma28 },
+        { CU::kColorTransferST2084,         CA::TransferST2084 },
+        { CU::kColorTransferHLG,            CA::TransferHLG },
+    }
+};
+
+static bool isValid(ColorAspects::Primaries p) {
+    return p <= ColorAspects::PrimariesOther;
+}
+
+static bool isDefined(ColorAspects::Primaries p) {
+    return p <= ColorAspects::PrimariesBT2020;
+}
+
+static bool isValid(ColorAspects::MatrixCoeffs c) {
+    return c <= ColorAspects::MatrixOther;
+}
+
+static bool isDefined(ColorAspects::MatrixCoeffs c) {
+    return c <= ColorAspects::MatrixBT2020Constant;
+}
+
+//static
+int32_t ColorUtils::wrapColorAspectsIntoColorStandard(
+        ColorAspects::Primaries primaries, ColorAspects::MatrixCoeffs coeffs) {
+    ColorStandard res;
+    if (sStandards.map(std::make_pair(primaries, coeffs), &res)) {
+        return res;
+    } else if (!isValid(primaries) || !isValid(coeffs)) {
+        return kColorStandardUnspecified;
+    }
+
+    // check platform media limits
+    uint32_t numPrimaries = ColorAspects::PrimariesBT2020 + 1;
+    if (isDefined(primaries) && isDefined(coeffs)) {
+        return kColorStandardExtendedStart + primaries + coeffs * numPrimaries;
+    } else {
+        return kColorStandardVendorStart + primaries + coeffs * 0x100;
+    }
+}
+
+//static
+status_t ColorUtils::unwrapColorAspectsFromColorStandard(
+        int32_t standard,
+        ColorAspects::Primaries *primaries, ColorAspects::MatrixCoeffs *coeffs) {
+    std::pair<ColorAspects::Primaries, ColorAspects::MatrixCoeffs> res;
+    if (sStandards.map((ColorStandard)standard, &res)) {
+        *primaries = res.first;
+        *coeffs = res.second;
+        return OK;
+    }
+
+    int32_t start = kColorStandardExtendedStart;
+    int32_t numPrimaries = ColorAspects::PrimariesBT2020 + 1;
+    int32_t numCoeffs = ColorAspects::MatrixBT2020Constant + 1;
+    if (standard >= (int32_t)kColorStandardVendorStart) {
+        start = kColorStandardVendorStart;
+        numPrimaries = ColorAspects::PrimariesOther + 1; // 0x100
+        numCoeffs = ColorAspects::MatrixOther + 1; // 0x100;
+    }
+    if (standard >= start && standard < start + numPrimaries * numCoeffs) {
+        int32_t product = standard - start;
+        *primaries = (ColorAspects::Primaries)(product % numPrimaries);
+        *coeffs = (ColorAspects::MatrixCoeffs)(product / numPrimaries);
+        return OK;
+    }
+    *primaries = ColorAspects::PrimariesOther;
+    *coeffs = ColorAspects::MatrixOther;
+    return BAD_VALUE;
+}
+
+static bool isValid(ColorAspects::Range r) {
+    return r <= ColorAspects::RangeOther;
+}
+
+static bool isDefined(ColorAspects::Range r) {
+    return r <= ColorAspects::RangeLimited;
+}
+
+//  static
+int32_t ColorUtils::wrapColorAspectsIntoColorRange(ColorAspects::Range range) {
+    ColorRange res;
+    if (sRanges.map(range, &res)) {
+        return res;
+    } else if (!isValid(range)) {
+        return kColorRangeUnspecified;
+    } else {
+        CHECK(!isDefined(range));
+        // all platform values are in sRanges
+        return kColorRangeVendorStart + range;
+    }
+}
+
+//static
+status_t ColorUtils::unwrapColorAspectsFromColorRange(
+        int32_t range, ColorAspects::Range *aspect) {
+    if (sRanges.map((ColorRange)range, aspect)) {
+        return OK;
+    }
+
+    int32_t start = kColorRangeVendorStart;
+    int32_t numRanges = ColorAspects::RangeOther + 1; // 0x100
+    if (range >= start && range < start + numRanges) {
+        *aspect = (ColorAspects::Range)(range - start);
+        return OK;
+    }
+    *aspect = ColorAspects::RangeOther;
+    return BAD_VALUE;
+}
+
+static bool isValid(ColorAspects::Transfer t) {
+    return t <= ColorAspects::TransferOther;
+}
+
+static bool isDefined(ColorAspects::Transfer t) {
+    return t <= ColorAspects::TransferHLG
+            || (t >= ColorAspects::TransferSMPTE240M && t <= ColorAspects::TransferST428);
+}
+
+//  static
+int32_t ColorUtils::wrapColorAspectsIntoColorTransfer(
+        ColorAspects::Transfer transfer) {
+    ColorTransfer res;
+    if (sTransfers.map(transfer, &res)) {
+        return res;
+    } else if (!isValid(transfer)) {
+        return kColorTransferUnspecified;
+    } else if (isDefined(transfer)) {
+        return kColorTransferExtendedStart + transfer;
+    } else {
+        // all platform values are in sTransfers
+        return kColorTransferVendorStart + transfer;
+    }
+}
+
+//static
+status_t ColorUtils::unwrapColorAspectsFromColorTransfer(
+        int32_t transfer, ColorAspects::Transfer *aspect) {
+    if (sTransfers.map((ColorTransfer)transfer, aspect)) {
+        return OK;
+    }
+
+    int32_t start = kColorTransferExtendedStart;
+    int32_t numTransfers = ColorAspects::TransferST428 + 1;
+    if (transfer >= (int32_t)kColorTransferVendorStart) {
+        start = kColorTransferVendorStart;
+        numTransfers = ColorAspects::TransferOther + 1; // 0x100
+    }
+    if (transfer >= start && transfer < start + numTransfers) {
+        *aspect = (ColorAspects::Transfer)(transfer - start);
+        return OK;
+    }
+    *aspect = ColorAspects::TransferOther;
+    return BAD_VALUE;
+}
+
+// static
+status_t ColorUtils::convertPlatformColorAspectsToCodecAspects(
+    int32_t range, int32_t standard, int32_t transfer, ColorAspects &aspects) {
+    status_t res1 = unwrapColorAspectsFromColorRange(range, &aspects.mRange);
+    status_t res2 = unwrapColorAspectsFromColorStandard(
+            standard, &aspects.mPrimaries, &aspects.mMatrixCoeffs);
+    status_t res3 = unwrapColorAspectsFromColorTransfer(transfer, &aspects.mTransfer);
+    return res1 != OK ? res1 : (res2 != OK ? res2 : res3);
+}
+
+// static
+status_t ColorUtils::convertCodecColorAspectsToPlatformAspects(
+    const ColorAspects &aspects, int32_t *range, int32_t *standard, int32_t *transfer) {
+    *range = wrapColorAspectsIntoColorRange(aspects.mRange);
+    *standard = wrapColorAspectsIntoColorStandard(aspects.mPrimaries, aspects.mMatrixCoeffs);
+    *transfer = wrapColorAspectsIntoColorTransfer(aspects.mTransfer);
+    if (isValid(aspects.mRange) && isValid(aspects.mPrimaries)
+            && isValid(aspects.mMatrixCoeffs) && isValid(aspects.mTransfer)) {
+        return OK;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+// static
+void ColorUtils::setDefaultPlatformColorAspectsIfNeeded(
+        int32_t &range, int32_t &standard, int32_t &transfer,
+        int32_t width, int32_t height) {
+    if (range == ColorUtils::kColorRangeUnspecified) {
+        range = ColorUtils::kColorRangeLimited;
+    }
+
+    if (standard == ColorUtils::kColorStandardUnspecified) {
+        // Default to BT2020, BT709 or BT601 based on size. Allow 2.35:1 aspect ratio. Limit BT601
+        // to PAL or smaller, BT2020 to 4K or larger, leaving BT709 for all resolutions in between.
+        if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
+            standard = ColorUtils::kColorStandardBT2020;
+        } else if ((width <= 720 && height > 480) || (height <= 720 && width > 480)) {
+            standard = ColorUtils::kColorStandardBT601_625;
+        } else if ((width <= 720 && height <= 480) || (height <= 720 && width <= 480)) {
+            standard = ColorUtils::kColorStandardBT601_525;
+        } else {
+            standard = ColorUtils::kColorStandardBT709;
+        }
+    }
+
+    if (transfer == ColorUtils::kColorTransferUnspecified) {
+        transfer = ColorUtils::kColorTransferSMPTE_170M;
+    }
+}
+
+// static
+void ColorUtils::setDefaultCodecColorAspectsIfNeeded(
+        ColorAspects &aspects, int32_t width, int32_t height) {
+    ColorAspects::MatrixCoeffs coeffs;
+    ColorAspects::Primaries primaries;
+
+    // Default to BT2020, BT709 or BT601 based on size. Allow 2.35:1 aspect ratio. Limit BT601
+    // to PAL or smaller, BT2020 to 4K or larger, leaving BT709 for all resolutions in between.
+    if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
+        primaries = ColorAspects::PrimariesBT2020;
+        coeffs = ColorAspects::MatrixBT2020;
+    } else if ((width <= 720 && height > 480 && height <= 576)
+            || (height <= 720 && width > 480 && width <= 576)) {
+        primaries = ColorAspects::PrimariesBT601_6_625;
+        coeffs = ColorAspects::MatrixBT601_6;
+    } else if ((width <= 720 && height <= 480) || (height <= 720 && width <= 480)) {
+        primaries = ColorAspects::PrimariesBT601_6_525;
+        coeffs = ColorAspects::MatrixBT601_6;
+    } else {
+        primaries = ColorAspects::PrimariesBT709_5;
+        coeffs = ColorAspects::MatrixBT709_5;
+    }
+
+    if (aspects.mRange == ColorAspects::RangeUnspecified) {
+        aspects.mRange = ColorAspects::RangeLimited;
+    }
+
+    if (aspects.mPrimaries == ColorAspects::PrimariesUnspecified) {
+        aspects.mPrimaries = primaries;
+    }
+    if (aspects.mMatrixCoeffs == ColorAspects::MatrixUnspecified) {
+        aspects.mMatrixCoeffs = coeffs;
+    }
+    if (aspects.mTransfer == ColorAspects::TransferUnspecified) {
+        aspects.mTransfer = ColorAspects::TransferSMPTE170M;
+    }
+}
+
+}  // namespace android
+
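A minimal usage sketch of the wrappers above, assuming the ColorUtils/ColorAspects headers from the stagefright foundation library; it only illustrates the round trip and is not part of the change:

#include <media/stagefright/foundation/ColorUtils.h>
#include <utils/Errors.h>

using namespace android;

// Round-trip a codec-level transfer through the platform (int32_t) encoding.
static status_t roundTripTransfer() {
    int32_t platformTransfer =
            ColorUtils::wrapColorAspectsIntoColorTransfer(ColorAspects::TransferSMPTE170M);

    // Values outside the standard and vendor windows come back as TransferOther
    // together with BAD_VALUE, mirroring the unwrap logic above.
    ColorAspects::Transfer codecTransfer;
    return ColorUtils::unwrapColorAspectsFromColorTransfer(platformTransfer, &codecTransfer);
}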
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 787f53f..cf7c8fc 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -50,15 +50,11 @@
 {
     // Devices are considered equal if they:
     // - are of the same type (a device type cannot be AUDIO_DEVICE_NONE)
-    // - have the same address or one device does not specify the address
-    // - have the same channel mask or one device does not specify the channel mask
+    // - have the same address
     if (other == 0) {
         return false;
     }
-    return (mDeviceType == other->mDeviceType) &&
-           (mAddress == "" || other->mAddress == "" || mAddress == other->mAddress) &&
-           (mChannelMask == 0 || other->mChannelMask == 0 ||
-                mChannelMask == other->mChannelMask);
+    return (mDeviceType == other->mDeviceType) && (mAddress == other->mAddress);
 }
 
 void DeviceVector::refreshTypes()
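To illustrate the stricter matching introduced above (device type and address must both match exactly), a small standalone sketch of the predicate; DeviceKey and sameDevice are illustrative names, not the class's API:

#include <system/audio.h>
#include <utils/String8.h>

// Illustrative stand-in for the fields DeviceDescriptor::equals() now compares.
struct DeviceKey {
    audio_devices_t type;
    android::String8 address;
};

static bool sameDevice(const DeviceKey &a, const DeviceKey &b) {
    // Both fields must match exactly; an empty address no longer acts as a wildcard,
    // and the channel mask is no longer part of the comparison.
    return a.type == b.type && a.address == b.address;
}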
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index dd2a60a..b7c7879 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -310,12 +310,6 @@
         if (!deviceList.isEmpty()) {
             return deviceList.itemAt(0);
         }
-        deviceList = hwModule->getDeclaredDevices().getDevicesFromType(device);
-        if (!deviceList.isEmpty()) {
-            deviceList.itemAt(0)->setName(String8(device_name));
-            deviceList.itemAt(0)->mAddress = address;
-            return deviceList.itemAt(0);
-        }
     }
 
     sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device);
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index ed807c6..7f8ed1f 100755
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -142,6 +142,8 @@
 {
     const SwAudioOutputCollection &outputs = mPolicyEngine->mApmObserver->getOutputs();
 
+    //FIXME: getStrategyForUsage() should return STRATEGY_ACCESSIBILITY and getDeviceForStrategy()
+    // should be implemented accordingly for STRATEGY_ACCESSIBILITY
     if (usage == AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY &&
             (outputs.isStreamActive(AUDIO_STREAM_RING) ||
              outputs.isStreamActive(AUDIO_STREAM_ALARM))) {
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 1d2727e..f8ba3f2 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -195,18 +195,9 @@
 
 routing_strategy Engine::getStrategyForUsage(audio_usage_t usage)
 {
-    const SwAudioOutputCollection &outputs = mApmObserver->getOutputs();
-
     // usage to strategy mapping
     switch (usage) {
     case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
-        if (outputs.isStreamActive(AUDIO_STREAM_RING) ||
-                outputs.isStreamActive(AUDIO_STREAM_ALARM)) {
-            return STRATEGY_SONIFICATION;
-        }
-        if (isInCall()) {
-            return STRATEGY_PHONE;
-        }
         return STRATEGY_ACCESSIBILITY;
 
     case AUDIO_USAGE_MEDIA:
@@ -245,6 +236,17 @@
 
     const SwAudioOutputCollection &outputs = mApmObserver->getOutputs();
 
+    return getDeviceForStrategyInt(strategy, (DeviceVector&)availableOutputDevices,
+                                   availableInputDevices, outputs);
+}
+
+
+
+audio_devices_t Engine::getDeviceForStrategyInt(routing_strategy strategy,
+                                                DeviceVector &availableOutputDevices,
+                                                const DeviceVector &availableInputDevices,
+                                                const SwAudioOutputCollection &outputs) const
+{
     uint32_t device = AUDIO_DEVICE_NONE;
     uint32_t availableOutputDevicesType = availableOutputDevices.types();
 
@@ -260,14 +262,16 @@
 
     case STRATEGY_SONIFICATION_RESPECTFUL:
         if (isInCall()) {
-            device = getDeviceForStrategy(STRATEGY_SONIFICATION);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
         } else if (outputs.isStreamActiveRemotely(AUDIO_STREAM_MUSIC,
                 SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
             // while media is playing on a remote device, use the sonification behavior.
             // Note that we test this usecase before testing if media is playing because
             //   the isStreamActive() method only informs about the activity of a stream, not
             //   if it's for local playback. Note also that we use the same delay between both tests
-            device = getDeviceForStrategy(STRATEGY_SONIFICATION);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
             //user "safe" speaker if available instead of normal speaker to avoid triggering
             //other acoustic safety mechanisms for notification
             if ((device & AUDIO_DEVICE_OUT_SPEAKER) &&
@@ -275,12 +279,15 @@
                 device |= AUDIO_DEVICE_OUT_SPEAKER_SAFE;
                 device &= ~AUDIO_DEVICE_OUT_SPEAKER;
             }
-        } else if (outputs.isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
+        } else if (outputs.isStreamActive(
+                                AUDIO_STREAM_MUSIC, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
             // while media is playing (or has recently played), use the same device
-            device = getDeviceForStrategy(STRATEGY_MEDIA);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_MEDIA, availableOutputDevices, availableInputDevices, outputs);
         } else {
             // when media is not playing anymore, fall back on the sonification behavior
-            device = getDeviceForStrategy(STRATEGY_SONIFICATION);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
             //user "safe" speaker if available instead of normal speaker to avoid triggering
             //other acoustic safety mechanisms for notification
             if ((device & AUDIO_DEVICE_OUT_SPEAKER) &&
@@ -294,7 +301,8 @@
     case STRATEGY_DTMF:
         if (!isInCall()) {
             // when off call, DTMF strategy follows the same rules as MEDIA strategy
-            device = getDeviceForStrategy(STRATEGY_MEDIA);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_MEDIA, availableOutputDevices, availableInputDevices, outputs);
             break;
         }
         // when in call, DTMF and PHONE strategies follow the same rules
@@ -321,8 +329,8 @@
                 availableOutputDevicesType = availPrimaryOutputDevices;
             }
         }
-        // for phone strategy, we first consider the forced use and then the available devices by order
-        // of priority
+        // for phone strategy, we first consider the forced use and then the available devices by
+        // order of priority
         switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) {
         case AUDIO_POLICY_FORCE_BT_SCO:
             if (!isInCall() || strategy != STRATEGY_DTMF) {
@@ -408,7 +416,8 @@
         // If incall, just select the STRATEGY_PHONE device: The rest of the behavior is handled by
         // handleIncallSonification().
         if (isInCall()) {
-            device = getDeviceForStrategy(STRATEGY_PHONE);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
             break;
         }
         // FALL THROUGH
@@ -429,7 +438,6 @@
         // The second device used for sonification is the same as the device used by media strategy
         // FALL THROUGH
 
-    // FIXME: STRATEGY_ACCESSIBILITY and STRATEGY_REROUTING follow STRATEGY_MEDIA for now
     case STRATEGY_ACCESSIBILITY:
         if (strategy == STRATEGY_ACCESSIBILITY) {
             // do not route accessibility prompts to a digital output currently configured with a
@@ -443,20 +451,35 @@
                     availableOutputDevicesType = availableOutputDevices.types() & ~devices;
                 }
             }
+            availableOutputDevices =
+                    availableOutputDevices.getDevicesFromType(availableOutputDevicesType);
+            if (outputs.isStreamActive(AUDIO_STREAM_RING) ||
+                    outputs.isStreamActive(AUDIO_STREAM_ALARM)) {
+                return getDeviceForStrategyInt(
+                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
+            }
+            if (isInCall()) {
+                return getDeviceForStrategyInt(
+                        STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
+            }
         }
+        // For other cases, STRATEGY_ACCESSIBILITY behaves like STRATEGY_MEDIA
         // FALL THROUGH
 
+    // FIXME: STRATEGY_REROUTING follows STRATEGY_MEDIA for now
     case STRATEGY_REROUTING:
     case STRATEGY_MEDIA: {
         uint32_t device2 = AUDIO_DEVICE_NONE;
         if (strategy != STRATEGY_SONIFICATION) {
             // no sonification on remote submix (e.g. WFD)
-            if (availableOutputDevices.getDevice(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, String8("0")) != 0) {
+            if (availableOutputDevices.getDevice(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                                 String8("0")) != 0) {
                 device2 = availableOutputDevices.types() & AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
             }
         }
         if (isInCall() && (strategy == STRATEGY_MEDIA)) {
-            device = getDeviceForStrategy(STRATEGY_PHONE);
+            device = getDeviceForStrategyInt(
+                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
             break;
         }
         if ((device2 == AUDIO_DEVICE_NONE) &&
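The recursive calls above now go through getDeviceForStrategyInt() so that the (possibly filtered) device vectors and output collection are passed along explicitly instead of being re-read inside each nested lookup. A hedged sketch of that delegation pattern, with placeholder types standing in for the policy classes:

#include <cstdint>

namespace sketch {

enum Strategy { kMedia, kPhone, kSonification };

struct Context {               // stands in for availableOutput/InputDevices and outputs
    uint32_t availableOutputs;
    bool inCall;
};

struct Engine {
    // Public entry point: snapshot the policy state once...
    uint32_t getDeviceForStrategy(Strategy s) const {
        Context ctx{0x3u, /*inCall=*/false};   // would come from the APM observer
        return getDeviceForStrategyInt(s, ctx);
    }
    // ...then every nested strategy lookup reuses (and may filter) that same snapshot.
    uint32_t getDeviceForStrategyInt(Strategy s, Context &ctx) const {
        switch (s) {
        case kPhone:        return ctx.availableOutputs & 0x1u;
        case kSonification: return ctx.availableOutputs & 0x2u;
        case kMedia:
        default:
            return ctx.inCall ? getDeviceForStrategyInt(kPhone, ctx)
                              : (ctx.availableOutputs & 0x2u);
        }
    }
};

}  // namespace sketch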
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 8b6eaf6..ed93d1c 100755
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -125,6 +125,10 @@
     routing_strategy getStrategyForStream(audio_stream_type_t stream);
     routing_strategy getStrategyForUsage(audio_usage_t usage);
     audio_devices_t getDeviceForStrategy(routing_strategy strategy) const;
+    audio_devices_t getDeviceForStrategyInt(routing_strategy strategy,
+                                            DeviceVector &availableOutputDevices,
+                                            const DeviceVector &availableInputDevices,
+                                            const SwAudioOutputCollection &outputs) const;
     audio_devices_t getDeviceForInputSource(audio_source_t inputSource) const;
     audio_mode_t mPhoneState;  /**< current phone state. */
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index a7b90f4..21107a1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1794,8 +1794,15 @@
 {
     ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
     mVolumeCurves->initStreamVolume(stream, indexMin, indexMax);
-    if (stream == AUDIO_STREAM_MUSIC) {
-        mVolumeCurves->initStreamVolume(AUDIO_STREAM_ACCESSIBILITY, indexMin, indexMax);
+
+    // initialize other private stream volumes which follow this one
+    routing_strategy strategy = getStrategy(stream);
+    for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
+        routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
+        if (!strategiesMatchForvolume(strategy, curStrategy)) {
+            continue;
+        }
+        mVolumeCurves->initStreamVolume((audio_stream_type_t)curStream, indexMin, indexMax);
     }
 }
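The loop above replaces the hard-coded MUSIC-to-ACCESSIBILITY special case: every stream whose strategy matches for volume now follows the initialized range. A standalone sketch of that iteration pattern; the enums and helper names are illustrative, not the manager's API:

#include <functional>

enum Stream { kMusic, kAccessibility, kRing, kStreamCount };
enum Strategy { kStrategyMedia, kStrategyAccessibility, kStrategySonification };

static Strategy strategyFor(Stream s) {
    switch (s) {
    case kAccessibility: return kStrategyAccessibility;
    case kRing:          return kStrategySonification;
    default:             return kStrategyMedia;
    }
}

static bool matchForVolume(Strategy a, Strategy b) {
    // MEDIA and ACCESSIBILITY share volume, mirroring strategiesMatchForvolume().
    return a == b ||
           (a == kStrategyMedia && b == kStrategyAccessibility) ||
           (a == kStrategyAccessibility && b == kStrategyMedia);
}

static void forEachVolumeFollower(Stream stream, const std::function<void(Stream)> &apply) {
    Strategy strategy = strategyFor(stream);
    for (int s = 0; s < kStreamCount; ++s) {
        if (matchForVolume(strategy, strategyFor(static_cast<Stream>(s)))) {
            apply(static_cast<Stream>(s));  // e.g. initializing MUSIC also covers ACCESSIBILITY
        }
    }
}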
 
@@ -1823,38 +1830,43 @@
     if (device == AUDIO_DEVICE_OUT_DEFAULT) {
         mVolumeCurves->clearCurrentVolumeIndex(stream);
     }
-    mVolumeCurves->addCurrentVolumeIndex(stream, device, index);
+
+    // update other private stream volumes which follow this one
+    routing_strategy strategy = getStrategy(stream);
+    for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
+        routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
+        if (!strategiesMatchForvolume(strategy, curStrategy)) {
+            continue;
+        }
+        mVolumeCurves->addCurrentVolumeIndex((audio_stream_type_t)curStream, device, index);
+    }
 
     // update volume on all outputs whose current device is also selected by the same
     // strategy as the device specified by the caller
-    audio_devices_t selectedDevices = getDeviceForStrategy(getStrategy(stream), true /*fromCache*/);
-    // it is possible that the requested device is not selected by the strategy (e.g an explicit
-    // audio patch is active causing getDevicesForStream() to return this device. We must make
-    // sure that the device passed is part of the devices considered when applying volume below.
-    selectedDevices |= device;
-
-    //FIXME: AUDIO_STREAM_ACCESSIBILITY volume follows AUDIO_STREAM_MUSIC for now
-    audio_devices_t accessibilityDevice = AUDIO_DEVICE_NONE;
-    if (stream == AUDIO_STREAM_MUSIC) {
-        mVolumeCurves->addCurrentVolumeIndex(AUDIO_STREAM_ACCESSIBILITY, device, index);
-        accessibilityDevice = getDeviceForStrategy(STRATEGY_ACCESSIBILITY, true /*fromCache*/);
-    }
-
     status_t status = NO_ERROR;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         audio_devices_t curDevice = Volume::getDeviceForVolume(desc->device());
-        if ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & selectedDevices) != 0)) {
-            status_t volStatus = checkAndSetVolume(stream, index, desc, curDevice);
-            if (volStatus != NO_ERROR) {
-                status = volStatus;
+        for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
+            routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
+            if (!strategiesMatchForvolume(strategy, curStrategy)) {
+                continue;
             }
-        }
-        if ((accessibilityDevice != AUDIO_DEVICE_NONE) &&
-                ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & accessibilityDevice) != 0)))
-        {
-            status_t volStatus = checkAndSetVolume(AUDIO_STREAM_ACCESSIBILITY,
-                                                   index, desc, curDevice);
+            audio_devices_t curStreamDevice = getDeviceForStrategy(curStrategy, true /*fromCache*/);
+            // it is possible that the requested device is not selected by the strategy
+            // (e.g. an explicit audio patch is active causing getDevicesForStream()
+            // to return this device). We must make sure that the device passed is part of
+            // the devices considered when applying volume below.
+            curStreamDevice |= device;
+
+            if (((device == AUDIO_DEVICE_OUT_DEFAULT) ||
+                    ((curDevice & curStreamDevice) != 0))) {
+                status_t volStatus =
+                        checkAndSetVolume((audio_stream_type_t)curStream, index, desc, curDevice);
+                if (volStatus != NO_ERROR) {
+                    status = volStatus;
+                }
+            }
         }
     }
     return status;
@@ -1957,7 +1969,17 @@
 
 bool AudioPolicyManager::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
 {
-    return mOutputs.isStreamActive(stream, inPastMs);
+    bool active = false;
+    routing_strategy strategy = getStrategy(stream);
+    for (int curStream = 0; curStream < AUDIO_STREAM_CNT && !active; curStream++) {
+        routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
+        if (!strategiesMatchForvolume(strategy, curStrategy)) {
+            continue;
+        }
+        active = mOutputs.isStreamActive((audio_stream_type_t)curStream, inPastMs);
+    }
+
+    return active;
 }
 
 bool AudioPolicyManager::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
@@ -2838,7 +2860,7 @@
     disconnectAudioSource(sourceDesc);
 
     routing_strategy strategy = (routing_strategy) getStrategyForAttr(&sourceDesc->mAttributes);
-    audio_stream_type_t stream = audio_attributes_to_stream_type(&sourceDesc->mAttributes);
+    audio_stream_type_t stream = streamTypefromAttributesInt(&sourceDesc->mAttributes);
     sp<DeviceDescriptor> srcDeviceDesc = sourceDesc->mDevice;
 
     audio_devices_t sinkDevice = getDeviceForStrategy(strategy, true);
@@ -2971,7 +2993,7 @@
     }
     removeAudioPatch(sourceDesc->mPatchDesc->mHandle);
 
-    audio_stream_type_t stream = audio_attributes_to_stream_type(&sourceDesc->mAttributes);
+    audio_stream_type_t stream = streamTypefromAttributesInt(&sourceDesc->mAttributes);
     sp<SwAudioOutputDescriptor> swOutputDesc = sourceDesc->mSwOutput.promote();
     if (swOutputDesc != 0) {
         stopSource(swOutputDesc, stream, false);
@@ -4170,10 +4192,10 @@
     //      use device for strategy phone
     // 3: the strategy for enforced audible is active but not enforced on the output:
     //      use the device for strategy enforced audible
-    // 4: the strategy accessibility is active on the output:
-    //      use device for strategy accessibility
-    // 5: the strategy sonification is active on the output:
+    // 4: the strategy sonification is active on the output:
     //      use device for strategy sonification
+    // 5: the strategy accessibility is active on the output:
+    //      use device for strategy accessibility
     // 6: the strategy "respectful" sonification is active on the output:
     //      use device for strategy "respectful" sonification
     // 7: the strategy media is active on the output:
@@ -4190,10 +4212,10 @@
         device = getDeviceForStrategy(STRATEGY_PHONE, fromCache);
     } else if (isStrategyActive(outputDesc, STRATEGY_ENFORCED_AUDIBLE)) {
         device = getDeviceForStrategy(STRATEGY_ENFORCED_AUDIBLE, fromCache);
-    } else if (isStrategyActive(outputDesc, STRATEGY_ACCESSIBILITY)) {
-        device = getDeviceForStrategy(STRATEGY_ACCESSIBILITY, fromCache);
     } else if (isStrategyActive(outputDesc, STRATEGY_SONIFICATION)) {
         device = getDeviceForStrategy(STRATEGY_SONIFICATION, fromCache);
+    } else if (isStrategyActive(outputDesc, STRATEGY_ACCESSIBILITY)) {
+        device = getDeviceForStrategy(STRATEGY_ACCESSIBILITY, fromCache);
     } else if (isStrategyActive(outputDesc, STRATEGY_SONIFICATION_RESPECTFUL)) {
         device = getDeviceForStrategy(STRATEGY_SONIFICATION_RESPECTFUL, fromCache);
     } else if (isStrategyActive(outputDesc, STRATEGY_MEDIA)) {
@@ -4229,6 +4251,13 @@
     return device;
 }
 
+bool AudioPolicyManager::strategiesMatchForvolume(routing_strategy strategy1,
+                                                  routing_strategy strategy2) {
+    return ((strategy1 == strategy2) ||
+            ((strategy1 == STRATEGY_ACCESSIBILITY) && (strategy2 == STRATEGY_MEDIA)) ||
+            ((strategy1 == STRATEGY_MEDIA) && (strategy2 == STRATEGY_ACCESSIBILITY)));
+}
+
 uint32_t AudioPolicyManager::getStrategyForStream(audio_stream_type_t stream) {
     return (uint32_t)getStrategy(stream);
 }
@@ -4240,16 +4269,22 @@
     if (stream < (audio_stream_type_t) 0 || stream >= AUDIO_STREAM_PUBLIC_CNT) {
         return AUDIO_DEVICE_NONE;
     }
-    audio_devices_t devices;
+    audio_devices_t devices = AUDIO_DEVICE_NONE;
     routing_strategy strategy = getStrategy(stream);
-    devices = getDeviceForStrategy(strategy, true /*fromCache*/);
-    SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(devices, mOutputs);
-    for (size_t i = 0; i < outputs.size(); i++) {
-        sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]);
-        if (isStrategyActive(outputDesc, strategy)) {
-            devices = outputDesc->device();
-            break;
+    for (int curStrategy = 0; curStrategy < NUM_STRATEGIES; curStrategy++) {
+        if (!strategiesMatchForvolume(strategy, (routing_strategy)curStrategy)) {
+            continue;
         }
+        audio_devices_t curDevices =
+                getDeviceForStrategy((routing_strategy)curStrategy, true /*fromCache*/);
+        SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(curDevices, mOutputs);
+        for (size_t i = 0; i < outputs.size(); i++) {
+            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]);
+            if (isStrategyActive(outputDesc, (routing_strategy)curStrategy)) {
+                curDevices |= outputDesc->device();
+            }
+        }
+        devices |= curDevices;
     }
 
     /*Filter SPEAKER_SAFE out of results, as AudioService doesn't know about it
@@ -4361,15 +4396,8 @@
     // the device = the device from the descriptor in the RouteMap, and exit.
     for (size_t routeIndex = 0; routeIndex < mOutputRoutes.size(); routeIndex++) {
         sp<SessionRoute> route = mOutputRoutes.valueAt(routeIndex);
-        routing_strategy strat = getStrategy(route->mStreamType);
-        // Special case for accessibility strategy which must follow any strategy it is
-        // currently remapped to
-        bool strategyMatch = (strat == strategy) ||
-                             ((strategy == STRATEGY_ACCESSIBILITY) &&
-                              ((mEngine->getStrategyForUsage(
-                                      AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY) == strat) ||
-                               (strat == STRATEGY_MEDIA)));
-        if (strategyMatch && route->isActive()) {
+        routing_strategy routeStrategy = getStrategy(route->mStreamType);
+        if ((routeStrategy == strategy) && route->isActive()) {
             return route->mDeviceDescriptor->type();
         }
     }
@@ -5007,15 +5035,6 @@
     case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
         return AUDIO_STREAM_MUSIC;
     case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
-        if (isStreamActive(AUDIO_STREAM_ALARM)) {
-            return AUDIO_STREAM_ALARM;
-        }
-        if (isStreamActive(AUDIO_STREAM_RING)) {
-            return AUDIO_STREAM_RING;
-        }
-        if (isInCall()) {
-            return AUDIO_STREAM_VOICE_CALL;
-        }
         return AUDIO_STREAM_ACCESSIBILITY;
     case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
         return AUDIO_STREAM_SYSTEM;
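Earlier in this file, getDevicesForStream() now ORs together the devices of every strategy that matches the stream's strategy for volume instead of stopping at the first active output. A simplified sketch of that aggregation, with a plain struct standing in for the per-strategy state:

#include <cstdint>
#include <vector>

struct StrategyState {
    bool matchesForVolume;         // strategiesMatchForvolume(strategy, curStrategy)
    uint32_t cachedDevices;        // getDeviceForStrategy(curStrategy, true /*fromCache*/)
    uint32_t activeOutputDevices;  // devices of outputs where curStrategy is active
};

static uint32_t devicesForStream(const std::vector<StrategyState> &strategies) {
    uint32_t devices = 0;
    for (const StrategyState &s : strategies) {
        if (!s.matchesForVolume) {
            continue;
        }
        // Union of the cached selection and any outputs the strategy is active on,
        // instead of returning the first match.
        devices |= s.cachedDevices | s.activeOutputDevices;
    }
    return devices;
}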
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 38700fc..d6e48ab 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -506,6 +506,9 @@
         void clearAudioSources(uid_t uid);
 
 
+        static bool strategiesMatchForvolume(routing_strategy strategy1,
+                                             routing_strategy strategy2);
+
         uid_t mUidCached;
         AudioPolicyClientInterface *mpClientInterface;  // audio policy client interface
         sp<SwAudioOutputDescriptor> mPrimaryOutput;     // primary output descriptor
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 6490682..4d12015 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -53,7 +53,13 @@
         // Check if lens is fixed-focus
         if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
             m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+        } else {
+            m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
         }
+        m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+        m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
+        m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+        m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
     }
 }
 
@@ -253,80 +259,99 @@
     if (frameNumber <= mLast3AFrameNumber) {
         ALOGV("%s: Already sent 3A for frame number %d, skipping",
                 __FUNCTION__, frameNumber);
+
+        // Remove the entry if there is one for this frame number in mPending3AStates.
+        mPending3AStates.removeItem(frameNumber);
         return OK;
     }
 
-    mLast3AFrameNumber = frameNumber;
+    AlgState pendingState;
 
-    // Get 3A states from result metadata
+    ssize_t index = mPending3AStates.indexOfKey(frameNumber);
+    if (index != NAME_NOT_FOUND) {
+        pendingState = mPending3AStates.valueAt(index);
+    }
+
+    // Update 3A states from the result.
     bool gotAllStates = true;
 
-    AlgState new3aState;
-
     // TODO: Also use AE mode, AE trigger ID
+    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
+            &pendingState.afMode, frameNumber, cameraId);
 
-    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
-            &new3aState.afMode, frameNumber, cameraId);
+    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
+            &pendingState.awbMode, frameNumber, cameraId);
 
-    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
-            &new3aState.awbMode, frameNumber, cameraId);
+    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
+            &pendingState.aeState, frameNumber, cameraId);
 
-    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
-            &new3aState.aeState, frameNumber, cameraId);
+    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
+            &pendingState.afState, frameNumber, cameraId);
 
-    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
-            &new3aState.afState, frameNumber, cameraId);
-
-    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
-            &new3aState.awbState, frameNumber, cameraId);
+    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
+            &pendingState.awbState, frameNumber, cameraId);
 
     if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
-        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
-        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
+        pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
+        pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
     } else {
-        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
-                 &new3aState.afTriggerId, frameNumber, cameraId);
+        gotAllStates &= updatePendingState<int32_t>(metadata,
+                ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);
 
-        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
-                 &new3aState.aeTriggerId, frameNumber, cameraId);
+        gotAllStates &= updatePendingState<int32_t>(metadata,
+            ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
     }
 
-    if (!gotAllStates) return BAD_VALUE;
+    if (!gotAllStates) {
+        // If not all states are received yet, store the pending state in mPending3AStates.
+        if (index == NAME_NOT_FOUND) {
+            mPending3AStates.add(frameNumber, pendingState);
+        } else {
+            mPending3AStates.replaceValueAt(index, pendingState);
+        }
+        return NOT_ENOUGH_DATA;
+    }
 
-    if (new3aState.aeState != m3aState.aeState) {
+    // Once all 3A states are received, notify the client about 3A changes.
+    if (pendingState.aeState != m3aState.aeState) {
         ALOGV("%s: Camera %d: AE state %d->%d",
                 __FUNCTION__, cameraId,
-                m3aState.aeState, new3aState.aeState);
-        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
+                m3aState.aeState, pendingState.aeState);
+        client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);
     }
 
-    if (new3aState.afState != m3aState.afState ||
-        new3aState.afMode != m3aState.afMode ||
-        new3aState.afTriggerId != m3aState.afTriggerId) {
+    if (pendingState.afState != m3aState.afState ||
+        pendingState.afMode != m3aState.afMode ||
+        pendingState.afTriggerId != m3aState.afTriggerId) {
         ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                 __FUNCTION__, cameraId,
-                m3aState.afState, new3aState.afState,
-                m3aState.afMode, new3aState.afMode,
-                m3aState.afTriggerId, new3aState.afTriggerId);
-        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
+                m3aState.afState, pendingState.afState,
+                m3aState.afMode, pendingState.afMode,
+                m3aState.afTriggerId, pendingState.afTriggerId);
+        client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);
     }
-    if (new3aState.awbState != m3aState.awbState ||
-        new3aState.awbMode != m3aState.awbMode) {
+    if (pendingState.awbState != m3aState.awbState ||
+        pendingState.awbMode != m3aState.awbMode) {
         ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                 __FUNCTION__, cameraId,
-                m3aState.awbState, new3aState.awbState,
-                m3aState.awbMode, new3aState.awbMode);
-        client->notifyAutoWhitebalance(new3aState.awbState,
-                new3aState.aeTriggerId);
+                m3aState.awbState, pendingState.awbState,
+                m3aState.awbMode, pendingState.awbMode);
+        client->notifyAutoWhitebalance(pendingState.awbState,
+                pendingState.aeTriggerId);
     }
 
-    m3aState = new3aState;
+    if (index != NAME_NOT_FOUND) {
+        mPending3AStates.removeItemsAt(index);
+    }
+
+    m3aState = pendingState;
+    mLast3AFrameNumber = frameNumber;
 
     return OK;
 }
 
 template<typename Src, typename T>
-bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
         T* value, int32_t frameNumber, int cameraId) {
     camera_metadata_ro_entry_t entry;
     if (value == NULL) {
@@ -335,9 +360,14 @@
         return false;
     }
 
+    // Already got the value for this tag.
+    if (*value != static_cast<T>(NOT_SET)) {
+        return true;
+    }
+
     entry = result.find(tag);
     if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+        ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
                 __FUNCTION__, cameraId,
                 get_camera_metadata_tag_name(tag), frameNumber);
         return false;
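With updatePendingState(), the 3A fields for one frame may arrive spread over several partial results; fields already set are kept and missing ones are filled in by later results, and the client is only notified once everything is present. A hedged sketch of that accumulate-then-emit flow, using a plain std::map in place of KeyedVector and camera metadata:

#include <cstdint>
#include <map>

struct Alg3A {
    static const int32_t kNotSet = -1;
    int32_t aeState = kNotSet;
    int32_t afState = kNotSet;
    int32_t awbState = kNotSet;
    bool complete() const {
        return aeState != kNotSet && afState != kNotSet && awbState != kNotSet;
    }
};

// Keep a field's earlier value; otherwise take it from this partial result if present.
static void updateField(int32_t *field, bool presentInThisResult, int32_t value) {
    if (*field == Alg3A::kNotSet && presentInThisResult) {
        *field = value;
    }
}

// Returns true once every 3A field for frameNumber has been received.
static bool accumulate3A(std::map<int32_t, Alg3A> &pending, int32_t frameNumber,
                         bool hasAe, int32_t ae,
                         bool hasAf, int32_t af,
                         bool hasAwb, int32_t awb) {
    Alg3A &state = pending[frameNumber];
    updateField(&state.aeState, hasAe, ae);
    updateField(&state.afState, hasAf, af);
    updateField(&state.awbState, hasAwb, awb);
    if (!state.complete()) {
        return false;            // keep the entry pending, like mPending3AStates
    }
    pending.erase(frameNumber);  // notify the client, then drop the pending entry
    return true;
}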
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 68cf55b..a5b81a7 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -43,6 +43,8 @@
     ~FrameProcessor();
 
   private:
+    static const int32_t NOT_SET = -1;
+
     wp<Camera2Client> mClient;
 
     bool mSynthesize3ANotify;
@@ -63,7 +65,7 @@
 
     // Helper for process3aState
     template<typename Src, typename T>
-    bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+    bool updatePendingState(const CameraMetadata& result, int32_t tag, T* value,
             int32_t frameNumber, int cameraId);
 
 
@@ -81,15 +83,20 @@
 
         // These defaults need to match those in Parameters.cpp
         AlgState() :
-                afMode(ANDROID_CONTROL_AF_MODE_AUTO),
-                awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
-                aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
-                afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
-                awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
-                afTriggerId(0),
-                aeTriggerId(0) {
+                afMode((camera_metadata_enum_android_control_af_mode)NOT_SET),
+                awbMode((camera_metadata_enum_android_control_awb_mode)NOT_SET),
+                aeState((camera_metadata_enum_android_control_ae_state)NOT_SET),
+                afState((camera_metadata_enum_android_control_af_state)NOT_SET),
+                awbState((camera_metadata_enum_android_control_awb_state)NOT_SET),
+                afTriggerId(NOT_SET),
+                aeTriggerId(NOT_SET) {
         }
-    } m3aState;
+    };
+
+    AlgState m3aState;
+
+    // frame number -> pending 3A states for which not all fields have been received yet.
+    KeyedVector<int32_t, AlgState> mPending3AStates;
 
     // Whether the partial result is enabled for this device
     bool mUsePartialResult;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 0692e5a..316cfda 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -710,6 +710,11 @@
         request != NULL) {
 
         request->swap(metadata);
+    } else if (err == BAD_VALUE) {
+        res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Camera %d: Template ID %d is invalid or not supported: %s (%d)",
+                mCameraId, templateId, strerror(-err), err);
+
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %d: Error creating default request for template %d: %s (%d)",
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ee84ff0..c516a72 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -534,9 +534,11 @@
         mOutputStreams[i]->dump(fd,args);
     }
 
-    lines = String8("    Camera3 Buffer Manager:\n");
-    write(fd, lines.string(), lines.size());
-    mBufferManager->dump(fd, args);
+    if (mBufferManager != NULL) {
+        lines = String8("    Camera3 Buffer Manager:\n");
+        write(fd, lines.string(), lines.size());
+        mBufferManager->dump(fd, args);
+    }
 
     lines = String8("    In-flight requests:\n");
     if (mInFlightMap.size() == 0) {
@@ -1237,6 +1239,13 @@
         CameraMetadata *request) {
     ATRACE_CALL();
     ALOGV("%s: for template %d", __FUNCTION__, templateId);
+
+    if (templateId <= 0 || templateId >= CAMERA3_TEMPLATE_COUNT) {
+        android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
+                IPCThreadState::self()->getCallingUid(), nullptr, 0);
+        return BAD_VALUE;
+    }
+
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -2049,176 +2058,6 @@
     return OK;
 }
 
-/**
- * Check if all 3A fields are ready, and send off a partial 3A-only result
- * to the output frame queue
- */
-bool Camera3Device::processPartial3AResult(
-        uint32_t frameNumber,
-        const CameraMetadata& partial, const CaptureResultExtras& resultExtras) {
-
-    // Check if all 3A states are present
-    // The full list of fields is
-    //   android.control.afMode
-    //   android.control.awbMode
-    //   android.control.aeState
-    //   android.control.awbState
-    //   android.control.afState
-    //   android.control.afTriggerID
-    //   android.control.aePrecaptureID
-    // TODO: Add android.control.aeMode
-
-    bool gotAllStates = true;
-
-    uint8_t afMode;
-    uint8_t awbMode;
-    uint8_t aeState;
-    uint8_t afState;
-    uint8_t awbState;
-
-    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_MODE,
-        &afMode, frameNumber);
-
-    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_MODE,
-        &awbMode, frameNumber);
-
-    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_STATE,
-        &aeState, frameNumber);
-
-    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_STATE,
-        &afState, frameNumber);
-
-    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_STATE,
-        &awbState, frameNumber);
-
-    if (!gotAllStates) return false;
-
-    ALOGVV("%s: Camera %d: Frame %d, Request ID %d: AF mode %d, AWB mode %d, "
-        "AF state %d, AE state %d, AWB state %d, "
-        "AF trigger %d, AE precapture trigger %d",
-        __FUNCTION__, mId, frameNumber, resultExtras.requestId,
-        afMode, awbMode,
-        afState, aeState, awbState,
-        resultExtras.afTriggerId, resultExtras.precaptureTriggerId);
-
-    // Got all states, so construct a minimal result to send
-    // In addition to the above fields, this means adding in
-    //   android.request.frameCount
-    //   android.request.requestId
-    //   android.quirks.partialResult (for HAL version below HAL3.2)
-
-    const size_t kMinimal3AResultEntries = 10;
-
-    Mutex::Autolock l(mOutputLock);
-
-    CaptureResult captureResult;
-    captureResult.mResultExtras = resultExtras;
-    captureResult.mMetadata = CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0);
-    // TODO: change this to sp<CaptureResult>. This will need other changes, including,
-    // but not limited to CameraDeviceBase::getNextResult
-    CaptureResult& min3AResult =
-            *mResultQueue.insert(mResultQueue.end(), captureResult);
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_FRAME_COUNT,
-            // TODO: This is problematic casting. Need to fix CameraMetadata.
-            reinterpret_cast<int32_t*>(&frameNumber), frameNumber)) {
-        return false;
-    }
-
-    int32_t requestId = resultExtras.requestId;
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_ID,
-            &requestId, frameNumber)) {
-        return false;
-    }
-
-    if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
-        if (!insert3AResult(min3AResult.mMetadata, ANDROID_QUIRKS_PARTIAL_RESULT,
-                &partialResult, frameNumber)) {
-            return false;
-        }
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_MODE,
-            &afMode, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_MODE,
-            &awbMode, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_STATE,
-            &aeState, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_STATE,
-            &afState, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_STATE,
-            &awbState, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_TRIGGER_ID,
-            &resultExtras.afTriggerId, frameNumber)) {
-        return false;
-    }
-
-    if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
-            &resultExtras.precaptureTriggerId, frameNumber)) {
-        return false;
-    }
-
-    // We only send the aggregated partial when all 3A related metadata are available
-    // For both API1 and API2.
-    // TODO: we probably should pass through all partials to API2 unconditionally.
-    mResultSignal.signal();
-
-    return true;
-}
-
-template<typename T>
-bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
-        T* value, uint32_t frameNumber) {
-    (void) frameNumber;
-
-    camera_metadata_ro_entry_t entry;
-
-    entry = result.find(tag);
-    if (entry.count == 0) {
-        ALOGVV("%s: Camera %d: Frame %d: No %s provided by HAL!", __FUNCTION__,
-            mId, frameNumber, get_camera_metadata_tag_name(tag));
-        return false;
-    }
-
-    if (sizeof(T) == sizeof(uint8_t)) {
-        *value = entry.data.u8[0];
-    } else if (sizeof(T) == sizeof(int32_t)) {
-        *value = entry.data.i32[0];
-    } else {
-        ALOGE("%s: Unexpected type", __FUNCTION__);
-        return false;
-    }
-    return true;
-}
-
-template<typename T>
-bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
-        const T* value, uint32_t frameNumber) {
-    if (result.update(tag, value, 1) != NO_ERROR) {
-        mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
-        SET_ERR("Frame %d: Failed to set %s in partial metadata",
-                frameNumber, get_camera_metadata_tag_name(tag));
-        return false;
-    }
-    return true;
-}
-
 void Camera3Device::returnOutputBuffers(
         const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
         nsecs_t timestamp) {
@@ -2286,6 +2125,48 @@
     }
 }
 
+void Camera3Device::insertResultLocked(CaptureResult *result, uint32_t frameNumber,
+            const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+    if (result == nullptr) return;
+
+    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+            (int32_t*)&frameNumber, 1) != OK) {
+        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
+        return;
+    }
+
+    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
+        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
+        return;
+    }
+
+    overrideResultForPrecaptureCancel(&result->mMetadata, aeTriggerCancelOverride);
+
+    // Valid result, insert into queue
+    List<CaptureResult>::iterator queuedResult =
+            mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
+    ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+           ", burstId = %" PRId32, __FUNCTION__,
+           queuedResult->mResultExtras.requestId,
+           queuedResult->mResultExtras.frameNumber,
+           queuedResult->mResultExtras.burstId);
+
+    mResultSignal.signal();
+}
+
+
+void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
+        const CaptureResultExtras &resultExtras, uint32_t frameNumber,
+        const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+    Mutex::Autolock l(mOutputLock);
+
+    CaptureResult captureResult;
+    captureResult.mResultExtras = resultExtras;
+    captureResult.mMetadata = partialResult;
+
+    insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+}
+
 
 void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
         CaptureResultExtras &resultExtras,
@@ -2321,16 +2202,6 @@
     captureResult.mResultExtras = resultExtras;
     captureResult.mMetadata = pendingMetadata;
 
-    if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
-            (int32_t*)&frameNumber, 1) != OK) {
-        SET_ERR("Failed to set frame# in metadata (%d)",
-                frameNumber);
-        return;
-    } else {
-        ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
-                __FUNCTION__, mId, frameNumber);
-    }
-
     // Append any previous partials to form a complete result
     if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
         captureResult.mMetadata.append(collectedPartialResult);
@@ -2339,26 +2210,14 @@
     captureResult.mMetadata.sort();
 
     // Check that there's a timestamp in the result metadata
-    camera_metadata_entry entry =
-            captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+    camera_metadata_entry entry = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
     if (entry.count == 0) {
         SET_ERR("No timestamp provided by HAL for frame %d!",
                 frameNumber);
         return;
     }
 
-    overrideResultForPrecaptureCancel(&captureResult.mMetadata, aeTriggerCancelOverride);
-
-    // Valid result, insert into queue
-    List<CaptureResult>::iterator queuedResult =
-            mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
-    ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
-           ", burstId = %" PRId32, __FUNCTION__,
-           queuedResult->mResultExtras.requestId,
-           queuedResult->mResultExtras.frameNumber,
-           queuedResult->mResultExtras.burstId);
-
-    mResultSignal.signal();
+    insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
 }
 
 /**
@@ -2435,7 +2294,7 @@
                 }
                 isPartialResult = (result->partial_result < mNumPartialResults);
                 if (isPartialResult) {
-                    request.partialResult.collectedResult.append(result->result);
+                    request.collectedPartialResult.append(result->result);
                 }
             } else {
                 camera_metadata_ro_entry_t partialResultEntry;
@@ -2448,21 +2307,17 @@
                     // A partial result. Flag this as such, and collect this
                     // set of metadata into the in-flight entry.
                     isPartialResult = true;
-                    request.partialResult.collectedResult.append(
+                    request.collectedPartialResult.append(
                         result->result);
-                    request.partialResult.collectedResult.erase(
+                    request.collectedPartialResult.erase(
                         ANDROID_QUIRKS_PARTIAL_RESULT);
                 }
             }
 
             if (isPartialResult) {
-                // Fire off a 3A-only result if possible
-                if (!request.partialResult.haveSent3A) {
-                    request.partialResult.haveSent3A =
-                            processPartial3AResult(frameNumber,
-                                    request.partialResult.collectedResult,
-                                    request.resultExtras);
-                }
+                // Send partial capture result
+                sendPartialCaptureResult(result->result, request.resultExtras, frameNumber,
+                        request.aeTriggerCancelOverride);
             }
         }
 
@@ -2477,9 +2332,9 @@
                 return;
             }
             if (mUsePartialResult &&
-                    !request.partialResult.collectedResult.isEmpty()) {
+                    !request.collectedPartialResult.isEmpty()) {
                 collectedPartialResult.acquire(
-                    request.partialResult.collectedResult);
+                    request.collectedPartialResult);
             }
             request.haveResultMetadata = true;
         }
@@ -2522,7 +2377,7 @@
         if (result->result != NULL && !isPartialResult) {
             if (shutterTimestamp == 0) {
                 request.pendingMetadata = result->result;
-                request.partialResult.collectedResult = collectedPartialResult;
+                request.collectedPartialResult = collectedPartialResult;
             } else {
                 CameraMetadata metadata;
                 metadata = result->result;
@@ -2700,7 +2555,7 @@
 
             // send pending result and buffers
             sendCaptureResult(r.pendingMetadata, r.resultExtras,
-                r.partialResult.collectedResult, msg.frame_number,
+                r.collectedPartialResult, msg.frame_number,
                 r.hasInputBuffer, r.aeTriggerCancelOverride);
             returnOutputBuffers(r.pendingOutputBuffers.array(),
                 r.pendingOutputBuffers.size(), r.shutterTimestamp);
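Both sendPartialCaptureResult() and sendCaptureResult() now funnel into insertResultLocked(), which stamps the frame number and request ID, applies the AE precapture-cancel override, and queues the result under mOutputLock; the synthesized 3A-only partial is gone. A hedged sketch of the shared-path shape with simplified types, not the real class:

#include <cstdint>
#include <list>
#include <mutex>
#include <string>

struct ResultSketch {
    uint32_t frameNumber;
    int32_t requestId;
    bool partial;
    std::string metadata;   // stands in for CameraMetadata
};

struct DeviceSketch {
    std::mutex outputLock;                 // mOutputLock
    std::list<ResultSketch> resultQueue;   // mResultQueue

    // Caller must hold outputLock, mirroring insertResultLocked(): this is where the
    // frame number / request ID get stamped and the AE-cancel override would be applied.
    void insertResultLocked(ResultSketch result, uint32_t frameNumber) {
        result.frameNumber = frameNumber;
        resultQueue.push_back(result);     // then signal waiting readers (omitted)
    }

    void sendPartialCaptureResult(const std::string &partialMetadata,
                                  int32_t requestId, uint32_t frameNumber) {
        std::lock_guard<std::mutex> lock(outputLock);
        insertResultLocked({frameNumber, requestId, /*partial=*/true, partialMetadata},
                           frameNumber);
    }

    void sendCaptureResult(const std::string &finalMetadata,
                           int32_t requestId, uint32_t frameNumber) {
        std::lock_guard<std::mutex> lock(outputLock);
        insertResultLocked({frameNumber, requestId, /*partial=*/false, finalMetadata},
                           frameNumber);
    }
};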
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index bee69ee..5b1c87e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -648,6 +648,10 @@
         // receives the shutter event.
         CameraMetadata pendingMetadata;
 
+        // The metadata of the partial results that the framework has received from the HAL
+        // so far and has already sent out.
+        CameraMetadata collectedPartialResult;
+
         // Buffers are added by process_capture_result when output buffers
         // return from HAL but framework has not yet received the shutter
         // event. They will be returned to the streams when framework receives
@@ -658,19 +662,6 @@
         // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
         AeTriggerCancelOverride_t aeTriggerCancelOverride;
 
-
-        // Fields used by the partial result only
-        struct PartialResultInFlight {
-            // Set by process_capture_result once 3A has been sent to clients
-            bool    haveSent3A;
-            // Result metadata collected so far, when partial results are in use
-            CameraMetadata collectedResult;
-
-            PartialResultInFlight():
-                    haveSent3A(false) {
-            }
-        } partialResult;
-
         // Default constructor needed by KeyedVector
         InFlightRequest() :
                 shutterTimestamp(0),
@@ -706,23 +697,6 @@
             const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
 
     /**
-     * For the partial result, check if all 3A state fields are available
-     * and if so, queue up 3A-only result to the client. Returns true if 3A
-     * is sent.
-     */
-    bool processPartial3AResult(uint32_t frameNumber,
-            const CameraMetadata& partial, const CaptureResultExtras& resultExtras);
-
-    // Helpers for reading and writing 3A metadata into to/from partial results
-    template<typename T>
-    bool get3AResult(const CameraMetadata& result, int32_t tag,
-            T* value, uint32_t frameNumber);
-
-    template<typename T>
-    bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
-            uint32_t frameNumber);
-
-    /**
      * Override result metadata for cancelling AE precapture trigger applied in
      * handleAePrecaptureCancelRequest().
      */
@@ -820,13 +794,24 @@
     void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
             size_t numBuffers, nsecs_t timestamp);
 
-    // Insert the capture result given the pending metadata, result extras,
+    // Send a partial capture result.
+    void sendPartialCaptureResult(const camera_metadata_t * partialResult,
+            const CaptureResultExtras &resultExtras, uint32_t frameNumber,
+            const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+
+    // Send a total capture result given the pending metadata and result extras,
     // partial results, and the frame number to the result queue.
     void sendCaptureResult(CameraMetadata &pendingMetadata,
             CaptureResultExtras &resultExtras,
             CameraMetadata &collectedPartialResult, uint32_t frameNumber,
             bool reprocess, const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
 
+    // Insert the result into the result queue after updating the frame number and applying
+    // the AE precapture trigger cancel override.
+    // mOutputLock must be held when calling this function.
+    void insertResultLocked(CaptureResult *result, uint32_t frameNumber,
+            const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+
     /**** Scope for mInFlightLock ****/
 
     // Remove the in-flight request of the given index from mInFlightMap
diff --git a/services/mediacodec/minijail/seccomp_policy/mediacodec-seccomp-arm.policy b/services/mediacodec/minijail/seccomp_policy/mediacodec-seccomp-arm.policy
index 75fb9c8..4be96d5 100644
--- a/services/mediacodec/minijail/seccomp_policy/mediacodec-seccomp-arm.policy
+++ b/services/mediacodec/minijail/seccomp_policy/mediacodec-seccomp-arm.policy
@@ -39,6 +39,7 @@
 sched_get_priority_min: 1
 statfs64: 1
 sched_setscheduler: 1
+fstatat64: 1
 
 # for attaching to debuggerd on process crash
 sigaction: 1
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3d4e0b5..e1235b8 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -43,7 +43,7 @@
     return itemsStr;
 }
 
-static bool hasResourceType(String8 type, Vector<MediaResource> resources) {
+static bool hasResourceType(MediaResource::Type type, Vector<MediaResource> resources) {
     for (size_t i = 0; i < resources.size(); ++i) {
         if (resources[i].mType == type) {
             return true;
@@ -52,7 +52,7 @@
     return false;
 }
 
-static bool hasResourceType(String8 type, ResourceInfos infos) {
+static bool hasResourceType(MediaResource::Type type, ResourceInfos infos) {
     for (size_t i = 0; i < infos.size(); ++i) {
         if (hasResourceType(type, infos[i].resources)) {
             return true;
@@ -96,8 +96,8 @@
     if (binder != NULL) {
         sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
         for (size_t i = 0; i < resources.size(); ++i) {
-            service->notifyResourceGranted(pid, String16(resources[i].mType),
-                    String16(resources[i].mSubType), resources[i].mValue);
+            service->notifyResourceGranted(pid, String16(asString(resources[i].mType)),
+                    String16(asString(resources[i].mSubType)), resources[i].mValue);
         }
     }
 }
@@ -275,12 +275,12 @@
         const MediaResource *nonSecureCodec = NULL;
         const MediaResource *graphicMemory = NULL;
         for (size_t i = 0; i < resources.size(); ++i) {
-            String8 type = resources[i].mType;
-            if (resources[i].mType == kResourceSecureCodec) {
+            MediaResource::Type type = resources[i].mType;
+            if (resources[i].mType == MediaResource::kSecureCodec) {
                 secureCodec = &resources[i];
-            } else if (type == kResourceNonSecureCodec) {
+            } else if (type == MediaResource::kNonSecureCodec) {
                 nonSecureCodec = &resources[i];
-            } else if (type == kResourceGraphicMemory) {
+            } else if (type == MediaResource::kGraphicMemory) {
                 graphicMemory = &resources[i];
             }
         }
@@ -288,19 +288,19 @@
         // first pass to handle secure/non-secure codec conflict
         if (secureCodec != NULL) {
             if (!mSupportsMultipleSecureCodecs) {
-                if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::kSecureCodec, &clients)) {
                     return false;
                 }
             }
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::kNonSecureCodec, &clients)) {
                     return false;
                 }
             }
         }
         if (nonSecureCodec != NULL) {
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::kSecureCodec, &clients)) {
                     return false;
                 }
             }
@@ -320,11 +320,11 @@
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
-                MediaResource temp(String8(kResourceNonSecureCodec), 1);
+                MediaResource temp(MediaResource::kNonSecureCodec, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
             if (nonSecureCodec != NULL) {
-                MediaResource temp(String8(kResourceSecureCodec), 1);
+                MediaResource temp(MediaResource::kSecureCodec, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
         }
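With the String8 constants gone, callers build reclaim requests directly from the enum. A minimal usage sketch, assuming a binder handle named service to the resource manager (the handle name and the log line are illustrative; the constructor and reclaimResource() signature match the tests further below):

    Vector<MediaResource> resources;
    resources.push_back(MediaResource(MediaResource::kSecureCodec, 1));
    resources.push_back(MediaResource(MediaResource::kGraphicMemory, 150));

    // Ask the service to free enough lower-priority resources for callingPid.
    if (!service->reclaimResource(callingPid, resources)) {
        ALOGW("reclaimResource: nothing could be reclaimed for pid %d", callingPid);
    }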
@@ -374,7 +374,7 @@
 }
 
 bool ResourceManagerService::getAllClients_l(
-        int callingPid, const String8 &type, Vector<sp<IResourceManagerClient>> *clients) {
+        int callingPid, MediaResource::Type type, Vector<sp<IResourceManagerClient>> *clients) {
     Vector<sp<IResourceManagerClient>> temp;
     for (size_t i = 0; i < mMap.size(); ++i) {
         ResourceInfos &infos = mMap.editValueAt(i);
@@ -384,7 +384,7 @@
                     // some higher/equal priority process owns the resource,
                     // this request can't be fulfilled.
                     ALOGE("getAllClients_l: can't reclaim resource %s from pid %d",
-                            type.string(), mMap.keyAt(i));
+                            asString(type), mMap.keyAt(i));
                     return false;
                 }
                 temp.push_back(infos[j].client);
@@ -392,7 +392,7 @@
         }
     }
     if (temp.size() == 0) {
-        ALOGV("getAllClients_l: didn't find any resource %s", type.string());
+        ALOGV("getAllClients_l: didn't find any resource %s", asString(type));
         return true;
     }
     clients->appendVector(temp);
@@ -400,7 +400,7 @@
 }
 
 bool ResourceManagerService::getLowestPriorityBiggestClient_l(
-        int callingPid, const String8 &type, sp<IResourceManagerClient> *client) {
+        int callingPid, MediaResource::Type type, sp<IResourceManagerClient> *client) {
     int lowestPriorityPid;
     int lowestPriority;
     int callingPriority;
@@ -425,7 +425,7 @@
 }
 
 bool ResourceManagerService::getLowestPriorityPid_l(
-        const String8 &type, int *lowestPriorityPid, int *lowestPriority) {
+        MediaResource::Type type, int *lowestPriorityPid, int *lowestPriority) {
     int pid = -1;
     int priority = -1;
     for (size_t i = 0; i < mMap.size(); ++i) {
@@ -472,7 +472,7 @@
 }
 
 bool ResourceManagerService::getBiggestClient_l(
-        int pid, const String8 &type, sp<IResourceManagerClient> *client) {
+        int pid, MediaResource::Type type, sp<IResourceManagerClient> *client) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
         ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
@@ -495,7 +495,7 @@
     }
 
     if (clientTemp == NULL) {
-        ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", type.string(), pid);
+        ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", asString(type), pid);
         return false;
     }
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 4769373..8f6fe9a 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -79,22 +79,22 @@
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
-    bool getAllClients_l(int callingPid, const String8 &type,
+    bool getAllClients_l(int callingPid, MediaResource::Type type,
             Vector<sp<IResourceManagerClient>> *clients);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
-    bool getLowestPriorityBiggestClient_l(int callingPid, const String8 &type,
+    bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
             sp<IResourceManagerClient> *client);
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
-    bool getLowestPriorityPid_l(const String8 &type, int *pid, int *priority);
+    bool getLowestPriorityPid_l(MediaResource::Type type, int *pid, int *priority);
 
     // Gets the client who owns biggest piece of specified resource type from pid.
     // Returns false if failed. The client will remain unchanged if failed.
-    bool getBiggestClient_l(int pid, const String8 &type, sp<IResourceManagerClient> *client);
+    bool getBiggestClient_l(int pid, MediaResource::Type type, sp<IResourceManagerClient> *client);
 
     bool isCallingPriorityHigher_l(int callingPid, int pid);
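These enum-typed helpers compose into the reclaim policy: pick the lowest-priority process holding the requested type, then the biggest client inside it. A rough sketch of how getLowestPriorityBiggestClient_l() might chain them (illustrative only; the real body, including its error logging, is in ResourceManagerService.cpp):

    bool ResourceManagerService::getLowestPriorityBiggestClient_l(
            int callingPid, MediaResource::Type type, sp<IResourceManagerClient> *client) {
        int callingPriority, lowestPriorityPid, lowestPriority;

        if (!mProcessInfo->getPriority(callingPid, &callingPriority)) {
            return false;                   // unknown caller, refuse to reclaim
        }
        if (!getLowestPriorityPid_l(type, &lowestPriorityPid, &lowestPriority)) {
            return false;                   // nobody holds this resource type
        }
        if (lowestPriority <= callingPriority) {
            return false;                   // lowest owner is not lower priority than the caller
        }
        return getBiggestClient_l(lowestPriorityPid, type, client);
    }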
 
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index cffedc6..62b7711 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -152,24 +152,24 @@
     void addResource() {
         // kTestPid1 mTestClient1
         Vector<MediaResource> resources1;
-        resources1.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+        resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
         mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources1);
-        resources1.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+        resources1.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
         Vector<MediaResource> resources11;
-        resources11.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+        resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
         mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources11);
 
         // kTestPid2 mTestClient2
         Vector<MediaResource> resources2;
-        resources2.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
-        resources2.push_back(MediaResource(String8(kResourceGraphicMemory), 300));
+        resources2.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+        resources2.push_back(MediaResource(MediaResource::kGraphicMemory, 300));
         mService->addResource(kTestPid2, getId(mTestClient2), mTestClient2, resources2);
 
         // kTestPid2 mTestClient3
         Vector<MediaResource> resources3;
         mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
-        resources3.push_back(MediaResource(String8(kResourceSecureCodec), 1));
-        resources3.push_back(MediaResource(String8(kResourceGraphicMemory), 100));
+        resources3.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        resources3.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
         mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
 
         const PidResourceInfosMap &map = mService->mMap;
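For reference when reading the assertions below, after addResource() the fixture holds (derived from the calls above):

    pid        client         resources
    kTestPid1  mTestClient1   1 secure codec, 200 graphic memory
    kTestPid2  mTestClient2   1 non-secure codec, 300 graphic memory
    kTestPid2  mTestClient3   1 secure codec, 100 graphic memory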
@@ -237,14 +237,12 @@
     void testGetAllClients() {
         addResource();
 
-        String8 type = String8(kResourceSecureCodec);
-        String8 unknowType = String8("unknowType");
+        MediaResource::Type type = MediaResource::kSecureCodec;
         Vector<sp<IResourceManagerClient> > clients;
         EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, &clients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
         EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, unknowType, &clients));
         EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
 
         EXPECT_EQ(2u, clients.size());
@@ -254,8 +252,8 @@
 
     void testReclaimResourceSecure() {
         Vector<MediaResource> resources;
-        resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
-        resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+        resources.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        resources.push_back(MediaResource(MediaResource::kGraphicMemory, 150));
 
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
@@ -356,7 +354,7 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             Vector<MediaResource> resources;
-            resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+            resources.push_back(MediaResource(MediaResource::kSecureCodec, 1));
 
             EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
             // secure codec from lowest process got reclaimed
@@ -374,8 +372,8 @@
 
     void testReclaimResourceNonSecure() {
         Vector<MediaResource> resources;
-        resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
-        resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+        resources.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+        resources.push_back(MediaResource(MediaResource::kGraphicMemory, 150));
 
         // ### secure codec can't coexist with non-secure codec ###
         {
@@ -429,7 +427,7 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             Vector<MediaResource> resources;
-            resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
+            resources.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
 
             EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
             // one non secure codec from lowest process got reclaimed
@@ -445,7 +443,7 @@
     }
 
     void testGetLowestPriorityBiggestClient() {
-        String8 type = String8(kResourceGraphicMemory);
+        MediaResource::Type type = MediaResource::kGraphicMemory;
         sp<IResourceManagerClient> client;
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
 
@@ -454,8 +452,8 @@
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, &client));
         EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
 
-        // kTestPid1 is the lowest priority process with kResourceGraphicMemory.
-        // mTestClient1 has the largest kResourceGraphicMemory within kTestPid1.
+        // kTestPid1 is the lowest priority process with MediaResource::kGraphicMemory.
+        // mTestClient1 has the largest MediaResource::kGraphicMemory within kTestPid1.
         EXPECT_EQ(mTestClient1, client);
     }
 
@@ -464,7 +462,7 @@
         int priority;
         TestProcessInfo processInfo;
 
-        String8 type = String8(kResourceGraphicMemory);
+        MediaResource::Type type = MediaResource::kGraphicMemory;
         EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority));
 
         addResource();
@@ -475,7 +473,7 @@
         processInfo.getPriority(kTestPid1, &priority1);
         EXPECT_EQ(priority1, priority);
 
-        type = String8(kResourceNonSecureCodec);
+        type = MediaResource::kNonSecureCodec;
         EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
         EXPECT_EQ(kTestPid2, pid);
         int priority2;
@@ -484,7 +482,7 @@
     }
 
     void testGetBiggestClient() {
-        String8 type = String8(kResourceGraphicMemory);
+        MediaResource::Type type = MediaResource::kGraphicMemory;
         sp<IResourceManagerClient> client;
         EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client));