Merge "MPEG4Extractor: support CryptoInfo for cbc1, cbcs, cens" into pi-dev
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index ee3e98e..3010646 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -7050,6 +7050,10 @@
     /**
      * <p>The camera device is a logical camera backed by two or more physical cameras that are
      * also exposed to the application.</p>
+     * <p>A camera application shouldn't assume that there is at most one rear camera and one
+     * front camera in the system. For an application that switches between front and back
+     * cameras, the recommendation is to switch between the first rear camera and the first
+     * front camera in the list of supported camera devices.</p>
      * <p>This capability requires the camera device to support the following:</p>
      * <ul>
      * <li>This camera device must list the following static metadata entries in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>:<ul>
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 9d2daab..4603515 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -135,6 +135,7 @@
                 strerror(errno));
         return err;
     }
+    signal(SIGPIPE, SIG_IGN);
     return NO_ERROR;
 }
 
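The signal(SIGPIPE, SIG_IGN) call added above matters when screenrecord writes its output to a pipe: if the reader exits, an unhandled SIGPIPE would terminate the process, whereas with SIG_IGN the write() simply fails with EPIPE and can be handled. A minimal standalone sketch of the same pattern (illustrative only, not screenrecord's code):

    #include <csignal>
    #include <cerrno>
    #include <cstdio>
    #include <unistd.h>

    // With SIGPIPE ignored, writing to a pipe whose reader has gone away
    // returns -1 with errno == EPIPE instead of killing the process.
    static int writeChunk(int fd, const void *data, size_t len) {
        signal(SIGPIPE, SIG_IGN);        // same call the patch adds once at setup
        ssize_t n = write(fd, data, len);
        if (n < 0 && errno == EPIPE) {
            fprintf(stderr, "reader went away, stopping cleanly\n");
            return -1;
        }
        return 0;
    }
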
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index 5d0f68e..ad1ccbc 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -262,6 +262,17 @@
 void CryptoHal::clearHeapBase(int32_t seqNum) {
     Mutex::Autolock autoLock(mLock);
 
+    /*
+     * Clear the remote shared memory mapping by setting the shared
+     * buffer base to a null hidl_memory.
+     *
+     * TODO: Add a releaseSharedBuffer method in a future DRM HAL
+     * API version to make this explicit.
+     */
+    uint32_t bufferId = mHeapBases.valueFor(seqNum).getBufferId();
+    Return<void> hResult = mPlugin->setSharedBufferBase(hidl_memory(), bufferId);
+    ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
+
     mHeapBases.removeItem(seqNum);
 }
 
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index b6787af..ca9deab 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -506,7 +506,7 @@
 
         ImageItem &derivedImage = itemIdToItemMap.editValueAt(itemIndex);
         if (!derivedImage.dimgRefs.empty()) {
-            ALOGW("dimgRefs if not clean!");
+            ALOGW("dimgRefs not clean!");
         }
         derivedImage.dimgRefs.appendVector(mRefs);
 
@@ -1490,6 +1490,17 @@
 
     const ImageItem *image = &mItemIdToItemMap[itemIndex];
 
+    ssize_t tileItemIndex = -1;
+    if (image->isGrid()) {
+        if (image->dimgRefs.empty()) {
+            return NULL;
+        }
+        tileItemIndex = mItemIdToItemMap.indexOfKey(image->dimgRefs[0]);
+        if (tileItemIndex < 0) {
+            return NULL;
+        }
+    }
+
     sp<MetaData> meta = new MetaData;
     meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
 
@@ -1530,10 +1541,6 @@
     }
 
     if (image->isGrid()) {
-        ssize_t tileItemIndex = mItemIdToItemMap.indexOfKey(image->dimgRefs[0]);
-        if (tileItemIndex < 0) {
-            return NULL;
-        }
         meta->setInt32(kKeyGridRows, image->rows);
         meta->setInt32(kKeyGridCols, image->columns);
 
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
index 67ec244..9ac7e2a 100644
--- a/media/img_utils/src/DngUtils.cpp
+++ b/media/img_utils/src/DngUtils.cpp
@@ -302,29 +302,14 @@
     normalizedOCX = CLAMP(normalizedOCX, 0, 1);
     normalizedOCY = CLAMP(normalizedOCY, 0, 1);
 
-    // Conversion factors from Camera2 K factors to DNG spec. K factors:
-    //
-    //      Note: these are necessary because our unit system assumes a
-    //      normalized max radius of sqrt(2), whereas the DNG spec's
-    //      WarpRectilinear opcode assumes a normalized max radius of 1.
-    //      Thus, each K coefficient must include the domain scaling
-    //      factor (the DNG domain is scaled by sqrt(2) to emulate the
-    //      domain used by the Camera2 specification).
-
-    const double c_0 = sqrt(2);
-    const double c_1 = 2 * sqrt(2);
-    const double c_2 = 4 * sqrt(2);
-    const double c_3 = 8 * sqrt(2);
-    const double c_4 = 2;
-    const double c_5 = 2;
-
-    const double coeffs[] = { c_0 * kCoeffs[0],
-                              c_1 * kCoeffs[1],
-                              c_2 * kCoeffs[2],
-                              c_3 * kCoeffs[3],
-                              c_4 * kCoeffs[4],
-                              c_5 * kCoeffs[5] };
-
+    double coeffs[6] = {
+        kCoeffs[0],
+        kCoeffs[1],
+        kCoeffs[2],
+        kCoeffs[3],
+        kCoeffs[4],
+        kCoeffs[5]
+    };
 
     return addWarpRectilinear(/*numPlanes*/1,
                               /*opticalCenterX*/normalizedOCX,
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 2207cb8c..5b29419 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -44,7 +44,15 @@
 #define AAUDIO_UNSPECIFIED           0
 
 enum {
+    /**
+     * Audio data will travel out of the device, for example through a speaker.
+     */
     AAUDIO_DIRECTION_OUTPUT,
+
+
+    /**
+     * Audio data will travel into the device, for example from a microphone.
+     */
     AAUDIO_DIRECTION_INPUT
 };
 typedef int32_t aaudio_direction_t;
@@ -52,33 +60,112 @@
 enum {
     AAUDIO_FORMAT_INVALID = -1,
     AAUDIO_FORMAT_UNSPECIFIED = 0,
+
+    /**
+     * This format uses the int16_t data type.
+     * The maximum range of the data is -32768 to 32767.
+     */
     AAUDIO_FORMAT_PCM_I16,
+
+    /**
+     * This format uses the float data type.
+     * The nominal range of the data is [-1.0f, 1.0f).
+     * Values outside that range may be clipped.
+     *
+     * See also 'floatData' at
+     * https://developer.android.com/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)
+     */
     AAUDIO_FORMAT_PCM_FLOAT
 };
 typedef int32_t aaudio_format_t;
 
+/**
+ * These result codes are returned from AAudio functions to indicate success or failure.
+ * Note that error return codes may change in the future so applications should generally
+ * not rely on specific return codes.
+ */
 enum {
+    /**
+     * The call was successful.
+     */
     AAUDIO_OK,
     AAUDIO_ERROR_BASE = -900, // TODO review
+
+    /**
+     * The audio device was disconnected. This could occur, for example, when headphones
+     * are plugged in or unplugged. The stream cannot be used after the device is disconnected.
+     * Applications should stop and close the stream.
+     * If this error is received in an error callback then another thread should be
+     * used to stop and close the stream.
+     */
     AAUDIO_ERROR_DISCONNECTED,
+
+    /**
+     * An invalid parameter was passed to AAudio.
+     */
     AAUDIO_ERROR_ILLEGAL_ARGUMENT,
     // reserved
     AAUDIO_ERROR_INTERNAL = AAUDIO_ERROR_ILLEGAL_ARGUMENT + 2,
+
+    /**
+     * The requested operation is not appropriate for the current state of AAudio.
+     */
     AAUDIO_ERROR_INVALID_STATE,
     // reserved
     // reserved
+    /** The server rejected the handle used to identify the stream.
+     */
     AAUDIO_ERROR_INVALID_HANDLE = AAUDIO_ERROR_INVALID_STATE + 3,
     // reserved
+
+    /**
+     * The function is not implemented for this stream.
+     */
     AAUDIO_ERROR_UNIMPLEMENTED = AAUDIO_ERROR_INVALID_HANDLE + 2,
+
+    /**
+     * A resource or information is unavailable.
+     * This could occur when an application tries to open too many streams,
+     * or a timestamp is not available.
+     */
     AAUDIO_ERROR_UNAVAILABLE,
     AAUDIO_ERROR_NO_FREE_HANDLES,
+
+    /**
+     * Memory could not be allocated.
+     */
     AAUDIO_ERROR_NO_MEMORY,
+
+    /**
+     * A NULL pointer was passed to AAudio,
+     * or a NULL pointer was detected internally.
+     */
     AAUDIO_ERROR_NULL,
+
+    /**
+     * An operation took longer than expected.
+     */
     AAUDIO_ERROR_TIMEOUT,
     AAUDIO_ERROR_WOULD_BLOCK,
+
+    /**
+     * The requested data format is not supported.
+     */
     AAUDIO_ERROR_INVALID_FORMAT,
+
+    /**
+     * A requested value was out of range.
+     */
     AAUDIO_ERROR_OUT_OF_RANGE,
+
+    /**
+     * The audio service was not available.
+     */
     AAUDIO_ERROR_NO_SERVICE,
+
+    /**
+     * The requested sample rate was not supported.
+     */
     AAUDIO_ERROR_INVALID_RATE
 };
 typedef int32_t  aaudio_result_t;
@@ -126,15 +213,15 @@
     AAUDIO_PERFORMANCE_MODE_NONE = 10,
 
     /**
-     * Extending battery life is most important.
+     * Extending battery life is more important than low latency.
      *
      * This mode is not supported in input streams.
-     * Mode NONE will be used if this is requested.
+     * For input, mode NONE will be used if this is requested.
      */
     AAUDIO_PERFORMANCE_MODE_POWER_SAVING,
 
     /**
-     * Reducing latency is most important.
+     * Reducing latency is more important than battery life.
      */
     AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
 };
@@ -289,6 +376,11 @@
 };
 typedef int32_t aaudio_input_preset_t;
 
+/**
+ * These may be used with AAudioStreamBuilder_setSessionId().
+ *
+ * Added in API level 28.
+ */
 enum {
     /**
      * Do not allocate a session ID.
@@ -302,7 +394,7 @@
     /**
      * Allocate a session ID that can be used to attach and control
      * effects using the Java AudioEffects API.
-     * Note that the use of this flag may result in higher latency.
+     * Note that using this may result in higher latency.
      *
      * Note that this matches the value of AudioManager.AUDIO_SESSION_ID_GENERATE.
      *
@@ -474,8 +566,14 @@
 /**
  * Set the requested performance mode.
  *
+ * Supported modes are AAUDIO_PERFORMANCE_MODE_NONE, AAUDIO_PERFORMANCE_MODE_POWER_SAVING
+ * and AAUDIO_PERFORMANCE_MODE_LOW_LATENCY.
+ *
  * The default, if you do not call this function, is AAUDIO_PERFORMANCE_MODE_NONE.
  *
+ * You may not get the mode you requested.
+ * You can call AAudioStream_getPerformanceMode() to find out the final mode for the stream.
+ *
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param mode the desired performance mode, eg. AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
  */
@@ -550,10 +648,12 @@
  * and then used with this function when opening another stream.
  * This allows effects to be shared between streams.
  *
- * Session IDs from AAudio can be used the Android Java APIs and vice versa.
+ * Session IDs from AAudio can be used with the Android Java APIs and vice versa.
  * So a session ID from an AAudio stream can be passed to Java
  * and effects applied using the Java AudioEffect API.
  *
+ * Note that allocating or setting a session ID may result in a stream with higher latency.
+ *
  * Allocated session IDs will always be positive and nonzero.
  *
  * Added in API level 28.
@@ -612,6 +712,14 @@
  * <li>use any mutexes or other synchronization primitives</li>
  * <li>sleep</li>
  * <li>stop or close the stream</li>
+ * <li>AAudioStream_read()</li>
+ * <li>AAudioStream_write()</li>
+ * </ul>
+ *
+ * The following are OK to call from the data callback:
+ * <ul>
+ * <li>AAudioStream_get*()</li>
+ * <li>AAudio_convertResultToText()</li>
  * </ul>
  *
  * If you need to move data, eg. MIDI commands, in or out of the callback function then
@@ -685,6 +793,22 @@
  * Prototype for the callback function that is passed to
  * AAudioStreamBuilder_setErrorCallback().
  *
+ * The following may NOT be called from the error callback:
+ * <ul>
+ * <li>AAudioStream_requestStop()</li>
+ * <li>AAudioStream_requestPause()</li>
+ * <li>AAudioStream_close()</li>
+ * <li>AAudioStream_waitForStateChange()</li>
+ * <li>AAudioStream_read()</li>
+ * <li>AAudioStream_write()</li>
+ * </ul>
+ *
+ * The following are OK to call from the error callback:
+ * <ul>
+ * <li>AAudioStream_get*()</li>
+ * <li>AAudio_convertResultToText()</li>
+ * </ul>
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param userData the same address that was passed to AAudioStreamBuilder_setErrorCallback()
  * @param error an AAUDIO_ERROR_* value.
@@ -856,6 +980,8 @@
  *
  * This call is "strong non-blocking" unless it has to wait for data.
  *
+ * If the call times out then zero or a partial frame count will be returned.
+ *
  * @param stream A stream created using AAudioStreamBuilder_openStream().
  * @param buffer The address of the first sample.
  * @param numFrames Number of frames to read. Only complete frames will be written.
@@ -879,6 +1005,8 @@
  *
  * This call is "strong non-blocking" unless it has to wait for room in the buffer.
  *
+ * If the call times out then zero or a partial frame count will be returned.
+ *
  * @param stream A stream created using AAudioStreamBuilder_openStream().
  * @param buffer The address of the first sample.
  * @param numFrames Number of frames to write. Only complete frames will be written.
@@ -903,7 +1031,8 @@
  * This cannot be set higher than AAudioStream_getBufferCapacityInFrames().
  *
  * Note that you will probably not get the exact size you request.
- * Call AAudioStream_getBufferSizeInFrames() to see what the actual final size is.
+ * You can check the return value or call AAudioStream_getBufferSizeInFrames()
+ * to see what the actual final size is.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param numFrames requested number of frames that can be filled without blocking
@@ -1038,7 +1167,8 @@
 
 /**
  * Passes back the number of frames that have been written since the stream was created.
- * For an output stream, this will be advanced by the application calling write().
+ * For an output stream, this will be advanced by the application calling write()
+ * or by a data callback.
  * For an input stream, this will be advanced by the endpoint.
  *
  * The frame position is monotonically increasing.
@@ -1051,7 +1181,8 @@
 /**
  * Passes back the number of frames that have been read since the stream was created.
  * For an output stream, this will be advanced by the endpoint.
- * For an input stream, this will be advanced by the application calling read().
+ * For an input stream, this will be advanced by the application calling read()
+ * or by a data callback.
  *
  * The frame position is monotonically increasing.
  *
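
The error callback documentation added above says AAudioStream_requestStop(), AAudioStream_close() and related calls must not be made from the callback itself; after AAUDIO_ERROR_DISCONNECTED another thread should stop and close the stream. A minimal sketch of that hand-off (illustrative only; stopAndClose is a placeholder name):

    #include <aaudio/AAudio.h>
    #include <thread>

    // App-owned helper, runs on its own thread, NOT in the callback.
    static void stopAndClose(AAudioStream *stream) {
        AAudioStream_requestStop(stream);
        AAudioStream_close(stream);
        // ...optionally reopen a stream on the new device here...
    }

    // Error callback: only hand the work off, per the restrictions above.
    static void myErrorCallback(AAudioStream *stream, void *userData,
                                aaudio_result_t error) {
        (void) userData;
        if (error == AAUDIO_ERROR_DISCONNECTED) {
            std::thread(stopAndClose, stream).detach();
        }
    }

    // Registered when building the stream:
    //   AAudioStreamBuilder_setErrorCallback(builder, myErrorCallback, nullptr);
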
diff --git a/media/libaudioclient/include/media/AudioParameter.h b/media/libaudioclient/include/media/AudioParameter.h
index 59ac1db..967d895 100644
--- a/media/libaudioclient/include/media/AudioParameter.h
+++ b/media/libaudioclient/include/media/AudioParameter.h
@@ -81,6 +81,11 @@
 
     static const char * const valueListSeparator;
 
+    // keyReconfigA2dp: Ask HwModule to reconfigure A2DP offloaded codec
+    // keyReconfigA2dpSupported: Query if HwModule supports A2DP offload codec config
+    static const char * const keyReconfigA2dp;
+    static const char * const keyReconfigA2dpSupported;
+
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
 
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index aa39443..f6f817a 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -192,7 +192,6 @@
     // always recompute for both channel masks even if only one has changed.
     const uint32_t trackChannelCount = audio_channel_count_from_out_mask(trackChannelMask);
     const uint32_t mixerChannelCount = audio_channel_count_from_out_mask(mixerChannelMask);
-    const bool mixerChannelCountChanged = track->mMixerChannelCount != mixerChannelCount;
 
     ALOG_ASSERT((trackChannelCount <= MAX_NUM_CHANNELS_TO_DOWNMIX)
             && trackChannelCount
@@ -213,7 +212,7 @@
     // do it after downmix since track format may change!
     track->prepareForReformat();
 
-    if (track->mResampler.get() != nullptr && mixerChannelCountChanged) {
+    if (track->mResampler.get() != nullptr) {
         // resampler channels may have changed.
         const uint32_t resetToSampleRate = track->sampleRate;
         track->mResampler.reset(nullptr);
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 3e88c7c..5fa9da9 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -1,5 +1,5 @@
 // Effect configuration
-cc_library_shared {
+cc_library {
     name: "libeffectsconfig",
     vendor_available: true,
 
diff --git a/media/libeffects/dynamicsproc/EffectDynamicsProcessing.cpp b/media/libeffects/dynamicsproc/EffectDynamicsProcessing.cpp
index 55383eb..0b883f1 100644
--- a/media/libeffects/dynamicsproc/EffectDynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/EffectDynamicsProcessing.cpp
@@ -51,7 +51,7 @@
         {0x7261676f, 0x6d75, 0x7369, 0x6364, {0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e}}, // type
         {0xe0e6539b, 0x1781, 0x7261, 0x676f, {0x6d, 0x75, 0x73, 0x69, 0x63, 0x40}}, // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST),
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL),
         0, // TODO
         1,
         "Dynamics Processing",
@@ -367,6 +367,76 @@
     return 0;
 }
 
+//helper function
+bool DP_checkSizesInt(uint32_t paramSize, uint32_t valueSize, uint32_t expectedParams,
+        uint32_t expectedValues) {
+    if (paramSize < expectedParams * sizeof(int32_t)) {
+        ALOGE("Invalid paramSize: %u expected %u", paramSize,
+                (uint32_t)(expectedParams * sizeof(int32_t)));
+        return false;
+    }
+    if (valueSize < expectedValues * sizeof(int32_t)) {
+        ALOGE("Invalid valueSize %u expected %u", valueSize,
+                (uint32_t)(expectedValues * sizeof(int32_t)));
+        return false;
+    }
+    return true;
+}
+
+static dp_fx::DPChannel* DP_getChannel(DynamicsProcessingContext *pContext,
+        int32_t channel) {
+    if (pContext->mPDynamics == NULL) {
+        return NULL;
+    }
+    dp_fx::DPChannel *pChannel = pContext->mPDynamics->getChannel(channel);
+    ALOGE_IF(pChannel == NULL, "DPChannel NULL. invalid channel %d", channel);
+    return pChannel;
+}
+
+static dp_fx::DPEq* DP_getEq(DynamicsProcessingContext *pContext, int32_t channel,
+        int32_t eqType) {
+    dp_fx::DPChannel *pChannel = DP_getChannel(pContext, channel);
+    if (pChannel == NULL) {
+        return NULL;
+    }
+    dp_fx::DPEq *pEq = (eqType == DP_PARAM_PRE_EQ ? pChannel->getPreEq() :
+            (eqType == DP_PARAM_POST_EQ ? pChannel->getPostEq() : NULL));
+    ALOGE_IF(pEq == NULL,"DPEq NULL invalid eq");
+    return pEq;
+}
+
+static dp_fx::DPEqBand* DP_getEqBand(DynamicsProcessingContext *pContext, int32_t channel,
+        int32_t eqType, int32_t band) {
+    dp_fx::DPEq *pEq = DP_getEq(pContext, channel, eqType);
+    if (pEq == NULL) {
+        return NULL;
+    }
+    dp_fx::DPEqBand *pEqBand = pEq->getBand(band);
+    ALOGE_IF(pEqBand == NULL, "DPEqBand NULL. invalid band %d", band);
+    return pEqBand;
+}
+
+static dp_fx::DPMbc* DP_getMbc(DynamicsProcessingContext *pContext, int32_t channel) {
+    dp_fx::DPChannel * pChannel = DP_getChannel(pContext, channel);
+    if (pChannel == NULL) {
+        return NULL;
+    }
+    dp_fx::DPMbc *pMbc = pChannel->getMbc();
+    ALOGE_IF(pMbc == NULL, "DPMbc NULL invalid MBC");
+    return pMbc;
+}
+
+static dp_fx::DPMbcBand* DP_getMbcBand(DynamicsProcessingContext *pContext, int32_t channel,
+        int32_t band) {
+    dp_fx::DPMbc *pMbc = DP_getMbc(pContext, channel);
+    if (pMbc == NULL) {
+        return NULL;
+    }
+    dp_fx::DPMbcBand *pMbcBand = pMbc->getBand(band);
+    ALOGE_IF(pMbcBand == NULL, "pMbcBand NULL. invalid band %d", band);
+    return pMbcBand;
+}
+
 int DP_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
         void *pCmdData, uint32_t *replySize, void *pReplyData) {
 
@@ -483,8 +553,49 @@
                 p->data + voffset);
         break;
     }
+    case EFFECT_CMD_SET_VOLUME: {
+        ALOGV("EFFECT_CMD_SET_VOLUME");
+        // if pReplyData is NULL, VOL_CTRL is delegated to another effect
+        if (pReplyData == NULL || replySize == NULL || *replySize < ((int)sizeof(int32_t) * 2)) {
+            ALOGV("no VOLUME data to return");
+            break;
+        }
+        if (pCmdData == NULL || cmdSize < ((int)sizeof(uint32_t) * 2)) {
+            ALOGE("\tLVM_ERROR : DynamicsProcessing EFFECT_CMD_SET_VOLUME ERROR");
+            return -EINVAL;
+        }
+
+        const int32_t unityGain = 1 << 24;
+        //channel count
+        int32_t channelCount = (int32_t)audio_channel_count_from_out_mask(
+                pContext->mConfig.inputCfg.channels);
+        for (int32_t ch = 0; ch < channelCount; ch++) {
+
+            dp_fx::DPChannel * pChannel = DP_getChannel(pContext, ch);
+            if (pChannel == NULL) {
+                ALOGE("%s EFFECT_CMD_SET_VOLUME invalid channel %d", __func__, ch);
+                return -EINVAL;
+                break;
+            }
+
+            int32_t offset = ch;
+            if (ch > 1) {
+                // FIXME: limited to 2 unique channels. If more channels present, use value for
+                // first channel
+                offset = 0;
+            }
+            const float gain = (float)*((uint32_t *)pCmdData + offset) / unityGain;
+            const float gainDb = linearToDb(gain);
+            ALOGVV("%s EFFECT_CMD_SET_VOLUME channel %d, engine outputlevel %f (%0.2f dB)",
+                    __func__, ch, gain, gainDb);
+            pChannel->setOutputGain(gainDb);
+        }
+
+        const int32_t  volRet[2] = {unityGain, unityGain}; // Apply no volume before effect.
+        memcpy(pReplyData, volRet, sizeof(volRet));
+        break;
+    }
     case EFFECT_CMD_SET_DEVICE:
-    case EFFECT_CMD_SET_VOLUME:
     case EFFECT_CMD_SET_AUDIO_MODE:
         break;
 
@@ -523,76 +634,6 @@
     return 0;
 }
 
-//helper function
-bool DP_checkSizesInt(uint32_t paramSize, uint32_t valueSize, uint32_t expectedParams,
-        uint32_t expectedValues) {
-    if (paramSize < expectedParams * sizeof(int32_t)) {
-        ALOGE("Invalid paramSize: %u expected %u", paramSize,
-                (uint32_t) (expectedParams * sizeof(int32_t)));
-        return false;
-    }
-    if (valueSize < expectedValues * sizeof(int32_t)) {
-        ALOGE("Invalid valueSize %u expected %u", valueSize,
-                (uint32_t)(expectedValues * sizeof(int32_t)));
-        return false;
-    }
-    return true;
-}
-
-static dp_fx::DPChannel* DP_getChannel(DynamicsProcessingContext *pContext,
-        int32_t channel) {
-    if (pContext->mPDynamics == NULL) {
-        return NULL;
-    }
-    dp_fx::DPChannel *pChannel = pContext->mPDynamics->getChannel(channel);
-    ALOGE_IF(pChannel == NULL, "DPChannel NULL. invalid channel %d", channel);
-    return pChannel;
-}
-
-static dp_fx::DPEq* DP_getEq(DynamicsProcessingContext *pContext, int32_t channel,
-        int32_t eqType) {
-    dp_fx::DPChannel * pChannel = DP_getChannel(pContext, channel);
-    if (pChannel == NULL) {
-        return NULL;
-    }
-    dp_fx::DPEq *pEq = (eqType == DP_PARAM_PRE_EQ ? pChannel->getPreEq() :
-            (eqType == DP_PARAM_POST_EQ ? pChannel->getPostEq() : NULL));
-    ALOGE_IF(pEq == NULL,"DPEq NULL invalid eq");
-    return pEq;
-}
-
-static dp_fx::DPEqBand* DP_getEqBand(DynamicsProcessingContext *pContext, int32_t channel,
-        int32_t eqType, int32_t band) {
-    dp_fx::DPEq *pEq = DP_getEq(pContext, channel, eqType);
-    if (pEq == NULL) {
-        return NULL;
-    }
-    dp_fx::DPEqBand *pEqBand = pEq->getBand(band);
-    ALOGE_IF(pEqBand == NULL, "DPEqBand NULL. invalid band %d", band);
-    return pEqBand;
-}
-
-static dp_fx::DPMbc* DP_getMbc(DynamicsProcessingContext *pContext, int32_t channel) {
-    dp_fx::DPChannel * pChannel = DP_getChannel(pContext, channel);
-    if (pChannel == NULL) {
-        return NULL;
-    }
-    dp_fx::DPMbc *pMbc = pChannel->getMbc();
-    ALOGE_IF(pMbc == NULL, "DPMbc NULL invalid MBC");
-    return pMbc;
-}
-
-static dp_fx::DPMbcBand* DP_getMbcBand(DynamicsProcessingContext *pContext, int32_t channel,
-        int32_t band) {
-    dp_fx::DPMbc *pMbc = DP_getMbc(pContext, channel);
-    if (pMbc == NULL) {
-        return NULL;
-    }
-    dp_fx::DPMbcBand *pMbcBand = pMbc->getBand(band);
-    ALOGE_IF(pMbcBand == NULL, "pMbcBand NULL. invalid band %d", band);
-    return pMbcBand;
-}
-
 int DP_getParameter(DynamicsProcessingContext *pContext,
                            uint32_t paramSize,
                            void *pParam,
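
For reference on the EFFECT_CMD_SET_VOLUME handling added above: the framework delivers per-channel volumes as Q8.24 fixed-point linear gains (unity = 1 << 24), and the effect converts them to dB before applying them as per-channel output gain, replying with unity so no volume is applied ahead of the effect. A standalone sketch of just that conversion (plain C++, not the effect's own helpers):

    #include <cmath>
    #include <cstdint>

    // Q8.24 fixed-point linear gain -> decibels.
    // (1 << 24) maps to 0 dB, (1 << 23) to roughly -6.02 dB.
    static float q8_24ToDb(uint32_t volume) {
        const float unityGain = static_cast<float>(1 << 24);
        const float linear = static_cast<float>(volume) / unityGain;  // 0 yields -inf dB
        return 20.0f * log10f(linear);     // same math as linearToDb()
    }
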
diff --git a/media/libeffects/dynamicsproc/dsp/DPBase.cpp b/media/libeffects/dynamicsproc/dsp/DPBase.cpp
index 8b79991..ac758e0 100644
--- a/media/libeffects/dynamicsproc/dsp/DPBase.cpp
+++ b/media/libeffects/dynamicsproc/dsp/DPBase.cpp
@@ -174,8 +174,8 @@
 }
 
 //----
-DPChannel::DPChannel() : mInitialized(false), mInputGainDb(0), mPreEqInUse(false), mMbcInUse(false),
-        mPostEqInUse(false), mLimiterInUse(false) {
+DPChannel::DPChannel() : mInitialized(false), mInputGainDb(0), mOutputGainDb(0),
+        mPreEqInUse(false), mMbcInUse(false), mPostEqInUse(false), mLimiterInUse(false) {
 }
 
 void DPChannel::init(float inputGain, bool preEqInUse, uint32_t preEqBandCount,
diff --git a/media/libeffects/dynamicsproc/dsp/DPBase.h b/media/libeffects/dynamicsproc/dsp/DPBase.h
index 355f64b..e74f91d 100644
--- a/media/libeffects/dynamicsproc/dsp/DPBase.h
+++ b/media/libeffects/dynamicsproc/dsp/DPBase.h
@@ -272,6 +272,16 @@
         mInputGainDb = gain;
     }
 
+    float getOutputGain() const {
+        if (!mInitialized) {
+            return 0;
+        }
+        return mOutputGainDb;
+    }
+    void setOutputGain(float gain) {
+        mOutputGainDb = gain;
+    }
+
     DPEq* getPreEq();
     DPMbc* getMbc();
     DPEq* getPostEq();
@@ -281,6 +291,7 @@
 private:
     bool mInitialized;
     float mInputGainDb;
+    float mOutputGainDb;
 
     DPEq mPreEq;
     DPMbc mMbc;
diff --git a/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp b/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
index 59195fc..d06fd70 100644
--- a/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
+++ b/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
@@ -29,7 +29,7 @@
 
 #define CIRCULAR_BUFFER_UPSAMPLE 4  //4 times buffer size
 
-static constexpr float MIN_ENVELOPE = 0.000001f;
+static constexpr float MIN_ENVELOPE = 1e-6f; //-120 dB
 //helper functionS
 static inline bool isPowerOf2(unsigned long n) {
     return (n & (n - 1)) == 0;
@@ -53,14 +53,6 @@
 #define IS_CHANGED(c, a, b) { c |= !compareEquality(a,b); \
     (a) = (b); }
 
-float dBtoLinear(float valueDb) {
-    return  pow (10, valueDb / 20.0);
-}
-
-float linearToDb(float value) {
-    return 20 * log10(value);
-}
-
 //ChannelBuffers helper
 void ChannelBuffer::initBuffers(unsigned int blockSize, unsigned int overlapSize,
         unsigned int halfFftSize, unsigned int samplingRate, DPBase &dpBase) {
@@ -74,7 +66,7 @@
     cBOutput.resize(mBlockSize * CIRCULAR_BUFFER_UPSAMPLE);
 
     //fill input with half block size...
-    for (unsigned int k = 0;  k < mBlockSize/2; k++) {
+    for (unsigned int k = 0; k < mBlockSize/2; k++) {
         cBInput.write(0);
     }
 
@@ -94,12 +86,14 @@
             mMbcBands.size(), mPostEqBands.size());
 
     DPChannel *pChannel = dpBase.getChannel(0);
-    if (pChannel != NULL) {
+    if (pChannel != nullptr) {
         mPreEqInUse = pChannel->getPreEq()->isInUse();
         mMbcInUse = pChannel->getMbc()->isInUse();
         mPostEqInUse = pChannel->getPostEq()->isInUse();
         mLimiterInUse = pChannel->getLimiter()->isInUse();
     }
+
+    mLimiterParams.linkGroup = -1; //no group.
 }
 
 void ChannelBuffer::computeBinStartStop(BandParams &bp, size_t binStart) {
@@ -108,8 +102,35 @@
     bp.binStop = (int)(0.5 + bp.freqCutoffHz * mBlockSize / mSamplingRate);
 }
 
-//== DPFrequency
+//== LinkedLimiters Helper
+void LinkedLimiters::reset() {
+    mGroupsMap.clear();
+}
 
+void LinkedLimiters::update(int32_t group, int index) {
+    mGroupsMap[group].push_back(index);
+}
+
+void LinkedLimiters::remove(int index) {
+    //check all groups and if index is found, remove it.
+    //if group is empty afterwards, remove it.
+    for (auto it = mGroupsMap.begin(); it != mGroupsMap.end(); ) {
+        for (auto itIndex = it->second.begin(); itIndex != it->second.end(); ) {
+            if (*itIndex == index) {
+                itIndex = it->second.erase(itIndex);
+            } else {
+                ++itIndex;
+            }
+        }
+        if (it->second.size() == 0) {
+            it = mGroupsMap.erase(it);
+        } else {
+            ++it;
+        }
+    }
+}
+
+//== DPFrequency
 void DPFrequency::reset() {
 }
 
@@ -147,14 +168,25 @@
                 mSamplingRate, *this);
     }
 
-    //dsp
+    //effective number of frames processed per second
+    mBlocksPerSecond = (float)mSamplingRate / (mBlockSize - mOverlapSize);
+
     fill_window(mVWindow, RDSP_WINDOW_HANNING_FLAT_TOP, mBlockSize, mOverlapSize);
+
+    //compute window rms for energy compensation
+    mWindowRms = 0;
+    for (size_t i = 0; i < mVWindow.size(); i++) {
+        mWindowRms += mVWindow[i] * mVWindow[i];
+    }
+
+    //Making sure window rms is not zero.
+    mWindowRms = std::max(sqrt(mWindowRms / mVWindow.size()), MIN_ENVELOPE);
 }
 
 void DPFrequency::updateParameters(ChannelBuffer &cb, int channelIndex) {
     DPChannel *pChannel = getChannel(channelIndex);
 
-    if (pChannel == NULL) {
+    if (pChannel == nullptr) {
         ALOGE("Error: updateParameters null DPChannel %d", channelIndex);
         return;
     }
@@ -166,7 +198,7 @@
         //===EqPre
         if (cb.mPreEqInUse) {
             DPEq *pPreEq = pChannel->getPreEq();
-            if (pPreEq == NULL) {
+            if (pPreEq == nullptr) {
                 ALOGE("Error: updateParameters null PreEq for channel: %d", channelIndex);
                 return;
             }
@@ -174,7 +206,7 @@
             if (cb.mPreEqEnabled) {
                 for (unsigned int b = 0; b < getPreEqBandCount(); b++) {
                     DPEqBand *pEqBand = pPreEq->getBand(b);
-                    if (pEqBand == NULL) {
+                    if (pEqBand == nullptr) {
                         ALOGE("Error: updateParameters null PreEqBand for band %d", b);
                         return; //failed.
                     }
@@ -222,7 +254,7 @@
         bool changed = false;
 
         DPEq *pPostEq = pChannel->getPostEq();
-        if (pPostEq == NULL) {
+        if (pPostEq == nullptr) {
             ALOGE("Error: updateParameters null postEq for channel: %d", channelIndex);
             return; //failed.
         }
@@ -230,7 +262,7 @@
         if (cb.mPostEqEnabled) {
             for (unsigned int b = 0; b < getPostEqBandCount(); b++) {
                 DPEqBand *pEqBand = pPostEq->getBand(b);
-                if (pEqBand == NULL) {
+                if (pEqBand == nullptr) {
                     ALOGE("Error: updateParameters PostEqBand NULL for band %d", b);
                     return; //failed.
                 }
@@ -265,7 +297,7 @@
     //===MBC
     if (cb.mMbcInUse) {
         DPMbc *pMbc = pChannel->getMbc();
-        if (pMbc == NULL) {
+        if (pMbc == nullptr) {
             ALOGE("Error: updateParameters Mbc NULL for channel: %d", channelIndex);
             return;
         }
@@ -274,7 +306,7 @@
             bool changed = false;
             for (unsigned int b = 0; b < getMbcBandCount(); b++) {
                 DPMbcBand *pMbcBand = pMbc->getBand(b);
-                if (pMbcBand == NULL) {
+                if (pMbcBand == nullptr) {
                     ALOGE("Error: updateParameters MbcBand NULL for band %d", b);
                     return; //failed.
                 }
@@ -307,11 +339,38 @@
                     cb.computeBinStartStop(*pMbcBandParams, binNext);
                     binNext = pMbcBandParams->binStop + 1;
                 }
-
             }
-
         }
     }
+
+    //===Limiter
+    if (cb.mLimiterInUse) {
+        bool changed = false;
+        DPLimiter *pLimiter = pChannel->getLimiter();
+        if (pLimiter == nullptr) {
+            ALOGE("Error: updateParameters Limiter NULL for channel: %d", channelIndex);
+            return;
+        }
+        cb.mLimiterEnabled = pLimiter->isEnabled();
+        if (cb.mLimiterEnabled) {
+            IS_CHANGED(changed, cb.mLimiterParams.linkGroup ,
+                    (int32_t)pLimiter->getLinkGroup());
+            cb.mLimiterParams.attackTimeMs = pLimiter->getAttackTime();
+            cb.mLimiterParams.releaseTimeMs = pLimiter->getReleaseTime();
+            cb.mLimiterParams.ratio = pLimiter->getRatio();
+            cb.mLimiterParams.thresholdDb = pLimiter->getThreshold();
+            cb.mLimiterParams.postGainDb = pLimiter->getPostGain();
+        }
+
+        if (changed) {
+            ALOGV("limiter changed, recomputing linkGroups for %d", channelIndex);
+            mLinkedLimiters.remove(channelIndex); //in case it was already there.
+            mLinkedLimiters.update(cb.mLimiterParams.linkGroup, channelIndex);
+        }
+    }
+
+    //=== Output Gain
+    cb.outputGainDb = pChannel->getOutputGain();
 }
 
 size_t DPFrequency::processSamples(const float *in, float *out, size_t samples) {
@@ -336,12 +395,8 @@
            }
        }
 
-       //TODO: lookahead limiters
-       //TODO: apply linked limiters to all channels.
-       //**Process each Channel
-       for (int ch = 0; ch < channelCount; ch++) {
-           processMono(mChannelBuffers[ch]);
-       }
+       //**process all channelBuffers
+       processChannelBuffers(mChannelBuffers);
 
        //** estimate how much data is available in ALL channels
        size_t available = mChannelBuffers[0].cBOutput.availableToRead();
@@ -370,62 +425,78 @@
        return samples;
 }
 
-size_t DPFrequency::processMono(ChannelBuffer &cb) {
-
+size_t DPFrequency::processChannelBuffers(CBufferVector &channelBuffers) {
+    const int channelCount = channelBuffers.size();
     size_t processedSamples = 0;
+    size_t processFrames = mBlockSize - mOverlapSize;
 
-    size_t available = cb.cBInput.availableToRead();
-    while (available >= mBlockSize - mOverlapSize) {
-
-        //move tail of previous
-        for (unsigned int k = 0; k < mOverlapSize; ++k) {
-            cb.input[k] = cb.input[mBlockSize - mOverlapSize + k];
-        }
-
-        //read new available data
-        for (unsigned int k = 0; k < mBlockSize - mOverlapSize; k++) {
-            cb.input[mOverlapSize + k] = cb.cBInput.read();
-        }
-
-        //## Actual process
-        processOneVector(cb.output, cb.input, cb);
-        //##End of Process
-
-        //mix tail (and capture new tail
-        for (unsigned int k = 0; k < mOverlapSize; k++) {
-            cb.output[k] += cb.outTail[k];
-            cb.outTail[k] = cb.output[mBlockSize - mOverlapSize + k]; //new tail
-        }
-
-        //output data
-        for (unsigned int k = 0; k < mBlockSize - mOverlapSize; k++) {
-            cb.cBOutput.write(cb.output[k]);
-        }
-
-        available = cb.cBInput.availableToRead();
+    size_t available = channelBuffers[0].cBInput.availableToRead();
+    for (int ch = 1; ch < channelCount; ch++) {
+        available = std::min(available, channelBuffers[ch].cBInput.availableToRead());
     }
 
+    while (available >= processFrames) {
+        //First pass
+        for (int ch = 0; ch < channelCount; ch++) {
+            ChannelBuffer * pCb = &channelBuffers[ch];
+            //move tail of previous
+            std::copy(pCb->input.begin() + processFrames,
+                    pCb->input.end(),
+                    pCb->input.begin());
+
+            //read new available data
+            for (unsigned int k = 0; k < processFrames; k++) {
+                pCb->input[mOverlapSize + k] = pCb->cBInput.read();
+            }
+            //first stages: fft, preEq, mbc, postEq and start of Limiter
+            processedSamples += processFirstStages(*pCb);
+        }
+
+        //**compute linked limiters and update levels if needed
+        processLinkedLimiters(channelBuffers);
+
+        //final pass.
+        for (int ch = 0; ch < channelCount; ch++) {
+            ChannelBuffer * pCb = &channelBuffers[ch];
+
+            //linked limiter and ifft
+            processLastStages(*pCb);
+
+            //mix tail (and capture new tail)
+            for (unsigned int k = 0; k < mOverlapSize; k++) {
+                pCb->output[k] += pCb->outTail[k];
+                pCb->outTail[k] = pCb->output[processFrames + k]; //new tail
+            }
+
+            //output data
+            for (unsigned int k = 0; k < processFrames; k++) {
+                pCb->cBOutput.write(pCb->output[k]);
+            }
+        }
+        available -= processFrames;
+    }
     return processedSamples;
 }
-
-size_t DPFrequency::processOneVector(FloatVec & output, FloatVec & input,
-        ChannelBuffer &cb) {
+size_t DPFrequency::processFirstStages(ChannelBuffer &cb) {
 
     //##apply window
     Eigen::Map<Eigen::VectorXf> eWindow(&mVWindow[0], mVWindow.size());
-    Eigen::Map<Eigen::VectorXf> eInput(&input[0], input.size());
+    Eigen::Map<Eigen::VectorXf> eInput(&cb.input[0], cb.input.size());
 
     Eigen::VectorXf eWin = eInput.cwiseProduct(eWindow); //apply window
 
-    //##fft //TODO: refactor frequency transformations away from other stages.
-    mFftServer.fwd(mComplexTemp, eWin);
+    //##fft
+    //Note: we are using eigen with the default scaling, which ensures that
+    //  IFFT( FFT(x) ) = x.
+    // TODO: optimize by using the noscale option, and compensate with dB scale offsets
+    mFftServer.fwd(cb.complexTemp, eWin);
 
-    size_t cSize = mComplexTemp.size();
+    size_t cSize = cb.complexTemp.size();
     size_t maxBin = std::min(cSize/2, mHalfFFTSize);
 
     //== EqPre (always runs)
     for (size_t k = 0; k < maxBin; k++) {
-        mComplexTemp[k] *= cb.mPreEqFactorVector[k];
+        cb.complexTemp[k] *= cb.mPreEqFactorVector[k];
     }
 
     //== MBC
@@ -439,62 +510,68 @@
             float preGainSquared = preGainFactor * preGainFactor;
 
             for (size_t k = pMbcBandParams->binStart; k <= pMbcBandParams->binStop; k++) {
-                float fReal = mComplexTemp[k].real();
-                float fImag = mComplexTemp[k].imag();
-                float fSquare = (fReal * fReal + fImag * fImag) * preGainSquared;
-
-                fEnergySum += fSquare;
+                fEnergySum += std::norm(cb.complexTemp[k]) * preGainSquared; //mag squared
             }
 
-            fEnergySum = sqrt(fEnergySum /2.0);
+            //Eigen FFT is full spectrum, even if the source was real data.
+            // Each half spectrum has half the energy. This is taken into account with the * 2
+            // factor in the energy computations.
+            // energy = sqrt(sum_components_squared) / number_points
+            // in here, the fEnergySum is duplicated to account for the second half spectrum,
+            // and the windowRms is used to normalize by the expected energy reduction
+            // caused by the window used (expected for steady state signals)
+            fEnergySum = sqrt(fEnergySum * 2) / (mBlockSize * mWindowRms);
+
+            // updates computed per frame advance.
             float fTheta = 0.0;
-            float fFAtt = pMbcBandParams->attackTimeMs;
-            float fFRel = pMbcBandParams->releaseTimeMs;
-
-            float fUpdatesPerSecond = 10; //TODO: compute from framerate
-
+            float fFAttSec = pMbcBandParams->attackTimeMs / 1000; //in seconds
+            float fFRelSec = pMbcBandParams->releaseTimeMs / 1000; //in seconds
 
             if (fEnergySum > pMbcBandParams->previousEnvelope) {
-                fTheta = exp(-1.0 / (fFAtt * fUpdatesPerSecond));
+                fTheta = exp(-1.0 / (fFAttSec * mBlocksPerSecond));
             } else {
-                fTheta = exp(-1.0 / (fFRel * fUpdatesPerSecond));
+                fTheta = exp(-1.0 / (fFRelSec * mBlocksPerSecond));
             }
 
             float fEnv = (1.0 - fTheta) * fEnergySum + fTheta * pMbcBandParams->previousEnvelope;
-
             //preserve for next iteration
             pMbcBandParams->previousEnvelope = fEnv;
 
-            float fThreshold = dBtoLinear(pMbcBandParams->thresholdDb);
-            float fNoiseGateThreshold = dBtoLinear(pMbcBandParams->noiseGateThresholdDb);
-
-            float fNewFactor = 1.0;
-
-            if (fEnv > fThreshold) {
-                float fDbAbove = linearToDb(fThreshold / fEnv);
-                float fDbTarget = fDbAbove / pMbcBandParams->ratio;
-                float fDbChange = fDbAbove - fDbTarget;
-                fNewFactor = dBtoLinear(fDbChange);
-            } else if (fEnv < fNoiseGateThreshold) {
-                if (fEnv < MIN_ENVELOPE) {
-                    fEnv = MIN_ENVELOPE;
-                }
-                float fDbBelow = linearToDb(fNoiseGateThreshold / fEnv);
-                float fDbTarget = fDbBelow / pMbcBandParams->expanderRatio;
-                float fDbChange = fDbBelow - fDbTarget;
-                fNewFactor = dBtoLinear(fDbChange);
+            if (fEnv < MIN_ENVELOPE) {
+                fEnv = MIN_ENVELOPE;
             }
+            const float envDb = linearToDb(fEnv);
+            float newLevelDb = envDb;
+            //using shorter variables for code clarity
+            const float thresholdDb = pMbcBandParams->thresholdDb;
+            const float ratio = pMbcBandParams->ratio;
+            const float kneeWidthDbHalf = pMbcBandParams->kneeWidthDb / 2;
+            const float noiseGateThresholdDb = pMbcBandParams->noiseGateThresholdDb;
+            const float expanderRatio = pMbcBandParams->expanderRatio;
+
+            //find segment
+            if (envDb > thresholdDb + kneeWidthDbHalf) {
+                //compression segment
+                newLevelDb = envDb + ((1 / ratio) - 1) * (envDb - thresholdDb);
+            } else if (envDb > thresholdDb - kneeWidthDbHalf) {
+                //knee-compression segment
+                float temp = (envDb - thresholdDb + kneeWidthDbHalf);
+                newLevelDb = envDb + ((1 / ratio) - 1) *
+                        temp * temp / (kneeWidthDbHalf * 4);
+            } else if (envDb < noiseGateThresholdDb) {
+                //expander segment
+                newLevelDb = noiseGateThresholdDb -
+                        expanderRatio * (noiseGateThresholdDb - envDb);
+            }
+
+            float newFactor = dBtoLinear(newLevelDb - envDb);
 
             //apply post gain.
-            fNewFactor *= dBtoLinear(pMbcBandParams->gainPostDb);
-
-            if (fNewFactor < 0) {
-                fNewFactor = 0;
-            }
+            newFactor *= dBtoLinear(pMbcBandParams->gainPostDb);
 
             //apply to this band
             for (size_t k = pMbcBandParams->binStart; k <= pMbcBandParams->binStop; k++) {
-                mComplexTemp[k] *= fNewFactor;
+                cb.complexTemp[k] *= newFactor;
             }
 
         } //end per band process
@@ -504,14 +581,94 @@
     //== EqPost
     if (cb.mPostEqInUse && cb.mPostEqEnabled) {
         for (size_t k = 0; k < maxBin; k++) {
-            mComplexTemp[k] *= cb.mPostEqFactorVector[k];
+            cb.complexTemp[k] *= cb.mPostEqFactorVector[k];
+        }
+    }
+
+    //== Limiter. First Pass
+    if (cb.mLimiterInUse && cb.mLimiterEnabled) {
+        float fEnergySum = 0;
+        for (size_t k = 0; k < maxBin; k++) {
+            fEnergySum += std::norm(cb.complexTemp[k]);
+        }
+
+        //see explanation above for energy computation logic
+        fEnergySum = sqrt(fEnergySum * 2) / (mBlockSize * mWindowRms);
+        float fTheta = 0.0;
+        float fFAttSec = cb.mLimiterParams.attackTimeMs / 1000; //in seconds
+        float fFRelSec = cb.mLimiterParams.releaseTimeMs / 1000; //in seconds
+
+        if (fEnergySum > cb.mLimiterParams.previousEnvelope) {
+            fTheta = exp(-1.0 / (fFAttSec * mBlocksPerSecond));
+        } else {
+            fTheta = exp(-1.0 / (fFRelSec * mBlocksPerSecond));
+        }
+
+        float fEnv = (1.0 - fTheta) * fEnergySum + fTheta * cb.mLimiterParams.previousEnvelope;
+        //preserve for next iteration
+        cb.mLimiterParams.previousEnvelope = fEnv;
+
+        const float envDb = linearToDb(fEnv);
+        float newFactorDb = 0;
+        //using shorter variables for code clarity
+        const float thresholdDb = cb.mLimiterParams.thresholdDb;
+        const float ratio = cb.mLimiterParams.ratio;
+
+        if (envDb > thresholdDb) {
+            //limiter segment
+            newFactorDb = ((1 / ratio) - 1) * (envDb - thresholdDb);
+        }
+
+        float newFactor = dBtoLinear(newFactorDb);
+
+        cb.mLimiterParams.newFactor = newFactor;
+
+    } //end Limiter
+    return mBlockSize;
+}
+
+void DPFrequency::processLinkedLimiters(CBufferVector &channelBuffers) {
+
+    const int channelCount = channelBuffers.size();
+    for (auto &groupPair : mLinkedLimiters.mGroupsMap) {
+        float minFactor = 1.0;
+        //estimate minfactor for all linked
+        for(int index : groupPair.second) {
+            if (index >= 0 && index < channelCount) {
+                minFactor = std::min(channelBuffers[index].mLimiterParams.newFactor, minFactor);
+            }
+        }
+        //apply minFactor
+        for(int index : groupPair.second) {
+            if (index >= 0 && index < channelCount) {
+                channelBuffers[index].mLimiterParams.linkFactor = minFactor;
+            }
+        }
+    }
+}
+
+size_t DPFrequency::processLastStages(ChannelBuffer &cb) {
+
+    float outputGainFactor = dBtoLinear(cb.outputGainDb);
+    //== Limiter. last Pass
+    if (cb.mLimiterInUse && cb.mLimiterEnabled) {
+        //compute factor, with post-gain
+        float factor = cb.mLimiterParams.linkFactor * dBtoLinear(cb.mLimiterParams.postGainDb);
+        outputGainFactor *= factor;
+    }
+
+    //apply to all if != 1.0
+    if (!compareEquality(outputGainFactor, 1.0f)) {
+        size_t cSize = cb.complexTemp.size();
+        size_t maxBin = std::min(cSize/2, mHalfFFTSize);
+        for (size_t k = 0; k < maxBin; k++) {
+            cb.complexTemp[k] *= outputGainFactor;
         }
     }
 
     //##ifft directly to output.
-    Eigen::Map<Eigen::VectorXf> eOutput(&output[0], output.size());
-    mFftServer.inv(eOutput, mComplexTemp);
-
+    Eigen::Map<Eigen::VectorXf> eOutput(&cb.output[0], cb.output.size());
+    mFftServer.inv(eOutput, cb.complexTemp);
     return mBlockSize;
 }
 
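The MBC rewrite above replaces the hard-knee compressor with a three-segment gain computer: plain compression above threshold + knee/2, a quadratic soft knee around the threshold, and downward expansion below the noise gate threshold. A condensed restatement of that curve in dB (a sketch of the same math, not the effect's code path):

    // Returns the target output level in dB for an input envelope envDb.
    // Mirrors the segment logic in DPFrequency::processFirstStages().
    static float mbcGainComputerDb(float envDb, float thresholdDb, float ratio,
                                   float kneeWidthDb, float noiseGateThresholdDb,
                                   float expanderRatio) {
        const float kneeHalf = kneeWidthDb / 2;
        if (envDb > thresholdDb + kneeHalf) {
            return envDb + ((1 / ratio) - 1) * (envDb - thresholdDb);      // compression
        } else if (envDb > thresholdDb - kneeHalf) {
            const float t = envDb - thresholdDb + kneeHalf;                // soft knee
            return envDb + ((1 / ratio) - 1) * t * t / (kneeHalf * 4);
        } else if (envDb < noiseGateThresholdDb) {
            return noiseGateThresholdDb -
                    expanderRatio * (noiseGateThresholdDb - envDb);        // expander
        }
        return envDb;                                                      // linear region
    }

    // The band is then scaled by dBtoLinear(newLevelDb - envDb) * dBtoLinear(gainPostDb).
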
diff --git a/media/libeffects/dynamicsproc/dsp/DPFrequency.h b/media/libeffects/dynamicsproc/dsp/DPFrequency.h
index 9919142..be8771d 100644
--- a/media/libeffects/dynamicsproc/dsp/DPFrequency.h
+++ b/media/libeffects/dynamicsproc/dsp/DPFrequency.h
@@ -39,8 +39,11 @@
     FloatVec output;    // time domain temp vector for output
     FloatVec outTail;   // time domain temp vector for output tail (for overlap-add method)
 
+    Eigen::VectorXcf complexTemp; // complex temp vector for frequency domain operations
+
     //Current parameters
     float inputGainDb;
+    float outputGainDb;
     struct BandParams {
         bool enabled;
         float freqCutoffHz;
@@ -64,6 +67,19 @@
         //Historic values
         float previousEnvelope;
     };
+    struct LimiterParams {
+        int32_t linkGroup;
+        float attackTimeMs;
+        float releaseTimeMs;
+        float ratio;
+        float thresholdDb;
+        float postGainDb;
+
+        //Historic values
+        float previousEnvelope;
+        float newFactor;
+        float linkFactor;
+    };
 
     bool mPreEqInUse;
     bool mPreEqEnabled;
@@ -79,6 +95,7 @@
 
     bool mLimiterInUse;
     bool mLimiterEnabled;
+    LimiterParams mLimiterParams;
     FloatVec mPreEqFactorVector; // temp pre-computed vector to shape spectrum at preEQ stage
     FloatVec mPostEqFactorVector; // temp pre-computed vector to shape spectrum at postEQ stage
 
@@ -91,6 +108,18 @@
 
 };
 
+using CBufferVector = std::vector<ChannelBuffer>;
+
+using GroupsMap = std::map<int32_t, IntVec>;
+
+class LinkedLimiters {
+public:
+    void reset();
+    void update(int32_t group, int index);
+    void remove(int index);
+    GroupsMap mGroupsMap;
+};
+
 class DPFrequency : public DPBase {
 public:
     virtual size_t processSamples(const float *in, float *out, size_t samples);
@@ -104,16 +133,25 @@
     size_t processMono(ChannelBuffer &cb);
     size_t processOneVector(FloatVec &output, FloatVec &input, ChannelBuffer &cb);
 
+    size_t processChannelBuffers(CBufferVector &channelBuffers);
+    size_t processFirstStages(ChannelBuffer &cb);
+    size_t processLastStages(ChannelBuffer &cb);
+    void processLinkedLimiters(CBufferVector &channelBuffers);
+
     size_t mBlockSize;
     size_t mHalfFFTSize;
     size_t mOverlapSize;
     size_t mSamplingRate;
 
-    std::vector<ChannelBuffer> mChannelBuffers;
+    float mBlocksPerSecond;
+
+    CBufferVector mChannelBuffers;
+
+    LinkedLimiters mLinkedLimiters;
 
     //dsp
     FloatVec mVWindow;  //window class.
-    Eigen::VectorXcf mComplexTemp;
+    float mWindowRms;
     Eigen::FFT<float> mFftServer;
 };
 
diff --git a/media/libeffects/dynamicsproc/dsp/RDsp.h b/media/libeffects/dynamicsproc/dsp/RDsp.h
index 1048442..cfa1305 100644
--- a/media/libeffects/dynamicsproc/dsp/RDsp.h
+++ b/media/libeffects/dynamicsproc/dsp/RDsp.h
@@ -20,10 +20,25 @@
 #include <complex>
 #include <log/log.h>
 #include <vector>
+#include <map>
 using FloatVec = std::vector<float>;
+using IntVec = std::vector<int>;
 using ComplexVec  = std::vector<std::complex<float>>;
 
 // =======
+// Helper Functions
+// =======
+template <class T>
+static T dBtoLinear(T valueDb) {
+    return pow (10, valueDb / 20.0);
+}
+
+template <class T>
+static T linearToDb(T value) {
+    return 20 * log10(value);
+}
+
+// =======
 // DSP window creation
 // =======
 
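The templated dBtoLinear()/linearToDb() helpers added above use the usual 20*log10 amplitude convention, which is also why MIN_ENVELOPE = 1e-6f in DPFrequency.cpp is annotated as -120 dB. A quick self-contained check of both facts (a sketch using the same formulas, not RDsp.h itself):

    #include <cassert>
    #include <cmath>

    int main() {
        auto linearToDb = [](double v)  { return 20 * log10(v); };
        auto dBtoLinear = [](double db) { return pow(10, db / 20.0); };

        assert(std::fabs(linearToDb(1e-6) - (-120.0)) < 1e-9);          // MIN_ENVELOPE floor
        assert(std::fabs(dBtoLinear(linearToDb(0.5)) - 0.5) < 1e-12);   // inverse round trip
        return 0;
    }
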
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 9d9ac8c..1a1d6b3 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -203,6 +203,7 @@
     ],
 
     shared_libs: [
+        "android.hidl.token@1.0-utils",
         "liblog",
         "libcutils",
         "libutils",
diff --git a/media/libmedia/AudioParameter.cpp b/media/libmedia/AudioParameter.cpp
index cb0e927..034f7c2 100644
--- a/media/libmedia/AudioParameter.cpp
+++ b/media/libmedia/AudioParameter.cpp
@@ -47,6 +47,8 @@
 const char * const AudioParameter::valueOn = AUDIO_PARAMETER_VALUE_ON;
 const char * const AudioParameter::valueOff = AUDIO_PARAMETER_VALUE_OFF;
 const char * const AudioParameter::valueListSeparator = AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+const char * const AudioParameter::keyReconfigA2dp = AUDIO_PARAMETER_RECONFIG_A2DP;
+const char * const AudioParameter::keyReconfigA2dpSupported = AUDIO_PARAMETER_A2DP_RECONFIG_SUPPORTED;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
diff --git a/media/libmediaextractor/include/media/stagefright/MetaDataBase.h b/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
index 8410e7b..dfe34e8 100644
--- a/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
+++ b/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
@@ -70,6 +70,7 @@
     kKeyWantsNALFragments = 'NALf',
     kKeyIsSyncFrame       = 'sync',  // int32_t (bool)
     kKeyIsCodecConfig     = 'conf',  // int32_t (bool)
+    kKeyIsMuxerData       = 'muxd',  // int32_t (bool)
     kKeyTime              = 'time',  // int64_t (usecs)
     kKeyDecodingTime      = 'decT',  // int64_t (decoding timestamp in usecs)
     kKeyNTPTime           = 'ntpT',  // uint64_t (ntp-timestamp)
@@ -223,6 +224,7 @@
     kKeyFrameCount       = 'nfrm', // int32_t, total number of frame in video track
     kKeyExifOffset       = 'exof', // int64_t, Exif data offset
     kKeyExifSize         = 'exsz', // int64_t, Exif data size
+    kKeyIsExif           = 'exif', // bool (int32_t) buffer contains exif data block
 };
 
 enum {
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index cbc3015..23d66bb 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -934,7 +934,11 @@
     sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
     if (meta == NULL) {
         ALOGE("no metadata for track %zu", trackIndex);
-        return NULL;
+        format->setInt32("type", MEDIA_TRACK_TYPE_UNKNOWN);
+        format->setString("mime", "application/octet-stream");
+        format->setString("language", "und");
+
+        return format;
     }
 
     const char *mime;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 0a1bdfe..a5f5fc6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1447,6 +1447,29 @@
             break;
         }
 
+        case kWhatGetStats:
+        {
+            ALOGV("kWhatGetStats");
+
+            Vector<sp<AMessage>> *trackStats;
+            CHECK(msg->findPointer("trackstats", (void**)&trackStats));
+
+            trackStats->clear();
+            if (mVideoDecoder != NULL) {
+                trackStats->push_back(mVideoDecoder->getStats());
+            }
+            if (mAudioDecoder != NULL) {
+                trackStats->push_back(mAudioDecoder->getStats());
+            }
+
+            // respond for synchronization
+            sp<AMessage> response = new AMessage;
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
         default:
             TRESPASS();
             break;
@@ -2210,16 +2233,16 @@
     return renderer->getCurrentPosition(mediaUs);
 }
 
-void NuPlayer::getStats(Vector<sp<AMessage> > *mTrackStats) {
-    CHECK(mTrackStats != NULL);
+void NuPlayer::getStats(Vector<sp<AMessage> > *trackStats) {
+    CHECK(trackStats != NULL);
 
-    mTrackStats->clear();
-    if (mVideoDecoder != NULL) {
-        mTrackStats->push_back(mVideoDecoder->getStats());
-    }
-    if (mAudioDecoder != NULL) {
-        mTrackStats->push_back(mAudioDecoder->getStats());
-    }
+    ALOGV("NuPlayer::getStats()");
+    sp<AMessage> msg = new AMessage(kWhatGetStats, this);
+    msg->setPointer("trackstats", trackStats);
+
+    sp<AMessage> response;
+    (void) msg->postAndAwaitResponse(&response);
+    // response is for synchronization, ignore contents
 }
 
 sp<MetaData> NuPlayer::getFileMeta() {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 3a7ef4e..e400d16 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -88,7 +88,7 @@
     status_t getSelectedTrack(int32_t type, Parcel* reply) const;
     status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
     status_t getCurrentPosition(int64_t *mediaUs);
-    void getStats(Vector<sp<AMessage> > *mTrackStats);
+    void getStats(Vector<sp<AMessage> > *trackStats);
 
     sp<MetaData> getFileMeta();
     float getFrameRate();
@@ -159,6 +159,7 @@
         kWhatPrepareDrm                 = 'pDrm',
         kWhatReleaseDrm                 = 'rDrm',
         kWhatMediaClockNotify           = 'mckN',
+        kWhatGetStats                   = 'gSts',
     };
 
     wp<NuPlayerDriver> mDriver;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
index 0402fca..fb12360 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
@@ -155,7 +155,9 @@
             break;
         default:
             ALOGE("Unknown track type: %d", track.mTrackType);
-            return NULL;
+            format->setInt32("type", MEDIA_TRACK_TYPE_UNKNOWN);
+            format->setString("mime", "application/octet-stream");
+            return format;
     }
 
     // For CEA-608 CC1, field 0 channel 0
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 2a08f62..69cd82e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -745,6 +745,7 @@
     sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this);
     reply->setSize("buffer-ix", index);
     reply->setInt32("generation", mBufferGeneration);
+    reply->setSize("size", size);
 
     if (eos) {
         ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
@@ -1127,6 +1128,7 @@
     int32_t render;
     size_t bufferIx;
     int32_t eos;
+    size_t size;
     CHECK(msg->findSize("buffer-ix", &bufferIx));
 
     if (!mIsAudio) {
@@ -1146,7 +1148,10 @@
         CHECK(msg->findInt64("timestampNs", &timestampNs));
         err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
     } else {
-        mNumOutputFramesDropped += !mIsAudio;
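+        // A buffer that only carries EOS (zero size) isn't a real frame, so
+        // don't count it as a dropped output frame.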
+        if (!msg->findInt32("eos", &eos) || !eos ||
+                !msg->findSize("size", &size) || size) {
+            mNumOutputFramesDropped += !mIsAudio;
+        }
         err = mCodec->releaseOutputBuffer(bufferIx);
     }
     if (err != OK) {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2abea9e..7f39d10 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -567,7 +567,7 @@
       mMetadataBuffersToSubmit(0),
       mNumUndequeuedBuffers(0),
       mRepeatFrameDelayUs(-1ll),
-      mMaxPtsGapUs(-1ll),
+      mMaxPtsGapUs(0ll),
       mMaxFps(-1),
       mFps(-1.0),
       mCaptureFps(-1.0),
@@ -1823,16 +1823,21 @@
 
         // only allow 32-bit value, since we pass it as U32 to OMX.
         if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
-            mMaxPtsGapUs = -1ll;
-        } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < 0) {
+            mMaxPtsGapUs = 0ll;
+        } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < INT32_MIN) {
             ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs);
-            mMaxPtsGapUs = -1ll;
+            mMaxPtsGapUs = 0ll;
         }
 
         if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
             mMaxFps = -1;
         }
 
+        // A negative max pts gap means the client wants frames with
+        // backward-going timestamps to be accepted; force mMaxFps to -1 so
+        // GraphicBufferSource disables its frame-drop logic.
+        if (mMaxPtsGapUs < 0ll) {
+            mMaxFps = -1;
+        }
+
         if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
             mCaptureFps = -1.0;
         }
@@ -5322,13 +5327,13 @@
     convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
 
     // if some aspects are unspecified, use dataspace fields
-    if (range != 0) {
+    if (range == 0) {
         range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
     }
-    if (standard != 0) {
+    if (standard == 0) {
         standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
     }
-    if (transfer != 0) {
+    if (transfer == 0) {
         transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
     }
 
@@ -6686,11 +6691,11 @@
         }
     }
 
-    if (mCodec->mMaxPtsGapUs > 0ll) {
+    if (mCodec->mMaxPtsGapUs != 0ll) {
         OMX_PARAM_U32TYPE maxPtsGapParams;
         InitOMXParams(&maxPtsGapParams);
         maxPtsGapParams.nPortIndex = kPortIndexInput;
-        maxPtsGapParams.nU32 = (uint32_t) mCodec->mMaxPtsGapUs;
+        maxPtsGapParams.nU32 = (uint32_t)mCodec->mMaxPtsGapUs;
 
         err = mCodec->mOMXNode->setParameter(
                 (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
@@ -6703,7 +6708,7 @@
         }
     }
 
-    if (mCodec->mMaxFps > 0) {
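+    // Also push mMaxFps when the pts gap is negative, so that the -1 value
+    // configured for that case reaches GraphicBufferSource and disables its
+    // frame dropping.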
+    if (mCodec->mMaxFps > 0 || mCodec->mMaxPtsGapUs < 0) {
         err = statusFromBinderStatus(
                 mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 550a99c..6ff3d78 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -83,6 +83,9 @@
 static const char kMetaKey_TemporalLayerCount[] = "com.android.video.temporal_layers_count";
 
 static const int kTimestampDebugCount = 10;
+static const int kItemIdBase = 10000;
+static const char kExifHeader[] = {'E', 'x', 'i', 'f', '\0', '\0'};
+static const int32_t kTiffHeaderOffset = htonl(sizeof(kExifHeader));
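+// The payload of a HEIF 'Exif' item starts with a 4-byte exif_tiff_header_offset
+// field; here it points just past the 6-byte "Exif\0\0" identifier above.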
 
 static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
     kHevcNalUnitTypeVps,
@@ -112,7 +115,7 @@
 
     int64_t getDurationUs() const;
     int64_t getEstimatedTrackSizeBytes() const;
-    int32_t getMetaSizeIncrease() const;
+    int32_t getMetaSizeIncrease(int32_t angle, int32_t trackCount) const;
     void writeTrackHeader(bool use32BitOffset = true);
     int64_t getMinCttsOffsetTimeUs();
     void bufferChunk(int64_t timestampUs);
@@ -122,8 +125,10 @@
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
     bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
+    bool isExifData(const MediaBufferBase *buffer) const;
     void addChunkOffset(off64_t offset);
-    void addItemOffsetAndSize(off64_t offset, size_t size);
+    void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
+    void flushItemRefs();
     int32_t getTrackId() const { return mTrackId; }
     status_t dump(int fd, const Vector<String16>& args) const;
     static const char *getFourCCForMime(const char *mime);
@@ -355,7 +360,9 @@
     int32_t mRotation;
 
     Vector<uint16_t> mProperties;
-    Vector<uint16_t> mDimgRefs;
+    ItemRefs mDimgRefs;
+    ItemRefs mCdscRefs;
+    uint16_t mImageItemId;
     int32_t mIsPrimary;
     int32_t mWidth, mHeight;
     int32_t mTileWidth, mTileHeight;
@@ -499,6 +506,7 @@
     mPrimaryItemId = 0;
     mAssociationEntryCount = 0;
     mNumGrids = 0;
+    mHasRefs = false;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -680,7 +688,12 @@
 #endif
 }
 
-int64_t MPEG4Writer::estimateFileLevelMetaSize() {
+int64_t MPEG4Writer::estimateFileLevelMetaSize(MetaData *params) {
+    int32_t rotation;
+    if (!params || !params->findInt32(kKeyRotation, &rotation)) {
+        rotation = 0;
+    }
+
     // base meta size
     int64_t metaSize =     12  // meta fullbox header
                          + 33  // hdlr box
@@ -695,7 +708,7 @@
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         if ((*it)->isHeic()) {
-            metaSize += (*it)->getMetaSizeIncrease();
+            metaSize += (*it)->getMetaSizeIncrease(rotation, mTracks.size());
         }
     }
 
@@ -900,7 +913,7 @@
     if (mInMemoryCacheSize == 0) {
         int32_t bitRate = -1;
         if (mHasFileLevelMeta) {
-            mInMemoryCacheSize += estimateFileLevelMetaSize();
+            mInMemoryCacheSize += estimateFileLevelMetaSize(param);
         }
         if (mHasMoovBox) {
             if (param) {
@@ -1344,12 +1357,17 @@
 }
 
 off64_t MPEG4Writer::addSample_l(
-        MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten) {
+        MediaBuffer *buffer, bool usePrefix, bool isExif, size_t *bytesWritten) {
     off64_t old_offset = mOffset;
 
     if (usePrefix) {
         addMultipleLengthPrefixedSamples_l(buffer);
     } else {
+        if (isExif) {
+            ::write(mFd, &kTiffHeaderOffset, 4); // exif_tiff_header_offset field
+            mOffset += 4;
+        }
+
         ::write(mFd,
               (const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
@@ -1767,6 +1785,9 @@
       mReachedEOS(false),
       mStartTimestampUs(-1),
       mRotation(0),
+      mDimgRefs("dimg"),
+      mCdscRefs("cdsc"),
+      mImageItemId(0),
       mIsPrimary(0),
       mWidth(0),
       mHeight(0),
@@ -1933,6 +1954,13 @@
     return OK;
 }
 
+bool MPEG4Writer::Track::isExifData(const MediaBufferBase *buffer) const {
+    return mIsHeic
+            && (buffer->range_length() > sizeof(kExifHeader))
+            && !memcmp((uint8_t *)buffer->data() + buffer->range_offset(),
+                    kExifHeader, sizeof(kExifHeader));
+}
+
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
     CHECK(!mIsHeic);
     if (mOwner->use32BitFileOffset()) {
@@ -1943,7 +1971,7 @@
     }
 }
 
-void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size) {
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
     CHECK(mIsHeic);
 
     if (offset > UINT32_MAX || size > UINT32_MAX) {
@@ -1954,6 +1982,18 @@
     if (mIsMalformed) {
         return;
     }
+
+    if (isExif) {
+        mCdscRefs.value.push_back(mOwner->addItem_l({
+            .itemType = "Exif",
+            .isPrimary = false,
+            .isHidden = false,
+            .offset = (uint32_t)offset,
+            .size = (uint32_t)size,
+        }));
+        return;
+    }
+
     if (mTileIndex >= mNumTiles) {
         ALOGW("Ignoring excess tiles!");
         return;
@@ -1968,7 +2008,7 @@
         default: break; // don't set if invalid
     }
 
-    bool hasGrid = (mNumTiles > 1);
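+    // A grid item is used whenever tile dimensions are configured
+    // (mTileWidth > 0), even if the image ends up with a single tile.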
+    bool hasGrid = (mTileWidth > 0);
 
     if (mProperties.empty()) {
         mProperties.push_back(mOwner->addProperty_l({
@@ -1990,18 +2030,16 @@
         }
     }
 
-    uint16_t itemId = mOwner->addItem_l({
-        .itemType = "hvc1",
-        .isPrimary = hasGrid ? false : (mIsPrimary != 0),
-        .isHidden = hasGrid,
-        .offset = (uint32_t)offset,
-        .size = (uint32_t)size,
-        .properties = mProperties,
-    });
-
     mTileIndex++;
     if (hasGrid) {
-        mDimgRefs.push_back(itemId);
+        mDimgRefs.value.push_back(mOwner->addItem_l({
+            .itemType = "hvc1",
+            .isPrimary = false,
+            .isHidden = true,
+            .offset = (uint32_t)offset,
+            .size = (uint32_t)size,
+            .properties = mProperties,
+        }));
 
         if (mTileIndex == mNumTiles) {
             mProperties.clear();
@@ -2016,7 +2054,7 @@
                     .rotation = heifRotation,
                 }));
             }
-            mOwner->addItem_l({
+            mImageItemId = mOwner->addItem_l({
                 .itemType = "grid",
                 .isPrimary = (mIsPrimary != 0),
                 .isHidden = false,
@@ -2025,9 +2063,31 @@
                 .width = (uint32_t)mWidth,
                 .height = (uint32_t)mHeight,
                 .properties = mProperties,
-                .dimgRefs = mDimgRefs,
             });
         }
+    } else {
+        mImageItemId = mOwner->addItem_l({
+            .itemType = "hvc1",
+            .isPrimary = (mIsPrimary != 0),
+            .isHidden = false,
+            .offset = (uint32_t)offset,
+            .size = (uint32_t)size,
+            .properties = mProperties,
+        });
+    }
+}
+
+// Flush out the item refs for this track. Note that this must be called after
+// the writer thread has stopped, because there might be pending items in the
+// last few chunks written by the writer thread (as opposed to the track). In
+// particular, this affects the 'dimg' refs of a tiled image, as the refs are
+// only complete after the last tile sample has been written.
+void MPEG4Writer::Track::flushItemRefs() {
+    CHECK(mIsHeic);
+
+    if (mImageItemId > 0) {
+        mOwner->addRefs_l(mImageItemId, mDimgRefs);
+        mOwner->addRefs_l(mImageItemId, mCdscRefs);
     }
 }
 
@@ -2174,15 +2234,20 @@
         chunk->mTimeStampUs, chunk->mTrack->getTrackType());
 
     int32_t isFirstSample = true;
-    bool usePrefix = chunk->mTrack->usePrefix();
     while (!chunk->mSamples.empty()) {
         List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
 
+        int32_t isExif;
+        if (!(*it)->meta_data().findInt32(kKeyIsExif, &isExif)) {
+            isExif = 0;
+        }
+        bool usePrefix = chunk->mTrack->usePrefix() && !isExif;
+
         size_t bytesWritten;
-        off64_t offset = addSample_l(*it, usePrefix, &bytesWritten);
+        off64_t offset = addSample_l(*it, usePrefix, isExif, &bytesWritten);
 
         if (chunk->mTrack->isHeic()) {
-            chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten);
+            chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten, isExif);
         } else if (isFirstSample) {
             chunk->mTrack->addChunkOffset(offset);
             isFirstSample = false;
@@ -2904,6 +2969,19 @@
             break;
         }
 
+        bool isExif = false;
+        int32_t isMuxerData;
+        if (buffer->meta_data().findInt32(kKeyIsMuxerData, &isMuxerData) && isMuxerData) {
+            // We only support one type of muxer data, which is Exif data block.
+            isExif = isExifData(buffer);
+            if (!isExif) {
+                ALOGW("Ignoring bad Exif data block");
+                buffer->release();
+                buffer = NULL;
+                continue;
+            }
+        }
+
         ++nActualFrames;
 
         // Make a deep copy of the MediaBuffer and Metadata and release
@@ -2916,10 +2994,15 @@
         buffer->release();
         buffer = NULL;
 
-        if (usePrefix()) StripStartcode(copy);
+        if (isExif) {
+            copy->meta_data().setInt32(kKeyIsExif, 1);
+        }
+        bool usePrefix = this->usePrefix() && !isExif;
+
+        if (usePrefix) StripStartcode(copy);
 
         size_t sampleSize = copy->range_length();
-        if (usePrefix()) {
+        if (usePrefix) {
             if (mOwner->useNalLengthFour()) {
                 sampleSize += 4;
             } else {
@@ -3185,10 +3268,10 @@
         }
         if (!hasMultipleTracks) {
             size_t bytesWritten;
-            off64_t offset = mOwner->addSample_l(copy, usePrefix(), &bytesWritten);
+            off64_t offset = mOwner->addSample_l(copy, usePrefix, isExif, &bytesWritten);
 
             if (mIsHeic) {
-                addItemOffsetAndSize(offset, bytesWritten);
+                addItemOffsetAndSize(offset, bytesWritten, isExif);
             } else {
                 uint32_t count = (mOwner->use32BitFileOffset()
                             ? mStcoTableEntries->count()
@@ -3450,10 +3533,12 @@
     return mEstimatedTrackSizeBytes;
 }
 
-int32_t MPEG4Writer::Track::getMetaSizeIncrease() const {
+int32_t MPEG4Writer::Track::getMetaSizeIncrease(
+        int32_t angle, int32_t trackCount) const {
     CHECK(mIsHeic);
 
-    int32_t grid = (mNumTiles > 1);
+    int32_t grid = (mTileWidth > 0);
+    int32_t rotate = (angle > 0);
 
     // Note that the rotation angle is in the file meta, and we don't have
     // it until start, so here the calculation has to assume rotation.
@@ -3461,25 +3546,34 @@
     // increase to ipco
     int32_t increase = 20 * (grid + 1)              // 'ispe' property
                      + (8 + mCodecSpecificDataSize) // 'hvcC' property
-                     + 9;                           // 'irot' property (worst case)
+                     ;
+
+    if (rotate) {
+        increase += 9;                              // 'irot' property (worst case)
+    }
 
     // increase to iref and idat
     if (grid) {
-        increase += (8 + 2 + 2 + mNumTiles * 2)  // 'dimg' in iref
-                  + 12;                          // ImageGrid in 'idat' (worst case)
+        increase += (12 + mNumTiles * 2)            // 'dimg' in iref
+                  + 12;                             // ImageGrid in 'idat' (worst case)
     }
 
-    // increase to iloc, iinf and ipma
-    increase += (16             // increase to 'iloc'
-              + 21              // increase to 'iinf'
-              + (3 + 2 * 2))    // increase to 'ipma' (worst case, 2 props x 2 bytes)
-              * (mNumTiles + grid);
+    increase += (12 + 2);                           // 'cdsc' in iref
 
-    // adjust to ipma:
-    // if rotation is present and only one tile, it could ref 3 properties
-    if (!grid) {
-        increase += 2;
-    }
+    // increase to iloc, iinf
+    increase += (16                                 // increase to 'iloc'
+              + 21)                                 // increase to 'iinf'
+              * (mNumTiles + grid + 1);             // "+1" is for 'Exif'
+
+    // When the total number of properties exceeds 127, property indices in
+    // 'ipma' take 2 bytes each. We write at most 4 properties per image
+    // (2x 'ispe', 1x 'hvcC', 1x 'irot'), so use 30 tracks as the threshold
+    // for switching to 2-byte indices.
+    int32_t propBytes = trackCount > 30 ? 2 : 1;
+
+    // increase to ipma
+    increase += (3 + 2 * propBytes) * mNumTiles     // 'ispe' + 'hvcC'
+             + grid * (3 + propBytes)               // 'ispe' for grid
+             + rotate * propBytes;                  // 'irot' (either on grid or tile)
 
     return increase;
 }
@@ -4239,7 +4333,7 @@
     writeInt16((uint16_t)itemCount);
     for (size_t i = 0; i < itemCount; i++) {
         writeInfeBox(mItems[i].itemId, mItems[i].itemType,
-                mItems[i].isHidden ? 1 : 0);
+                (mItems[i].isImage() && mItems[i].isHidden) ? 1 : 0);
     }
 
     endBox();
@@ -4274,21 +4368,21 @@
     writeInt32(0);          // Version = 0, Flags = 0
     {
         for (size_t i = 0; i < mItems.size(); i++) {
-            if (!mItems[i].isGrid()) {
-                continue;
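+            // Emit one reference box per ItemRefs entry (e.g. 'dimg', 'cdsc'),
+            // listing the item ids that this item references.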
+            for (size_t r = 0; r < mItems[i].refsList.size(); r++) {
+                const ItemRefs &refs = mItems[i].refsList[r];
+                beginBox(refs.key);
+                writeInt16(mItems[i].itemId);
+                size_t refCount = refs.value.size();
+                if (refCount > 65535) {
+                    ALOGW("too many entries in %s", refs.key);
+                    refCount = 65535;
+                }
+                writeInt16((uint16_t)refCount);
+                for (size_t refIndex = 0; refIndex < refCount; refIndex++) {
+                    writeInt16(refs.value[refIndex]);
+                }
+                endBox();
             }
-            beginBox("dimg");
-            writeInt16(mItems[i].itemId);
-            size_t refCount = mItems[i].dimgRefs.size();
-            if (refCount > 65535) {
-                ALOGW("too many entries in dimg");
-                refCount = 65535;
-            }
-            writeInt16((uint16_t)refCount);
-            for (size_t refIndex = 0; refIndex < refCount; refIndex++) {
-                writeInt16(mItems[i].dimgRefs[refIndex]);
-            }
-            endBox();
         }
     }
     endBox();
@@ -4384,32 +4478,45 @@
 }
 
 void MPEG4Writer::writeFileLevelMetaBox() {
-    if (mItems.empty()) {
-        ALOGE("no valid item was found");
-        return;
-    }
-
     // patch up the mPrimaryItemId and count items with prop associations
     uint16_t firstVisibleItemId = 0;
+    uint16_t firstImageItemId = 0;
     for (size_t index = 0; index < mItems.size(); index++) {
+        if (!mItems[index].isImage()) continue;
+
         if (mItems[index].isPrimary) {
             mPrimaryItemId = mItems[index].itemId;
-        } else if (!firstVisibleItemId && !mItems[index].isHidden) {
+        }
+        if (!firstImageItemId) {
+            firstImageItemId = mItems[index].itemId;
+        }
+        if (!firstVisibleItemId && !mItems[index].isHidden) {
             firstVisibleItemId = mItems[index].itemId;
         }
-
         if (!mItems[index].properties.empty()) {
             mAssociationEntryCount++;
         }
     }
 
+    if (!firstImageItemId) {
+        ALOGE("no valid image was found");
+        return;
+    }
+
     if (mPrimaryItemId == 0) {
         if (firstVisibleItemId > 0) {
-            ALOGW("didn't find primary, using first visible item");
+            ALOGW("didn't find primary, using first visible image");
             mPrimaryItemId = firstVisibleItemId;
         } else {
-            ALOGW("no primary and no visible item, using first item");
-            mPrimaryItemId = mItems[0].itemId;
+            ALOGW("no primary and no visible item, using first image");
+            mPrimaryItemId = firstImageItemId;
+        }
+    }
+
+    for (List<Track *>::iterator it = mTracks.begin();
+        it != mTracks.end(); ++it) {
+        if ((*it)->isHeic()) {
+            (*it)->flushItemRefs();
         }
     }
 
@@ -4422,6 +4529,8 @@
     writeIprpBox();
     if (mNumGrids > 0) {
         writeIdatBox();
+    }
+    if (mHasRefs) {
         writeIrefBox();
     }
     endBox();
@@ -4445,8 +4554,8 @@
     size_t index = mItems.size();
     mItems.push_back(info);
 
-    // make the item id start at 10000
-    mItems.editItemAt(index).itemId = index + 10000;
+    // make the item id start at kItemIdBase
+    mItems.editItemAt(index).itemId = index + kItemIdBase;
 
 #if (LOG_NDEBUG==0)
     if (!info.properties.empty()) {
@@ -4464,6 +4573,20 @@
     return mItems[index].itemId;
 }
 
+void MPEG4Writer::addRefs_l(uint16_t itemId, const ItemRefs &refs) {
+    if (refs.value.empty()) {
+        return;
+    }
+    if (itemId < kItemIdBase) {
+        ALOGW("itemId shouldn't be smaller than kItemIdBase");
+        return;
+    }
+
+    size_t index = itemId - kItemIdBase;
+    mItems.editItemAt(index).refsList.push_back(refs);
+    mHasRefs = true;
+}
+
 /*
  * Geodata is stored according to ISO-6709 standard.
  */
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 06a49d0..72eff94 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -479,6 +479,13 @@
 
 // static
 sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
+    // allow plugin to create surface
+    sp<PersistentSurface> pluginSurface =
+        StagefrightPluginLoader::GetCCodecInstance()->createInputSurface();
+    if (pluginSurface != nullptr) {
+        return pluginSurface;
+    }
+
     OMXClient client;
     if (client.connect() != OK) {
         ALOGE("Failed to connect to OMX to create persistent input surface.");
@@ -854,8 +861,7 @@
 
 //static
 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name) {
-    static bool ccodecEnabled = property_get_bool("debug.stagefright.ccodec", false);
-    if (ccodecEnabled && name.startsWithIgnoreCase("c2.")) {
+    if (name.startsWithIgnoreCase("c2.")) {
         return CreateCCodec();
     } else if (name.startsWithIgnoreCase("omx.")) {
         // at this time only ACodec specifies a mime type.
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 9a33168..eaff283 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -92,10 +92,15 @@
 }
 
 std::vector<MediaCodecListBuilderBase *> GetBuilders() {
-    std::vector<MediaCodecListBuilderBase *> builders {&sOmxInfoBuilder};
-    if (property_get_bool("debug.stagefright.ccodec", false)) {
-        builders.push_back(GetCodec2InfoBuilder());
+    std::vector<MediaCodecListBuilderBase *> builders;
+    // if plugin provides the input surface, we cannot use OMX video encoders.
+    // In this case, rely on plugin to provide list of OMX codecs that are usable.
+    sp<PersistentSurface> surfaceTest =
+        StagefrightPluginLoader::GetCCodecInstance()->createInputSurface();
+    if (surfaceTest == nullptr) {
+        builders.push_back(&sOmxInfoBuilder);
     }
+    builders.push_back(GetCodec2InfoBuilder());
     return builders;
 }
 
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 23e543d..98f59b5 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -190,6 +190,10 @@
         sampleMetaData.setInt32(kKeyIsSyncFrame, true);
     }
 
+    if (flags & MediaCodec::BUFFER_FLAG_MUXER_DATA) {
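+        // kKeyIsMuxerData tells MPEG4Writer to treat this buffer as muxer
+        // metadata (currently an Exif block) rather than a media sample.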
+        sampleMetaData.setInt32(kKeyIsMuxerData, 1);
+    }
+
     sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
     // This pushBuffer will wait until the mediaBuffer is consumed.
     return currentTrack->pushBuffer(mediaBuffer);
diff --git a/media/libstagefright/StagefrightPluginLoader.cpp b/media/libstagefright/StagefrightPluginLoader.cpp
index 7f13f87..519e870 100644
--- a/media/libstagefright/StagefrightPluginLoader.cpp
+++ b/media/libstagefright/StagefrightPluginLoader.cpp
@@ -44,6 +44,11 @@
     if (mCreateBuilder == nullptr) {
         ALOGD("Failed to find symbol: CreateBuilder (%s)", dlerror());
     }
+    mCreateInputSurface = (CodecBase::CreateInputSurfaceFunc)dlsym(
+            mLibHandle, "CreateInputSurface");
+    if (mCreateInputSurface == nullptr) {
+        ALOGD("Failed to find symbol: CreateInputSurface (%s)", dlerror());
+    }
 }
 
 StagefrightPluginLoader::~StagefrightPluginLoader() {
@@ -69,6 +74,14 @@
     return mCreateBuilder();
 }
 
+PersistentSurface *StagefrightPluginLoader::createInputSurface() {
+    if (mLibHandle == nullptr || mCreateInputSurface == nullptr) {
+        ALOGD("Handle or CreateInputSurface symbol is null");
+        return nullptr;
+    }
+    return mCreateInputSurface();
+}
+
 //static
 const std::unique_ptr<StagefrightPluginLoader> &StagefrightPluginLoader::GetCCodecInstance() {
     Mutex::Autolock _l(sMutex);
diff --git a/media/libstagefright/StagefrightPluginLoader.h b/media/libstagefright/StagefrightPluginLoader.h
index 2746756..999d30c 100644
--- a/media/libstagefright/StagefrightPluginLoader.h
+++ b/media/libstagefright/StagefrightPluginLoader.h
@@ -20,6 +20,7 @@
 
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecListWriter.h>
+#include <media/stagefright/PersistentSurface.h>
 #include <utils/Mutex.h>
 
 namespace android {
@@ -31,6 +32,8 @@
 
     CodecBase *createCodec();
     MediaCodecListBuilderBase *createBuilder();
+    PersistentSurface *createInputSurface();
+
 private:
     explicit StagefrightPluginLoader(const char *libPath);
 
@@ -40,6 +43,7 @@
     void *mLibHandle;
     CodecBase::CreateCodecFunc mCreateCodec;
     MediaCodecListBuilderBase::CreateBuilderFunc mCreateBuilder;
+    CodecBase::CreateInputSurfaceFunc mCreateInputSurface;
 };
 
 }  // namespace android
diff --git a/media/libstagefright/bqhelper/FrameDropper.cpp b/media/libstagefright/bqhelper/FrameDropper.cpp
index 7afe837..d2a2473 100644
--- a/media/libstagefright/bqhelper/FrameDropper.cpp
+++ b/media/libstagefright/bqhelper/FrameDropper.cpp
@@ -34,7 +34,12 @@
 }
 
 status_t FrameDropper::setMaxFrameRate(float maxFrameRate) {
-    if (maxFrameRate <= 0) {
+    if (maxFrameRate < 0) {
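+        // A negative rate means "no cap": disable the frame-drop logic
+        // entirely (see FrameDropper::disabled()).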
+        mMinIntervalUs = -1ll;
+        return OK;
+    }
+
+    if (maxFrameRate == 0) {
         ALOGE("framerate should be positive but got %f.", maxFrameRate);
         return BAD_VALUE;
     }
diff --git a/media/libstagefright/bqhelper/GraphicBufferSource.cpp b/media/libstagefright/bqhelper/GraphicBufferSource.cpp
index 68ae8ec..dd03d38 100644
--- a/media/libstagefright/bqhelper/GraphicBufferSource.cpp
+++ b/media/libstagefright/bqhelper/GraphicBufferSource.cpp
@@ -786,10 +786,16 @@
                 static_cast<long long>(mPrevFrameUs));
     } else {
         if (timeUs <= mPrevFrameUs) {
-            // Drop the frame if it's going backward in time. Bad timestamp
-            // could disrupt encoder's rate control completely.
-            ALOGW("Dropping frame that's going backward in time");
-            return false;
+            if (mFrameDropper != NULL && mFrameDropper->disabled()) {
+                // Warn only, client has disabled frame drop logic possibly for image
+                // encoding cases where camera's ZSL mode could send out of order frames.
+                ALOGW("Received frame that's going backward in time");
+            } else {
+                // Drop the frame if it's going backward in time. Bad timestamp
+                // could disrupt encoder's rate control completely.
+                ALOGW("Dropping frame that's going backward in time");
+                return false;
+            }
         }
 
         mPrevFrameUs = timeUs;
@@ -1110,6 +1116,7 @@
         mEndOfStream = false;
         mEndOfStreamSent = false;
         mSkipFramesBeforeNs = -1ll;
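+        // Drop any previously configured FrameDropper so a stale max-fps
+        // setting does not carry over into the next session.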
+        mFrameDropper.clear();
         mFrameRepeatIntervalUs = -1ll;
         mRepeatLastFrameGeneration = 0;
         mOutstandingFrameRepeatCount = 0;
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
index c5a6d4b..4e83059 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
@@ -35,6 +35,9 @@
     // Returns false if max frame rate has not been set via setMaxFrameRate.
     bool shouldDrop(int64_t timeUs);
 
+    // Returns true if all frame drop logic should be disabled.
+    bool disabled() { return (mMinIntervalUs == -1ll); }
+
 protected:
     virtual ~FrameDropper();
 
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
index 5af9556..abc8910 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
@@ -149,7 +149,21 @@
     // When set, the sample's timestamp will be adjusted with the timeOffsetUs.
     status_t setTimeOffsetUs(int64_t timeOffsetUs);
 
-    // When set, the max frame rate fed to the encoder will be capped at maxFps.
+    /*
+     * Set the maximum frame rate on the source.
+     *
+     * When maxFps is a positive number, it indicates the maximum rate at which
+     * the buffers from this source will be sent to the encoder. Excessive
+     * frames will be dropped to meet the frame rate requirement.
+     *
+     * When maxFps is a negative number, any frame drop logic will be disabled
+     * and all frames from this source will be sent to the encoder, even when
+     * the timestamp goes backwards. Note that some components may still drop
+     * out-of-order frames silently, so this usually has to be used in
+     * conjunction with the OMXNodeInstance::setMaxPtsGapUs() workaround.
+     *
+     * When maxFps is 0, this call will fail with BAD_VALUE.
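+     *
+     * A minimal usage sketch (assuming 'source' is an already-connected
+     * GraphicBufferSource):
+     *     source->setMaxFps(30.0f);   // cap encoder input at 30 fps
+     *     source->setMaxFps(-1.0f);   // disable frame dropping entirely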
+     */
     status_t setMaxFps(float maxFps);
 
     // Sets the time lapse (or slow motion) parameters.
diff --git a/media/libstagefright/codecs/xaacdec/Android.bp b/media/libstagefright/codecs/xaacdec/Android.bp
new file mode 100644
index 0000000..7392f1e
--- /dev/null
+++ b/media/libstagefright/codecs/xaacdec/Android.bp
@@ -0,0 +1,36 @@
+cc_library_shared {
+    name: "libstagefright_soft_xaacdec",
+    vendor_available: true,
+
+    srcs: [
+        "SoftXAAC.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+        "frameworks/native/include/media/openmax",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-DENABLE_MPEG_D_DRC"
+    ],
+
+    sanitize: {
+        // integer_overflow: true,
+        misc_undefined: [ "signed-integer-overflow", "unsigned-integer-overflow", ],
+        cfi: true,
+    },
+
+    static_libs: ["libxaacdec"],
+
+    shared_libs: [
+        "libstagefright_omx",
+        "libstagefright_foundation",
+        "libutils",
+        "libcutils",
+        "liblog",
+    ],
+
+    compile_multilib: "32",
+}
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
new file mode 100644
index 0000000..b3aefa8
--- /dev/null
+++ b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
@@ -0,0 +1,1881 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftXAAC"
+#include <utils/Log.h>
+
+#include "SoftXAAC.h"
+
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+#include <math.h>
+
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define DRC_KEY_AAC_DRC_EFFECT_TYPE   (3)  /* Default Effect type is "Limited playback" */
+/* REF_LEVEL of 64 pairs well with EFFECT_TYPE of 3. */
+#define DRC_DEFAULT_MOBILE_LOUDNESS_LEVEL (64)  /* Default loudness value for MPEG-D DRC */
+
+#define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
+#define PROP_DRC_OVERRIDE_CUT        "aac_drc_cut"
+#define PROP_DRC_OVERRIDE_BOOST      "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY      "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
+#define PROP_DRC_OVERRIDE_EFFECT_TYPE "ro.aac_drc_effect_type"
+#define PROP_DRC_OVERRIDE_LOUDNESS_LEVEL "aac_drc_loudness_level"
+
+#define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
+
+
+#define RETURN_IF_NE(returned, expected, retval, str) \
+        if ( returned != expected ) { \
+            ALOGE("Error in %s: Returned: %d Expected: %d", str, returned, expected); \
+            return retval; \
+        }
+
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+    params->nSize = sizeof(T);
+    params->nVersion.s.nVersionMajor = 1;
+    params->nVersion.s.nVersionMinor = 0;
+    params->nVersion.s.nRevision = 0;
+    params->nVersion.s.nStep = 0;
+}
+
+static const OMX_U32 kSupportedProfiles[] = {
+    OMX_AUDIO_AACObjectLC,
+    OMX_AUDIO_AACObjectHE,
+    OMX_AUDIO_AACObjectHE_PS,
+    OMX_AUDIO_AACObjectLD,
+    OMX_AUDIO_AACObjectELD,
+};
+
+SoftXAAC::SoftXAAC(
+        const char *name,
+        const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData,
+        OMX_COMPONENTTYPE **component)
+    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+    mIsADTS(false),
+    mInputBufferCount(0),
+    mOutputBufferCount(0),
+    mSignalledError(false),
+    mLastInHeader(NULL),
+    mPrevTimestamp(0),
+    mCurrentTimestamp(0),
+    mOutputPortSettingsChange(NONE),
+    mXheaacCodecHandle(NULL),
+    mMpegDDrcHandle(NULL),
+    mInputBufferSize(0),
+    mOutputFrameLength(1024),
+    mInputBuffer(NULL),
+    mOutputBuffer(NULL),
+    mSampFreq(0),
+    mNumChannels(0),
+    mPcmWdSz(0),
+    mChannelMask(0),
+    mIsCodecInitialized(false),
+    mIsCodecConfigFlushRequired(false),
+    mpegd_drc_present(0),
+    drc_flag(0)
+
+{
+    initPorts();
+    CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftXAAC::~SoftXAAC() {
+    int errCode = deInitXAACDecoder();
+    if (0 != errCode) {
+        ALOGE("deInitXAACDecoder() failed %d",errCode);
+    }
+
+    mIsCodecInitialized = false;
+    mIsCodecConfigFlushRequired = false;
+}
+
+void SoftXAAC::initPorts() {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+
+    def.nPortIndex = 0;
+    def.eDir = OMX_DirInput;
+    def.nBufferCountMin = kNumInputBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.nBufferSize = 8192;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainAudio;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.nBufferAlignment = 1;
+
+    def.format.audio.cMIMEType = const_cast<char *>("audio/aac");
+    def.format.audio.pNativeRender = NULL;
+    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+    def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+
+    addPort(def);
+
+    def.nPortIndex = 1;
+    def.eDir = OMX_DirOutput;
+    def.nBufferCountMin = kNumOutputBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.nBufferSize = 4096 * MAX_CHANNEL_COUNT;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainAudio;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.nBufferAlignment = 2;
+
+    def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+    def.format.audio.pNativeRender = NULL;
+    def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+    addPort(def);
+}
+
+status_t SoftXAAC::initDecoder() {
+    status_t status = UNKNOWN_ERROR;
+
+    int ui_drc_val;
+    IA_ERRORCODE err_code = IA_NO_ERROR;
+    int loop = 0;
+
+    err_code = initXAACDecoder();
+    if (err_code != IA_NO_ERROR) {
+        if (NULL == mXheaacCodecHandle) {
+            ALOGE("AAC decoder handle is null");
+        }
+        if (NULL == mMpegDDrcHandle) {
+            ALOGE("MPEG-D DRC decoder handle is null");
+        }
+        for (loop = 1; loop < mMallocCount; loop++) {
+            if (mMemoryArray[loop] == NULL) {
+                ALOGE("memory allocation error %d", loop);
+                break;
+            }
+        }
+        ALOGE("initXAACDecoder failed");
+
+        for (loop = 0; loop < mMallocCount; loop++) {
+            if (mMemoryArray[loop])
+                free(mMemoryArray[loop]);
+        }
+        mMallocCount = 0;
+        return status;
+    } else {
+        status = OK;
+    }
+
+    mEndOfInput = false;
+    mEndOfOutput = false;
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d",ui_drc_val,
+                DRC_DEFAULT_MOBILE_REF_LEVEL);
+    }
+    else
+    {
+        ui_drc_val= DRC_DEFAULT_MOBILE_REF_LEVEL;
+    }
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL,
+                                &ui_drc_val);
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
+#ifdef     ENABLE_MPEG_D_DRC
+
+    if (property_get(PROP_DRC_OVERRIDE_LOUDNESS_LEVEL, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d",ui_drc_val,
+                DRC_DEFAULT_MOBILE_LOUDNESS_LEVEL);
+    }
+    else
+    {
+        ui_drc_val= DRC_DEFAULT_MOBILE_LOUDNESS_LEVEL;
+    }
+
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS,
+                                &ui_drc_val);
+
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
+#endif
+
+
+    if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", ui_drc_val,
+                DRC_DEFAULT_MOBILE_DRC_CUT);
+    }
+    else
+    {
+        ui_drc_val=DRC_DEFAULT_MOBILE_DRC_CUT;
+    }
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT,
+                                &ui_drc_val);
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
+
+    if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", ui_drc_val,
+                DRC_DEFAULT_MOBILE_DRC_BOOST);
+    }
+    else
+    {
+        ui_drc_val = DRC_DEFAULT_MOBILE_DRC_BOOST;
+    }
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST,
+                                &ui_drc_val);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
+
+    if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired Heavy compression factor of %d instead of %d", ui_drc_val,
+                DRC_DEFAULT_MOBILE_DRC_HEAVY);
+    }
+    else
+    {
+        ui_drc_val = DRC_DEFAULT_MOBILE_DRC_HEAVY;
+    }
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP,
+                                &ui_drc_val);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
+
+#ifdef ENABLE_MPEG_D_DRC
+    if (property_get(PROP_DRC_OVERRIDE_EFFECT_TYPE, value, NULL))
+    {
+        ui_drc_val = atoi(value);
+        ALOGV("AAC decoder using desired DRC effect type of %d instead of %d", ui_drc_val,
+                DRC_KEY_AAC_DRC_EFFECT_TYPE);
+    }
+    else
+    {
+        ui_drc_val = DRC_KEY_AAC_DRC_EFFECT_TYPE;
+    }
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                              IA_API_CMD_SET_CONFIG_PARAM,
+                              IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE,
+                              &ui_drc_val);
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
+
+#endif
+    return status;
+}
+
+OMX_ERRORTYPE SoftXAAC::internalGetParameter(
+        OMX_INDEXTYPE index, OMX_PTR params) {
+
+    switch ((OMX_U32) index) {
+
+        case OMX_IndexParamAudioPortFormat:
+        {
+            OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
+                (OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
+
+            if (!isValidOMXParam(formatParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (formatParams->nPortIndex > 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (formatParams->nIndex > 0) {
+                return OMX_ErrorNoMore;
+            }
+
+            formatParams->eEncoding =
+                (formatParams->nPortIndex == 0)
+                    ? OMX_AUDIO_CodingAAC : OMX_AUDIO_CodingPCM;
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioAac:
+        {
+            OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
+                (OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
+
+            if (!isValidOMXParam(aacParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (aacParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            aacParams->nBitRate = 0;
+            aacParams->nAudioBandWidth = 0;
+            aacParams->nAACtools = 0;
+            aacParams->nAACERtools = 0;
+            aacParams->eAACProfile = OMX_AUDIO_AACObjectMain;
+
+            aacParams->eAACStreamFormat =
+                mIsADTS
+                    ? OMX_AUDIO_AACStreamFormatMP4ADTS
+                    : OMX_AUDIO_AACStreamFormatMP4FF;
+
+            aacParams->eChannelMode = OMX_AUDIO_ChannelModeStereo;
+
+            if (!isConfigured()) {
+                aacParams->nChannels = 1;
+                aacParams->nSampleRate = 44100;
+                aacParams->nFrameLength = 0;
+            } else {
+                aacParams->nChannels = mNumChannels;
+                aacParams->nSampleRate = mSampFreq;
+                aacParams->nFrameLength = mOutputFrameLength;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioPcm:
+        {
+            OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+            if (!isValidOMXParam(pcmParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (pcmParams->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            pcmParams->eNumData = OMX_NumericalDataSigned;
+            pcmParams->eEndian = OMX_EndianBig;
+            pcmParams->bInterleaved = OMX_TRUE;
+            pcmParams->nBitPerSample = 16;
+            pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+            pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+            pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+            pcmParams->eChannelMapping[2] = OMX_AUDIO_ChannelCF;
+            pcmParams->eChannelMapping[3] = OMX_AUDIO_ChannelLFE;
+            pcmParams->eChannelMapping[4] = OMX_AUDIO_ChannelLS;
+            pcmParams->eChannelMapping[5] = OMX_AUDIO_ChannelRS;
+
+            if (!isConfigured()) {
+                pcmParams->nChannels = 1;
+                pcmParams->nSamplingRate = 44100;
+            } else {
+                pcmParams->nChannels = mNumChannels;
+                pcmParams->nSamplingRate = mSampFreq;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioProfileQuerySupported:
+        {
+            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
+                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
+
+            if (!isValidOMXParam(profileParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (profileParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileParams->eProfile =
+                kSupportedProfiles[profileParams->nProfileIndex];
+
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalGetParameter(index, params);
+    }
+}
+
+OMX_ERRORTYPE SoftXAAC::internalSetParameter(
+        OMX_INDEXTYPE index, const OMX_PTR params) {
+
+    switch ((int)index) {
+        case OMX_IndexParamStandardComponentRole:
+        {
+            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+                (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+            if (!isValidOMXParam(roleParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (strncmp((const char *)roleParams->cRole,
+                        "audio_decoder.aac",
+                        OMX_MAX_STRINGNAME_SIZE - 1)) {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioPortFormat:
+        {
+            const OMX_AUDIO_PARAM_PORTFORMATTYPE *formatParams =
+                (const OMX_AUDIO_PARAM_PORTFORMATTYPE *)params;
+
+            if (!isValidOMXParam(formatParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (formatParams->nPortIndex > 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            if ((formatParams->nPortIndex == 0
+                        && formatParams->eEncoding != OMX_AUDIO_CodingAAC)
+                || (formatParams->nPortIndex == 1
+                        && formatParams->eEncoding != OMX_AUDIO_CodingPCM)) {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioAac:
+        {
+            const OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
+                (const OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
+
+            if (!isValidOMXParam(aacParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (aacParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (aacParams->eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4FF) {
+                mIsADTS = false;
+            } else if (aacParams->eAACStreamFormat
+                        == OMX_AUDIO_AACStreamFormatMP4ADTS) {
+                mIsADTS = true;
+            } else {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioAndroidAacDrcPresentation:
+        {
+            const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
+                    (const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
+
+            if (!isValidOMXParam(aacPresParams)) {
+                ALOGE("set OMX_ErrorBadParameter");
+                return OMX_ErrorBadParameter;
+            }
+
+            // for the following parameters of the OMX_AUDIO_PARAM_AACPROFILETYPE structure,
+            // a value of -1 implies the parameter is not set by the application:
+            //   nMaxOutputChannels     -1 by default
+            //   nDrcCut                uses default platform properties, see initDecoder()
+            //   nDrcBoost                idem
+            //   nHeavyCompression        idem
+            //   nTargetReferenceLevel    idem
+            //   nEncodedTargetLevel      idem
+            if (aacPresParams->nMaxOutputChannels >= 0) {
+                int max;
+                if (aacPresParams->nMaxOutputChannels >= 8) { max = 8; }
+                else if (aacPresParams->nMaxOutputChannels >= 6) { max = 6; }
+                else if (aacPresParams->nMaxOutputChannels >= 2) { max = 2; }
+                else {
+                    // -1 or 0: disable downmix,  1: mono
+                    max = aacPresParams->nMaxOutputChannels;
+                }
+            }
+            /* Apply DRC Changes */
+            setXAACDRCInfo(aacPresParams->nDrcCut,
+                           aacPresParams->nDrcBoost,
+                           aacPresParams->nTargetReferenceLevel,
+                           aacPresParams->nHeavyCompression
+                          #ifdef ENABLE_MPEG_D_DRC
+                           ,aacPresParams->nDrcEffectType
+                          #endif
+                           );    // TODO: Revert this change
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamAudioPcm:
+        {
+            const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+                (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+            if (!isValidOMXParam(pcmParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (pcmParams->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalSetParameter(index, params);
+    }
+}
+
+bool SoftXAAC::isConfigured() const {
+    return mInputBufferCount > 0;
+}
+
+void SoftXAAC::onQueueFilled(OMX_U32 /* portIndex */) {
+    if (mSignalledError || mOutputPortSettingsChange != NONE) {
+        ALOGE("onQueueFilled do not process %d %d",mSignalledError,mOutputPortSettingsChange);
+        return;
+    }
+
+    uint8_t*  inBuffer        = NULL;
+    uint32_t  inBufferLength  = 0;
+
+    List<BufferInfo *> &inQueue = getPortQueue(0);
+    List<BufferInfo *> &outQueue = getPortQueue(1);
+
+    signed int numOutBytes = 0;
+
+    /* If a decoder call fails midway, mOutputFrameLength is used.           */
+    /* Decoded output for AAC is 1024/2048 samples per channel.              */
+    /* TODO: For USAC, mOutputFrameLength can go up to 4096.                 */
+    /* Note: the buffer save/retrieve logic currently assumes 2 bytes        */
+    /* per sample.                                                           */
+    if (mIsCodecInitialized) {
+        numOutBytes = mOutputFrameLength * (mPcmWdSz/8) * mNumChannels;
+        if ((mPcmWdSz/8) != 2) {
+            ALOGE("XAAC assumes 2 bytes per sample! mPcmWdSz %d",mPcmWdSz);
+        }
+    }
+
+    while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
+        if (!inQueue.empty()) {
+            BufferInfo *inInfo = *inQueue.begin();
+            OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+            mEndOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
+
+            if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+                ALOGE("first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
+                inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
+            }
+            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+                BufferInfo *inInfo = *inQueue.begin();
+                OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+                inBuffer = inHeader->pBuffer + inHeader->nOffset;
+                inBufferLength = inHeader->nFilledLen;
+
+                /* GA header configuration sent to Decoder! */
+                int err_code = configXAACDecoder(inBuffer,inBufferLength);
+                if (0 != err_code) {
+                    ALOGW("configXAACDecoder err_code = %d", err_code);
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
+                    return;
+                }
+                mInputBufferCount++;
+                mOutputBufferCount++; // fake increase of outputBufferCount to keep the counters aligned
+
+                inInfo->mOwnedByUs = false;
+                inQueue.erase(inQueue.begin());
+                mLastInHeader = NULL;
+                inInfo = NULL;
+                notifyEmptyBufferDone(inHeader);
+                inHeader = NULL;
+
+                // Only send out port settings changed event if both sample rate
+                // and mNumChannels are valid.
+                if (mSampFreq && mNumChannels && !mIsCodecConfigFlushRequired) {
+                    ALOGV("Configuring decoder: %d Hz, %d channels", mSampFreq, mNumChannels);
+                    notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+                    mOutputPortSettingsChange = AWAITING_DISABLED;
+                }
+
+                return;
+            }
+
+            if (inHeader->nFilledLen == 0) {
+                inInfo->mOwnedByUs = false;
+                inQueue.erase(inQueue.begin());
+                mLastInHeader = NULL;
+                inInfo = NULL;
+                notifyEmptyBufferDone(inHeader);
+                inHeader = NULL;
+                continue;
+            }
+
+            // Restore Offset and Length for Port reconfig case
+            size_t tempOffset =  inHeader->nOffset;
+            size_t tempFilledLen = inHeader->nFilledLen;
+            if (mIsADTS) {
+                 size_t adtsHeaderSize = 0;
+                // skip 30 bits, aac_frame_length follows.
+                // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+
+                const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
+
+                bool signalError = false;
+                if (inHeader->nFilledLen < 7) {
+                    ALOGE("Audio data too short to contain even the ADTS header. "
+                            "Got %d bytes.", inHeader->nFilledLen);
+                    hexdump(adtsHeader, inHeader->nFilledLen);
+                    signalError = true;
+                } else {
+                    bool protectionAbsent = (adtsHeader[1] & 1);
+
+                    unsigned aac_frame_length =
+                        ((adtsHeader[3] & 3) << 11)
+                        | (adtsHeader[4] << 3)
+                        | (adtsHeader[5] >> 5);
+
+                    if (inHeader->nFilledLen < aac_frame_length) {
+                        ALOGE("Not enough audio data for the complete frame. "
+                                "Got %d bytes, frame size according to the ADTS "
+                                "header is %u bytes.",
+                                inHeader->nFilledLen, aac_frame_length);
+                        hexdump(adtsHeader, inHeader->nFilledLen);
+                        signalError = true;
+                    } else {
+                        adtsHeaderSize = (protectionAbsent ? 7 : 9);
+                        if (aac_frame_length < adtsHeaderSize) {
+                            signalError = true;
+                        } else {
+                            inBuffer = (uint8_t *)adtsHeader + adtsHeaderSize;
+                            inBufferLength = aac_frame_length - adtsHeaderSize;
+
+                            inHeader->nOffset += adtsHeaderSize;
+                            inHeader->nFilledLen -= adtsHeaderSize;
+                        }
+                    }
+                }
+
+                if (signalError) {
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
+                    return;
+                }
+
+                // insert buffer size and time stamp
+                if (mLastInHeader != inHeader) {
+                    mCurrentTimestamp = inHeader->nTimeStamp;
+                    mLastInHeader = inHeader;
+                } else {
+                    mCurrentTimestamp = mPrevTimestamp +
+                        mOutputFrameLength  * 1000000ll / mSampFreq;
+                }
+            } else {
+                inBuffer = inHeader->pBuffer + inHeader->nOffset;
+                inBufferLength = inHeader->nFilledLen;
+                mLastInHeader = inHeader;
+                mCurrentTimestamp = inHeader->nTimeStamp;
+            }
+
+            int numLoops = 0;
+            signed int prevSampleRate = mSampFreq;
+            signed int prevNumChannels = mNumChannels;
+
+            /* The XAAC decoder expects the first frame to be fed via configXAACDecoder, */
+            /* which initializes the codec. Once initialized, decodeXAACStream is called */
+            /* with the same frame to decode it.                                         */
+            if (!mIsCodecInitialized) {
+                int err_code = configXAACDecoder(inBuffer,inBufferLength);
+                if (0 != err_code) {
+                    ALOGW("configXAACDecoder Failed 2 err_code = %d", err_code);
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
+                    return;
+                }
+                mIsCodecConfigFlushRequired = true;
+            }
+
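+            // Abort if the stream never yields a valid sample rate/channel count; otherwise trigger a
+            // port reconfiguration when either value has changed.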
+            if (!mSampFreq || !mNumChannels) {
+                if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
+                    ALOGW("Invalid AAC stream");
+                    ALOGW("mSampFreq %d mNumChannels %d ",mSampFreq,mNumChannels);
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+            } else if ((mSampFreq != prevSampleRate) ||
+                       (mNumChannels != prevNumChannels)) {
+                ALOGV("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+                      prevSampleRate, mSampFreq, prevNumChannels, mNumChannels);
+                inHeader->nOffset = tempOffset;
+                inHeader->nFilledLen = tempFilledLen;
+                notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+                mOutputPortSettingsChange = AWAITING_DISABLED;
+                return;
+            }
+
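+            // Decode one frame: the codec reports how many input bytes it consumed and how many PCM bytes it produced.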
+            signed int bytesConsumed = 0;
+            int errorCode = 0;
+            if (mIsCodecInitialized) {
+                errorCode = decodeXAACStream(inBuffer,inBufferLength, &bytesConsumed, &numOutBytes);
+            } else {
+                ALOGW("Assumption that first frame after header initializes decoder failed!");
+            }
+            inHeader->nFilledLen -= bytesConsumed;
+            inHeader->nOffset += bytesConsumed;
+
+            if (inHeader->nFilledLen != 0) {
+                ALOGE("All data not consumed");
+            }
+
+            /* In case of error, decoder would have given out empty buffer */
+            if ((0 != errorCode) && (0 == numOutBytes) && mIsCodecInitialized) {
+                numOutBytes = mOutputFrameLength * (mPcmWdSz/8) * mNumChannels;
+            }
+            numLoops++;
+
+            if (0 == bytesConsumed) {
+                ALOGE("bytesConsumed = 0 should never happen");
+                mSignalledError = true;
+                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                return;
+            }
+
+            if (errorCode) {
+                /* On error the codec is expected to clear its output buffer internally */
+                /* TODO - check whether this memset duplicates the reset inside the codec */
+                memset(mOutputBuffer, 0, numOutBytes); // TODO: check for overflow, ASAN
+
+                // Discard input buffer.
+                if (inHeader) {
+                    inHeader->nFilledLen = 0;
+                }
+
+                // fall through
+            }
+
+            if (inHeader && inHeader->nFilledLen == 0) {
+                inInfo->mOwnedByUs = false;
+                mInputBufferCount++;
+                inQueue.erase(inQueue.begin());
+                mLastInHeader = NULL;
+                inInfo = NULL;
+                notifyEmptyBufferDone(inHeader);
+                inHeader = NULL;
+            } else {
+                ALOGV("inHeader->nFilledLen = %d", inHeader ? inHeader->nFilledLen : 0);
+            }
+
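+            // Copy the decoded PCM into the next available output buffer and return it to the client.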
+            if (!outQueue.empty() && numOutBytes) {
+                BufferInfo *outInfo = *outQueue.begin();
+                OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+                if (outHeader->nOffset != 0) {
+                    ALOGE("outHeader->nOffset != 0 is not handled");
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+
+                signed short *outBuffer =
+                        reinterpret_cast<signed short *>(outHeader->pBuffer + outHeader->nOffset);
+                int samplesize = mNumChannels * sizeof(int16_t);
+                if (outHeader->nOffset
+                        + mOutputFrameLength * samplesize
+                        > outHeader->nAllocLen) {
+                    ALOGE("buffer overflow");
+                    mSignalledError = true;
+                    notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                    return;
+                }
+                memcpy(outBuffer, mOutputBuffer, numOutBytes);
+                outHeader->nFilledLen = numOutBytes;
+
+                if (mEndOfInput && !outQueue.empty()) {
+                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+                    mEndOfOutput = true;
+                } else {
+                    outHeader->nFlags = 0;
+                }
+                outHeader->nTimeStamp = mCurrentTimestamp;
+                mPrevTimestamp = mCurrentTimestamp;
+
+                mOutputBufferCount++;
+                outInfo->mOwnedByUs = false;
+                outQueue.erase(outQueue.begin());
+                outInfo = NULL;
+                notifyFillBufferDone(outHeader);
+                outHeader = NULL;
+            }
+        }
+
+        if (mEndOfInput) {
+            if (!outQueue.empty()) {
+                if (!mEndOfOutput) {
+                    ALOGV(" empty block signaling EOS");
+                    // send partial or empty block signaling EOS
+                    mEndOfOutput = true;
+                    BufferInfo *outInfo = *outQueue.begin();
+                    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+                    outHeader->nFilledLen = 0;
+                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+                    outHeader->nTimeStamp = mPrevTimestamp;
+
+                    mOutputBufferCount++;
+                    outInfo->mOwnedByUs = false;
+                    outQueue.erase(outQueue.begin());
+                    outInfo = NULL;
+                    notifyFillBufferDone(outHeader);
+                    outHeader = NULL;
+                }
+                break; // EOS has been signaled; nothing more to output
+            }
+        }
+    }
+}
+
+void SoftXAAC::onPortFlushCompleted(OMX_U32 portIndex) {
+    if (portIndex == 0) {
+        // Make sure that the next buffer output does not still
+        // depend on fragments from the last one decoded.
+        // drain all existing data
+        if (mIsCodecInitialized) {
+            configflushDecode();
+        }
+        drainDecoder();
+        mLastInHeader = NULL;
+        mEndOfInput = false;
+    } else {
+        mEndOfOutput = false;
+    }
+}
+
+void SoftXAAC::configflushDecode() {
+    IA_ERRORCODE err_code;
+    UWORD32 ui_init_done;
+    uint32_t inBufferLength = 8203;
+
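+    // Flush the codec's internal state, then re-query init-done and refresh the stream info.
+    // Return codes from this flush sequence are not checked here.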
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_FLUSH_MEM,
+                                NULL);
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_INPUT_BYTES,
+                                0,
+                                &inBufferLength);
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_FLUSH_MEM,
+                                NULL);
+
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_INIT_DONE_QUERY,
+                                &ui_init_done);
+
+
+    if (ui_init_done) {
+        err_code = getXAACStreamInfo();
+        ALOGV("Stream info after flush: sampFreq %d numChannels %d pcmWdSz %d channelMask %d outputFrameLength %d",
+              mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
+        if (mNumChannels > MAX_CHANNEL_COUNT) {
+            ALOGE("Number of channels %d exceeds MAX_CHANNEL_COUNT", mNumChannels);
+            mIsCodecInitialized = false;
+        } else {
+            mIsCodecInitialized = true;
+        }
+    }
+}
+
+int SoftXAAC::drainDecoder() {
+    return 0;
+}
+
+void SoftXAAC::onReset() {
+    drainDecoder();
+
+    // reset the "configured" state
+    mInputBufferCount = 0;
+    mOutputBufferCount = 0;
+    mEndOfInput = false;
+    mEndOfOutput = false;
+    mLastInHeader = NULL;
+
+    mSignalledError = false;
+    mOutputPortSettingsChange = NONE;
+}
+
+void SoftXAAC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+    if (portIndex != 1) {
+        return;
+    }
+
+    switch (mOutputPortSettingsChange) {
+        case NONE:
+            break;
+
+        case AWAITING_DISABLED:
+        {
+            CHECK(!enabled);
+            mOutputPortSettingsChange = AWAITING_ENABLED;
+            break;
+        }
+
+        default:
+        {
+            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+            CHECK(enabled);
+            mOutputPortSettingsChange = NONE;
+            break;
+        }
+    }
+}
+
+int SoftXAAC::initXAACDecoder() {
+    LOOPIDX i;
+
+    /* Error code */
+    IA_ERRORCODE err_code = IA_NO_ERROR;
+
+    /* First part                                        */
+    /* Error Handler Init                                */
+    /* Get Library Name, Library Version and API Version */
+    /* Initialize API structure + Default config set     */
+    /* Set config params from user                       */
+    /* Initialize memory tables                          */
+    /* Get memory information and allocate memory        */
+
+    /* Memory variables */
+    UWORD32 ui_proc_mem_tabs_size;
+    /* API size */
+    UWORD32 pui_api_size;
+
+    mInputBufferSize = 0;
+    mInputBuffer = 0;
+    mOutputBuffer = 0;
+    mMallocCount = 0;
+
+    /* Process struct initing end */
+    /* ******************************************************************/
+    /* Initialize API structure and set config params to default        */
+    /* ******************************************************************/
+
+    /* Get the API size */
+    err_code = ixheaacd_dec_api(NULL,
+                                IA_API_CMD_GET_API_SIZE,
+                                0,
+                                &pui_api_size);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_API_SIZE");
+
+    /* Allocate memory for API */
+    mMemoryArray[mMallocCount] = memalign(4, pui_api_size);
+    if (mMemoryArray[mMallocCount] == NULL) {
+        ALOGE("memalign for API size %d failed", pui_api_size);
+        return IA_FATAL_ERROR;
+    }
+    /* Set API object with the memory allocated */
+    mXheaacCodecHandle =
+        (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
+    mMallocCount++;
+
+    /* Set the config params to default values */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS,
+                                NULL);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
+#ifdef ENABLE_MPEG_D_DRC
+    /* Get the API size */
+    err_code = ia_drc_dec_api(NULL, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size);
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_API_SIZE");
+
+    /* Allocate memory for API */
+    mMemoryArray[mMallocCount] = memalign(4, pui_api_size);
+    if (mMemoryArray[mMallocCount] == NULL) {
+        ALOGE("malloc for drc api structure Failed");
+        return IA_FATAL_ERROR;
+    }
+    memset(mMemoryArray[mMallocCount], 0, pui_api_size);
+
+    /* Set API object with the memory allocated */
+    mMpegDDrcHandle =
+        (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
+    mMallocCount++;
+
+    /* Set the config params to default values */
+    err_code = ia_drc_dec_api(
+       mMpegDDrcHandle,
+       IA_API_CMD_INIT,
+       IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS,
+       NULL);
+
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
+#endif
+
+    /* ******************************************************************/
+    /* Set config parameters                                            */
+    /* ******************************************************************/
+    UWORD32 ui_mp4_flag = 1;
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4,
+                                &ui_mp4_flag);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4");
+
+    /* ******************************************************************/
+    /* Initialize Memory info tables                                    */
+    /* ******************************************************************/
+
+    /* Get memory info tables size */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_MEMTABS_SIZE,
+                                0,
+                                &ui_proc_mem_tabs_size);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEMTABS_SIZE");
+    mMemoryArray[mMallocCount] = memalign(4, ui_proc_mem_tabs_size);
+    if (mMemoryArray[mMallocCount] == NULL) {
+        ALOGE("memalign for memtabs of size %d failed", ui_proc_mem_tabs_size);
+        return IA_FATAL_ERROR;
+    }
+
+    /* Set pointer for process memory tables    */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_MEMTABS_PTR,
+                                0,
+                                (pVOID)((WORD8*)mMemoryArray[mMallocCount]));
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_MEMTABS_PTR");
+    mMallocCount++;
+
+    /* initialize the API, post config, fill memory tables  */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS,
+                                NULL);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
+
+    /* ******************************************************************/
+    /* Allocate Memory with info from library                           */
+    /* ******************************************************************/
+    /* There are four different types of memories, that needs to be allocated */
+    /* persistent,scratch,input and output */
+    for (i = 0; i < 4; i++) {
+        int ui_size = 0, ui_alignment = 0, ui_type = 0;
+        pVOID pv_alloc_ptr;
+
+        /* Get memory size */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_GET_MEM_INFO_SIZE,
+                                    i,
+                                    &ui_size);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
+
+        /* Get memory alignment */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_GET_MEM_INFO_ALIGNMENT,
+                                    i,
+                                    &ui_alignment);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
+
+        /* Get memory type */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_GET_MEM_INFO_TYPE,
+                                    i,
+                                    &ui_type);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
+
+        mMemoryArray[mMallocCount] = memalign(ui_alignment, ui_size);
+        if (mMemoryArray[mMallocCount] == NULL) {
+            ALOGE("memalign of size %d (alignment %d) failed", ui_size, ui_alignment);
+            return IA_FATAL_ERROR;
+        }
+        pv_alloc_ptr = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
+        mMallocCount++;
+
+        /* Set the buffer pointer */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_MEM_PTR,
+                                    i,
+                                    pv_alloc_ptr);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_MEM_PTR");
+        if (ui_type == IA_MEMTYPE_INPUT) {
+            mInputBuffer = (pWORD8)pv_alloc_ptr;
+            mInputBufferSize = ui_size;
+        }
+
+        if (ui_type == IA_MEMTYPE_OUTPUT) {
+            mOutputBuffer = (pWORD8)pv_alloc_ptr;
+        }
+    }
+    /* End first part */
+
+    return IA_NO_ERROR;
+}
+
+int SoftXAAC::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) {
+
+    UWORD32 ui_init_done;
+    int32_t i_bytes_consumed;
+
+    if (mInputBufferSize < inBufferLength) {
+        ALOGE("Cannot configure decoder: input buffer size %d < inBufferLength %d",
+              mInputBufferSize, inBufferLength);
+        return IA_FATAL_ERROR;
+    }
+
+    /* Copy the buffer passed by Android plugin to codec input buffer */
+    memcpy(mInputBuffer, inBuffer, inBufferLength);
+
+    /* Set number of bytes to be processed */
+    IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                             IA_API_CMD_SET_INPUT_BYTES,
+                                             0,
+                                             &inBufferLength);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+    if (mIsCodecConfigFlushRequired) {
+        /* If codec is already initialized, then GA header is passed again */
+        /* Need to call the Flush API instead of INIT_PROCESS */
+        mIsCodecInitialized = false; /* Codec needs to be Reinitialized after flush */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_INIT,
+                                    IA_CMD_TYPE_GA_HDR,
+                                    NULL);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_GA_HDR");
+    } else {
+        /* Initialize the process */
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_INIT,
+                                    IA_CMD_TYPE_INIT_PROCESS,
+                                    NULL);
+        ALOGV("IA_CMD_TYPE_INIT_PROCESS returned error_code = %d",err_code);
+    }
+
+    /* Checking for end of initialization */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_INIT,
+                                IA_CMD_TYPE_INIT_DONE_QUERY,
+                                &ui_init_done);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_INIT_DONE_QUERY");
+
+    /* How much buffer is used in input buffers */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CURIDX_INPUT_BUF,
+                                0,
+                                &i_bytes_consumed);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
+
+    if (ui_init_done) {
+        err_code = getXAACStreamInfo();
+        ALOGI("Codec initialized: sampFreq %d numChannels %d pcmWdSz %d channelMask %d outputFrameLength %d",
+              mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
+        mIsCodecInitialized = true;
+
+#ifdef ENABLE_MPEG_D_DRC
+        configMPEGDDrc();
+#endif
+    }
+
+    return err_code;
+}
+
+int SoftXAAC::configMPEGDDrc() {
+    IA_ERRORCODE err_code = IA_NO_ERROR;
+    int i_effect_type;
+    int i_loud_norm;
+    int i_target_loudness;
+    unsigned int i_sbr_mode;
+    int i;
+
+#ifdef ENABLE_MPEG_D_DRC
+    {
+
+    /* Sampling Frequency */
+    {
+      err_code =
+          ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                         IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ, &mSampFreq);
+      RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ");
+    }
+    /* Total Number of Channels */
+    {
+      err_code =
+          ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                         IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels);
+      RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS");
+    }
+
+    /* PCM word size  */
+    {
+      err_code =
+          ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                         IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz);
+      RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ");
+    }
+
+    /*Set Effect Type*/
+
+    {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
+
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
+
+    }
+
+/*Set target loudness */
+
+    {
+        err_code = ixheaacd_dec_api(
+            mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, &i_target_loudness);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
+
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
+
+    }
+
+    /*Set loud_norm_flag*/
+    {
+        err_code = ixheaacd_dec_api(
+            mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm);
+        RETURN_IF_NE(err_code, IA_NO_ERROR , err_code,"IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
+
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
+        RETURN_IF_NE(err_code, IA_NO_ERROR , err_code,"IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
+
+    }
+
+    err_code = ixheaacd_dec_api(
+        mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+        IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, &i_sbr_mode);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
+
+    if (i_sbr_mode != 0) {
+        WORD32 frame_length;
+        if (i_sbr_mode == 1) {
+            frame_length = 2048;
+        } else if (i_sbr_mode == 3) {
+            frame_length = 4096;
+        } else {
+            frame_length = 1024;
+        }
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE, &frame_length);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE");
+    }
+
+    err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                              IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, NULL);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
+
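+    // Allocate and register the first two memory regions requested by the MPEG-D DRC library.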
+    for (i = 0; i < (WORD32)2; i++) {
+        WORD32 ui_size, ui_alignment, ui_type;
+        pVOID pv_alloc_ptr;
+
+        /* Get memory size */
+        err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                  IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
+
+        /* Get memory alignment */
+        err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                  IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i, &ui_alignment);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
+
+        /* Get memory type */
+        err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                  IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
+
+        mMemoryArray[mMallocCount] = memalign(4, ui_size);
+        if (mMemoryArray[mMallocCount] == NULL) {
+            ALOGE("Cannot allocate DRC memory of size %d", ui_size);
+            return IA_FATAL_ERROR;
+        }
+        pv_alloc_ptr = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
+        mMallocCount++;
+
+        /* Set the buffer pointer */
+        err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                  IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_MEM_PTR");
+    }
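+    // Allocate 16 KB staging buffers for the DRC bitstream input (memory index 2) and output (index 3).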
+    {
+        WORD32 ui_size = 8192 * 2;
+
+        mMemoryArray[mMallocCount] = memalign(4, ui_size);
+        if (mMemoryArray[mMallocCount] == NULL) {
+            ALOGE("Cannot allocate DRC input buffer of size %d", ui_size);
+            return IA_FATAL_ERROR;
+        }
+        drc_ip_buf = (int8_t *)mMemoryArray[mMallocCount];
+        mMallocCount++;
+        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR,
+                                  2, drc_ip_buf);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_MEM_PTR");
+
+        mMemoryArray[mMallocCount] = memalign(4, ui_size);
+        if (mMemoryArray[mMallocCount] == NULL) {
+            ALOGE("Cannot allocate DRC output buffer of size %d", ui_size);
+            return IA_FATAL_ERROR;
+        }
+        drc_op_buf = (int8_t *)mMemoryArray[mMallocCount];
+        mMallocCount++;
+        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR,
+                                  3, drc_op_buf);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_MEM_PTR");
+    }
+    /* ITTIAM: DRC buffers
+       buf[0] - contains the loudness-related extension element payload
+       buf[1] - contains the extension element payload */
+    {
+      VOID *p_array[2][16];
+      WORD32 ii;
+      WORD32 buf_sizes[2][16];
+      WORD32 num_elements;
+      WORD32 num_config_ext;
+      WORD32 bit_str_fmt = 1;
+      WORD32 uo_num_chan;
+
+      memset(buf_sizes, 0, 32 * sizeof(WORD32));
+
+      err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+          IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES, &buf_sizes[0][0]);
+
+
+      err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+          IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR, &p_array);
+
+
+      err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                IA_CMD_TYPE_INIT_SET_BUFF_PTR, 0);
+
+      err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+          IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE, &num_elements);
+
+      err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+          IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT, &num_config_ext);
+
+      for (ii = 0; ii < num_config_ext; ii++) {
+        /* copy loudness bitstream */
+        if (buf_sizes[0][ii] > 0) {
+          memcpy(drc_ip_buf, p_array[0][ii], buf_sizes[0][ii]);
+
+          /* Set bitstream_split_format */
+          err_code = ia_drc_dec_api(
+              mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+
+          /* Set number of bytes to be processed */
+          err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                    IA_API_CMD_SET_INPUT_BYTES_IL_BS, 0,
+                                    &buf_sizes[0][ii]);
+
+          /* Execute process */
+          err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                    IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF, NULL);
+
+          drc_flag = 1;
+        }
+      }
+
+      for (ii = 0; ii < num_elements; ii++) {
+        /* copy config bitstream */
+        if (buf_sizes[1][ii] > 0) {
+          memcpy(drc_ip_buf, p_array[1][ii], buf_sizes[1][ii]);
+
+          /* Set bitstream_split_format */
+          err_code = ia_drc_dec_api(
+              mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+
+          /* Set number of bytes to be processed */
+          err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                    IA_API_CMD_SET_INPUT_BYTES_IC_BS, 0,
+                                    &buf_sizes[1][ii]);
+
+          /* Execute process */
+          err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                    IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF, NULL);
+
+          drc_flag = 1;
+        }
+      }
+
+      if (drc_flag == 1) {
+        mpegd_drc_present = 1;
+      } else {
+        mpegd_drc_present = 0;
+      }
+
+      /* Read interface buffer config bitstream */
+      if (mpegd_drc_present == 1) {
+        WORD32 interface_is_present = 1;
+
+        err_code = ia_drc_dec_api(
+            mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+            IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT, &interface_is_present);
+
+        /* Execute process */
+        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                  IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF, NULL);
+
+        err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                  IA_CMD_TYPE_INIT_PROCESS, NULL);
+
+        err_code = ia_drc_dec_api(
+            mMpegDDrcHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &uo_num_chan);
+      }
+    }
+    }
+#endif
+
+    return err_code;
+}
+
+int SoftXAAC::decodeXAACStream(uint8_t* inBuffer,
+                               uint32_t inBufferLength,
+                               int32_t *bytesConsumed,
+                               int32_t *outBytes) {
+    if (mInputBufferSize < inBufferLength) {
+        ALOGE("Cannot decode: input buffer size %d < inBufferLength %d",
+              mInputBufferSize, inBufferLength);
+        return -1;
+    }
+
+    /* Copy the buffer passed by Android plugin to codec input buffer */
+    memcpy(mInputBuffer,inBuffer,inBufferLength);
+
+    /* Set number of bytes to be processed */
+    IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                             IA_API_CMD_SET_INPUT_BYTES,
+                                             0,
+                                             &inBufferLength);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+    /* Execute process */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_EXECUTE,
+                                IA_CMD_TYPE_DO_EXECUTE,
+                                NULL);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_DO_EXECUTE");
+
+    UWORD32 ui_exec_done;
+    /* Checking for end of processing */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_EXECUTE,
+                                IA_CMD_TYPE_DONE_QUERY,
+                                &ui_exec_done);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_CMD_TYPE_DONE_QUERY");
+
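+    // When MPEG-D DRC is enabled, forward any gain payload produced for this frame to the DRC decoder.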
+#ifdef ENABLE_MPEG_D_DRC
+     {
+      if (ui_exec_done != 1) {
+        VOID *p_array;        // ITTIAM:buffer to handle gain payload
+        WORD32 buf_size = 0;  // ITTIAM:gain payload length
+        WORD32 bit_str_fmt = 1;
+        WORD32 gain_stream_flag = 1;
+
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN, &buf_size);
+
+
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF, &p_array);
+
+
+        if (buf_size > 0) {
+          /*Set bitstream_split_format */
+          err_code = ia_drc_dec_api(
+              mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+              IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+
+          memcpy(drc_ip_buf, p_array, buf_size);
+          /* Set number of bytes to be processed */
+          err_code =
+              ia_drc_dec_api(mMpegDDrcHandle,
+                             IA_API_CMD_SET_INPUT_BYTES_BS, 0, &buf_size);
+
+          err_code = ia_drc_dec_api(
+              mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+              IA_DRC_DEC_CONFIG_GAIN_STREAM_FLAG, &gain_stream_flag);
+
+
+          /* Execute process */
+          err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+                                    IA_CMD_TYPE_INIT_CPY_BSF_BUFF, NULL);
+
+
+          mpegd_drc_present = 1;
+        }
+      }
+    }
+#endif
+    /* How much buffer is used in input buffers */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CURIDX_INPUT_BUF,
+                                0,
+                                bytesConsumed);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
+
+    /* Get the output bytes */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_OUTPUT_BYTES,
+                                0,
+                                outBytes);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_GET_OUTPUT_BYTES");
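+    // If DRC metadata was present, run the decoded PCM through the MPEG-D DRC post-processor in place.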
+#ifdef ENABLE_MPEG_D_DRC
+
+    if (mpegd_drc_present == 1) {
+      memcpy(drc_ip_buf, mOutputBuffer, *outBytes);
+      err_code = ia_drc_dec_api(mMpegDDrcHandle,
+                                IA_API_CMD_SET_INPUT_BYTES, 0, outBytes);
+
+
+      err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_EXECUTE,
+                                IA_CMD_TYPE_DO_EXECUTE, NULL);
+
+      memcpy(mOutputBuffer, drc_op_buf, *outBytes);
+    }
+#endif
+    return err_code;
+}
+
+int SoftXAAC::deInitXAACDecoder() {
+    ALOGI("deInitXAACDecoder");
+
+    /* Tell that the input is over in this buffer */
+    IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                             IA_API_CMD_INPUT_OVER,
+                                             0,
+                                             NULL);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_API_CMD_INPUT_OVER");
+
+    for (int i = 0; i < mMallocCount; i++) {
+        if (mMemoryArray[i]) {
+            free(mMemoryArray[i]);
+        }
+    }
+    mMallocCount = 0;
+
+    return err_code;
+}
+
+IA_ERRORCODE SoftXAAC::getXAACStreamInfo() {
+    IA_ERRORCODE err_code = IA_NO_ERROR;
+
+    /* Sampling frequency */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ,
+                                &mSampFreq);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ");
+
+    /* Total Number of Channels */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS,
+                                &mNumChannels);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS");
+
+    /* PCM word size */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ,
+                                &mPcmWdSz);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ");
+
+    /* channel mask to tell the arrangement of channels in bit stream */
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK,
+                                &mChannelMask);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK");
+
+    /* Channel mode to tell MONO/STEREO/DUAL-MONO/NONE_OF_THESE */
+    UWORD32 ui_channel_mode;
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE,
+                                &ui_channel_mode);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE");
+    if (ui_channel_mode == 0)
+        ALOGV("Channel Mode: MONO_OR_PS\n");
+    else if (ui_channel_mode == 1)
+        ALOGV("Channel Mode: STEREO\n");
+    else if (ui_channel_mode == 2)
+        ALOGV("Channel Mode: DUAL-MONO\n");
+    else
+        ALOGV("Channel Mode: NONE_OF_THESE or MULTICHANNEL\n");
+
+    /* Channel mode to tell SBR PRESENT/NOT_PRESENT */
+    UWORD32 ui_sbr_mode;
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_GET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE,
+                                &ui_sbr_mode);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
+    if (ui_sbr_mode == 0)
+        ALOGV("SBR Mode: NOT_PRESENT\n");
+    else if (ui_sbr_mode == 1)
+        ALOGV("SBR Mode: PRESENT\n");
+    else
+        ALOGV("SBR Mode: ILLEGAL\n");
+
+    /* mOutputFrameLength = 1024 * (1 + SBR_MODE) for AAC.  */
+    /* For USAC it could be 1024 * 3; querying this is not  */
+    /* yet supported by the codec.                          */
+    mOutputFrameLength = 1024 * (1 + ui_sbr_mode);
+
+    ALOGI("mOutputFrameLength %d ui_sbr_mode %d", mOutputFrameLength, ui_sbr_mode);
+
+    return IA_NO_ERROR;
+}
+
+IA_ERRORCODE SoftXAAC::setXAACDRCInfo(int32_t drcCut,
+                                      int32_t drcBoost,
+                                      int32_t drcRefLevel,
+                                      int32_t drcHeavyCompression
+                                #ifdef ENABLE_MPEG_D_DRC
+                                      ,int32_t drEffectType
+                                #endif
+                                      ) {
+    IA_ERRORCODE err_code = IA_NO_ERROR;
+
+    int32_t ui_drc_enable = 1;
+    int32_t i_effect_type, i_target_loudness, i_loud_norm;
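+    // Enable DRC and apply the caller-supplied parameters; a value of -1 means "leave the codec default".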
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE,
+                                &ui_drc_enable);
+    RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE");
+
+    if (drcCut != -1) {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_CONFIG_PARAM,
+                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT,
+                                    &drcCut);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
+    }
+
+    if (drcBoost != -1) {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_CONFIG_PARAM,
+                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST,
+                                    &drcBoost);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
+    }
+
+    if (drcRefLevel != -1) {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_CONFIG_PARAM,
+                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL,
+                                    &drcRefLevel);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
+    }
+#ifdef ENABLE_MPEG_D_DRC
+    if (drcRefLevel != -1) {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_CONFIG_PARAM,
+                                    IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS,
+                                    &drcRefLevel);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
+    }
+#endif
+    if (drcHeavyCompression != -1) {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                    IA_API_CMD_SET_CONFIG_PARAM,
+                                    IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP,
+                                    &drcHeavyCompression);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
+    }
+
+#ifdef ENABLE_MPEG_D_DRC
+    err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+                                IA_API_CMD_SET_CONFIG_PARAM,
+                                IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE,
+                                &drEffectType);
+#endif
+
+#ifdef ENABLE_MPEG_D_DRC
+    /*Set Effect Type*/
+
+    {
+        err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
+
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
+
+    }
+
+/*Set target loudness */
+
+    {
+        err_code = ixheaacd_dec_api(
+            mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, &i_target_loudness);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness);
+        RETURN_IF_NE(err_code, IA_NO_ERROR, err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
+
+    }
+    /*Set loud_norm_flag*/
+    {
+        err_code = ixheaacd_dec_api(
+            mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+            IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm);
+        RETURN_IF_NE(err_code, IA_NO_ERROR , err_code,"IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
+
+        err_code =
+            ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+                IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
+
+        RETURN_IF_NE(err_code, IA_NO_ERROR , err_code,"IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
+
+    }
+
+#endif
+
+
+    return IA_NO_ERROR;
+}
+
+}  // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+        const char *name, const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+    ALOGI("createSoftOMXComponent for SoftXAACDEC");
+    return new android::SoftXAAC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.h b/media/libstagefright/codecs/xaacdec/SoftXAAC.h
new file mode 100644
index 0000000..11a9c77
--- /dev/null
+++ b/media/libstagefright/codecs/xaacdec/SoftXAAC.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFTXAAC_H_
+#define SOFTXAAC_H_
+
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#include "ixheaacd_type_def.h"
+#include "ixheaacd_error_standards.h"
+#include "ixheaacd_error_handler.h"
+#include "ixheaacd_apicmd_standards.h"
+#include "ixheaacd_memory_standards.h"
+#include "ixheaacd_aac_config.h"
+
+#include "impd_apicmd_standards.h"
+#include "impd_drc_config_params.h"
+
+#define MAX_MEM_ALLOCS 100
+
+extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj,
+                        WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
+extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj,
+                        WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
+extern "C"  IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj,
+                                       pWORD32 pi_samp_freq,
+                                       pWORD32 pi_num_chan,
+                                       pWORD32 pi_pcm_wd_sz,
+                                       pWORD32 pi_channel_mask);
+
+namespace android {
+
+struct SoftXAAC : public SimpleSoftOMXComponent {
+    SoftXAAC(const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component);
+
+protected:
+    virtual ~SoftXAAC();
+
+    virtual OMX_ERRORTYPE internalGetParameter(
+            OMX_INDEXTYPE index, OMX_PTR params);
+
+    virtual OMX_ERRORTYPE internalSetParameter(
+            OMX_INDEXTYPE index, const OMX_PTR params);
+
+    virtual void onQueueFilled(OMX_U32 portIndex);
+    virtual void onPortFlushCompleted(OMX_U32 portIndex);
+    virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+    virtual void onReset();
+
+private:
+    enum {
+        kNumInputBuffers        = 4,
+        kNumOutputBuffers       = 4,
+        kNumDelayBlocksMax      = 8,
+    };
+
+    bool mIsADTS;
+    size_t mInputBufferCount;
+    size_t mOutputBufferCount;
+    bool mSignalledError;
+    OMX_BUFFERHEADERTYPE *mLastInHeader;
+    int64_t mPrevTimestamp;
+    int64_t mCurrentTimestamp;
+    uint32_t mBufSize;
+
+    enum {
+        NONE,
+        AWAITING_DISABLED,
+        AWAITING_ENABLED
+    } mOutputPortSettingsChange;
+
+    void initPorts();
+    status_t initDecoder();
+    bool isConfigured() const;
+    int drainDecoder();
+    int initXAACDecoder();
+    int deInitXAACDecoder();
+
+    int configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
+    int configMPEGDDrc();
+    int decodeXAACStream(uint8_t* inBuffer,
+                         uint32_t inBufferLength,
+                         int32_t *bytesConsumed,
+                         int32_t *outBytes);
+
+    void configflushDecode();
+    IA_ERRORCODE getXAACStreamInfo();
+    IA_ERRORCODE setXAACDRCInfo(int32_t drcCut,
+                                int32_t drcBoost,
+                                int32_t drcRefLevel,
+                                int32_t drcHeavyCompression
+#ifdef ENABLE_MPEG_D_DRC
+                                ,int32_t drEffectType
+#endif
+                               );
+
+    bool mEndOfInput;
+    bool mEndOfOutput;
+
+    void*       mXheaacCodecHandle;
+    void*       mMpegDDrcHandle;
+    uint32_t    mInputBufferSize;
+    uint32_t    mOutputFrameLength;
+    int8_t*     mInputBuffer;
+    int8_t*     mOutputBuffer;
+    int32_t     mSampFreq;
+    int32_t     mNumChannels;
+    int32_t     mPcmWdSz;
+    int32_t     mChannelMask;
+    bool        mIsCodecInitialized;
+    bool        mIsCodecConfigFlushRequired;
+    int8_t *drc_ip_buf;
+    int8_t *drc_op_buf;
+    int32_t mpegd_drc_present;
+    int32_t drc_flag;
+//    int32_t is_drc_enabled;
+
+    void*       mMemoryArray[MAX_MEM_ALLOCS];
+    int32_t     mMallocCount;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SoftXAAC);
+
+};
+
+}  // namespace android
+
+#endif  // SOFTXAAC_H_
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 43304aa..df66ac6 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -197,6 +197,7 @@
         CHECK(mNumItems < kMaxNumItems);
         i = mNumItems++;
         item = &mItems[i];
+        item->mType = kTypeInt32;
         item->setName(name, len);
     }
 
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index f292c47..9f39b5e 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -1029,7 +1029,8 @@
     sp<AMessage> itemMeta;
     int64_t itemDurationUs;
     int32_t targetDuration;
-    if (mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
+    if (mPlaylist->meta() != NULL
+            && mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
         do {
             --index;
             if (!mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 1cbf865..ad60f46 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -223,6 +223,7 @@
     virtual void signalEndOfInputStream() = 0;
 
     typedef CodecBase *(*CreateCodecFunc)(void);
+    typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
 protected:
     CodecBase() = default;
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 7b41362..f18940d 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -132,7 +132,7 @@
     status_t startTracks(MetaData *params);
     size_t numTracks();
     int64_t estimateMoovBoxSize(int32_t bitRate);
-    int64_t estimateFileLevelMetaSize();
+    int64_t estimateFileLevelMetaSize(MetaData *params);
     void writeCachedBoxToFile(const char *type);
 
     struct Chunk {
@@ -167,8 +167,10 @@
     Condition       mChunkReadyCondition;   // Signal that chunks are available
 
     // HEIF writing
+    typedef key_value_pair_t< const char *, Vector<uint16_t> > ItemRefs;
     typedef struct _ItemInfo {
         bool isGrid() const { return !strcmp("grid", itemType); }
+        bool isImage() const { return !strcmp("hvc1", itemType) || isGrid(); }
         const char *itemType;
         uint16_t itemId;
         bool isPrimary;
@@ -188,7 +190,7 @@
             };
         };
         Vector<uint16_t> properties;
-        Vector<uint16_t> dimgRefs;
+        Vector<ItemRefs> refsList;
     } ItemInfo;
 
     typedef struct _ItemProperty {
@@ -204,6 +206,7 @@
     uint32_t mPrimaryItemId;
     uint32_t mAssociationEntryCount;
     uint32_t mNumGrids;
+    bool mHasRefs;
     Vector<ItemInfo> mItems;
     Vector<ItemProperty> mProperties;
 
@@ -252,11 +255,12 @@
     void initInternal(int fd, bool isFirstSession);
 
     // Acquire lock before calling these methods
-    off64_t addSample_l(MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten);
+    off64_t addSample_l(MediaBuffer *buffer, bool usePrefix, bool isExif, size_t *bytesWritten);
     void addLengthPrefixedSample_l(MediaBuffer *buffer);
     void addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
     uint16_t addProperty_l(const ItemProperty &);
     uint16_t addItem_l(const ItemInfo &);
+    void addRefs_l(uint16_t itemId, const ItemRefs &);
 
     bool exceedsFileSizeLimit();
     bool use32BitFileOffset() const;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 67808f1..ad02004 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -61,9 +61,11 @@
     };
 
     enum BufferFlags {
-        BUFFER_FLAG_SYNCFRAME   = 1,
-        BUFFER_FLAG_CODECCONFIG = 2,
-        BUFFER_FLAG_EOS         = 4,
+        BUFFER_FLAG_SYNCFRAME     = 1,
+        BUFFER_FLAG_CODECCONFIG   = 2,
+        BUFFER_FLAG_EOS           = 4,
+        BUFFER_FLAG_PARTIAL_FRAME = 8,
+        BUFFER_FLAG_MUXER_DATA    = 16,
     };
 
     enum {
diff --git a/media/libstagefright/include/media/stagefright/PersistentSurface.h b/media/libstagefright/include/media/stagefright/PersistentSurface.h
index d8b75a2..49b36c9 100644
--- a/media/libstagefright/include/media/stagefright/PersistentSurface.h
+++ b/media/libstagefright/include/media/stagefright/PersistentSurface.h
@@ -18,22 +18,34 @@
 
 #define PERSISTENT_SURFACE_H_
 
-#include <gui/IGraphicBufferProducer.h>
 #include <android/IGraphicBufferSource.h>
-#include <media/stagefright/foundation/ABase.h>
 #include <binder/Parcel.h>
+#include <hidl/HidlSupport.h>
+#include <hidl/HybridInterface.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <media/stagefright/foundation/ABase.h>
+
+using android::hidl::base::V1_0::IBase;
 
 namespace android {
 
 struct PersistentSurface : public RefBase {
     PersistentSurface() {}
 
+    // create an OMX persistent surface
     PersistentSurface(
             const sp<IGraphicBufferProducer>& bufferProducer,
             const sp<IGraphicBufferSource>& bufferSource) :
         mBufferProducer(bufferProducer),
         mBufferSource(bufferSource) { }
 
+    // create a HIDL persistent surface
+    PersistentSurface(
+            const sp<IGraphicBufferProducer>& bufferProducer,
+            const sp<IBase>& hidlTarget) :
+        mBufferProducer(bufferProducer),
+        mHidlTarget(hidlTarget) { }
+
     sp<IGraphicBufferProducer> getBufferProducer() const {
         return mBufferProducer;
     }
@@ -42,9 +54,25 @@
         return mBufferSource;
     }
 
+    sp<IBase> getHidlTarget() const {
+        return mHidlTarget;
+    }
+
     status_t writeToParcel(Parcel *parcel) const {
         parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
+        // this can handle null
         parcel->writeStrongBinder(IInterface::asBinder(mBufferSource));
+        // write hidl target
+        if (mHidlTarget != nullptr) {
+            HalToken token;
+            bool result = createHalToken(mHidlTarget, &token);
+            parcel->writeBool(result);
+            if (result) {
+                parcel->writeByteArray(token.size(), token.data());
+            }
+        } else {
+            parcel->writeBool(false);
+        }
         return NO_ERROR;
     }
 
@@ -53,12 +81,24 @@
                 parcel->readStrongBinder());
         mBufferSource = interface_cast<IGraphicBufferSource>(
                 parcel->readStrongBinder());
+        // read hidl target
+        bool haveHidlTarget = parcel->readBool();
+        if (haveHidlTarget) {
+            std::vector<uint8_t> tokenVector;
+            parcel->readByteVector(&tokenVector);
+            HalToken token = HalToken(tokenVector);
+            mHidlTarget = retrieveHalInterface(token);
+            deleteHalToken(token);
+        } else {
+            mHidlTarget.clear();
+        }
         return NO_ERROR;
     }
 
 private:
     sp<IGraphicBufferProducer> mBufferProducer;
     sp<IGraphicBufferSource> mBufferSource;
+    sp<IBase> mHidlTarget;
 
     DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
 };
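
The new parceling code writes a presence flag and then the HIDL token bytes, so a reader can tell "no HIDL target" apart from an empty token. Below is a stand-alone sketch of that optional-blob wire format, assuming it is just a bool plus length-prefixed bytes; it uses a std::vector as a stand-in for Parcel and is illustrative, not the Parcel API:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

// Append a presence flag and, if present, a length-prefixed byte array,
// mirroring the shape of writeToParcel() above.
static void writeOptionalToken(std::vector<uint8_t>& out,
                               const std::vector<uint8_t>* token) {
    out.push_back(token != nullptr ? 1 : 0);
    if (token != nullptr) {
        uint32_t len = static_cast<uint32_t>(token->size());
        const uint8_t* p = reinterpret_cast<const uint8_t*>(&len);
        out.insert(out.end(), p, p + sizeof(len));
        out.insert(out.end(), token->begin(), token->end());
    }
}

// Read the flag back; return true and fill 'token' only when one was written.
static bool readOptionalToken(const std::vector<uint8_t>& in, size_t& pos,
                              std::vector<uint8_t>* token) {
    if (in[pos++] == 0) return false;
    uint32_t len = 0;
    std::memcpy(&len, in.data() + pos, sizeof(len));
    pos += sizeof(len);
    token->assign(in.begin() + pos, in.begin() + pos + len);
    pos += len;
    return true;
}

int main() {
    std::vector<uint8_t> parcel, token{0xde, 0xad, 0xbe, 0xef}, back;
    writeOptionalToken(parcel, &token);
    size_t pos = 0;
    assert(readOptionalToken(parcel, pos, &back) && back == token);
    return 0;
}
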
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 227d33e..cc67834 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -354,7 +354,7 @@
       mQuirks(0),
       mBufferIDCount(0),
       mRestorePtsFailed(false),
-      mMaxTimestampGapUs(-1ll),
+      mMaxTimestampGapUs(0ll),
       mPrevOriginalTimeUs(-1ll),
       mPrevModifiedTimeUs(-1ll)
 {
@@ -1879,7 +1879,9 @@
         return BAD_VALUE;
     }
 
-    mMaxTimestampGapUs = (int64_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+    // The incoming number is an int32_t contained in an OMX_U32.
+    // Cast to int32_t first, then to int64_t, so the sign is preserved.
+    mMaxTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
 
     return OK;
 }
@@ -1903,12 +1905,26 @@
         ALOGV("IN  timestamp: %lld -> %lld",
             static_cast<long long>(originalTimeUs),
             static_cast<long long>(timestamp));
+    } else if (mMaxTimestampGapUs < 0ll) {
+        /*
+         * Apply a fixed timestamp gap between adjacent frames.
+         *
+         * This is used in scenarios like still image capture, where timestamps
+         * on frames could go forward or backward. Some encoders may silently
+         * drop a frame when its timestamp goes backward (or even stays unchanged).
+         */
+        if (mPrevOriginalTimeUs >= 0ll) {
+            timestamp = mPrevModifiedTimeUs - mMaxTimestampGapUs;
+        }
+        ALOGV("IN  timestamp: %lld -> %lld",
+            static_cast<long long>(originalTimeUs),
+            static_cast<long long>(timestamp));
     }
 
     mPrevOriginalTimeUs = originalTimeUs;
     mPrevModifiedTimeUs = timestamp;
 
-    if (mMaxTimestampGapUs > 0ll && !mRestorePtsFailed) {
+    if (mMaxTimestampGapUs != 0ll && !mRestorePtsFailed) {
         mOriginalTimeUs.add(timestamp, originalTimeUs);
     }
 
@@ -1941,7 +1957,7 @@
 void OMXNodeInstance::codecBufferFilled(omx_message &msg) {
     Mutex::Autolock autoLock(mLock);
 
-    if (mMaxTimestampGapUs <= 0ll || mRestorePtsFailed) {
+    if (mMaxTimestampGapUs == 0ll || mRestorePtsFailed) {
         return;
     }
 
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 4946ada..1f3e8c1 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -34,7 +34,12 @@
     const char *mRole;
 
 } kComponents[] = {
+    // two choices for aac decoding.
+    // configurable in media/libstagefright/data/media_codecs_google_audio.xml
+    // default implementation
     { "OMX.google.aac.decoder", "aacdec", "audio_decoder.aac" },
+    // alternate implementation
+    { "OMX.google.xaac.decoder", "xaacdec", "audio_decoder.aac" },
     { "OMX.google.aac.encoder", "aacenc", "audio_encoder.aac" },
     { "OMX.google.amrnb.decoder", "amrdec", "audio_decoder.amrnb" },
     { "OMX.google.amrnb.encoder", "amrnbenc", "audio_encoder.amrnb" },
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
index c436121..a761ef6 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
@@ -288,6 +288,21 @@
 
     bool handleDataSpaceChanged(omx_message &msg);
 
+    /*
+     * Set the max pts gap between frames.
+     *
+     * When the pts gap number is positive, it indicates the maximum pts gap between
+     * two adjacent frames. If two frames are further apart, timestamps will be modified
+     * to meet this requirement before the frames are sent to the encoder.
+     *
+     * When the pts gap number is negative, it indicates that the original timestamps
+     * should always be modified such that all adjacent frames have the same pts gap,
+     * equal to the absolute value of the passed-in number. This option is typically
+     * used when the client wants to make sure all frames are captured even when the
+     * source potentially sends out-of-order frames.
+     *
+     * Timestamps will be restored to the original when the output is sent back to the client.
+     */
     status_t setMaxPtsGapUs(const void *params, size_t size);
     int64_t getCodecTimestamp(OMX_TICKS timestamp);
 
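To make the negative-gap behavior documented above concrete, here is a small stand-alone sketch of the rewrite rule applied on the input side (plain C++; member names mirror OMXNodeInstance but this is not the actual class):

#include <cstdint>
#include <cstdio>

// With a negative max gap, every frame after the first is pushed exactly
// |gap| us past the previous *modified* timestamp, regardless of the original
// pts, so backward-going captures are never silently dropped by the encoder.
struct PtsRewriter {
    int64_t maxGapUs;        // negative: fixed gap of |maxGapUs|
    int64_t prevModifiedUs;  // -1 until the first frame is seen

    int64_t rewrite(int64_t originalUs) {
        int64_t ts = originalUs;
        if (maxGapUs < 0 && prevModifiedUs >= 0) {
            ts = prevModifiedUs - maxGapUs;   // minus a negative adds |gap|
        }
        prevModifiedUs = ts;
        return ts;
    }
};

int main() {
    PtsRewriter r{-33333, -1};                          // ~30 fps spacing
    const int64_t in[] = {0, 500000, 200000, 900000};   // non-monotonic input
    for (int64_t t : in) {
        std::printf("%lld -> %lld\n", (long long)t, (long long)r.rewrite(t));
    }
    return 0;
}
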
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 76b29c9..bdd39c6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1690,6 +1690,7 @@
         recordTrack.clear();
         AudioSystem::releaseInput(portId);
         output.inputId = AUDIO_IO_HANDLE_NONE;
+        output.selectedDeviceId = input.selectedDeviceId;
         portId = AUDIO_PORT_HANDLE_NONE;
     }
     lStatus = AudioSystem::getInputForAttr(&input.attr, &output.inputId,
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index dcf223c..2047dfd 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -595,7 +595,8 @@
             (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
                     || mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
         // Older effects may require exact STEREO position mask.
-        if (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) {
+        if (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
+                && (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_AUXILIARY) {
             ALOGV("Overriding effect input channels %#x as STEREO", mConfig.inputCfg.channels);
             mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
         }
diff --git a/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h b/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
index fac6cbe..32b4440 100644
--- a/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
+++ b/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
@@ -24,6 +24,7 @@
 namespace android {
 
 class DeviceDescriptor;
+class DeviceVector;
 
 class SessionRoute : public RefBase
 {
@@ -98,7 +99,8 @@
     int decRouteActivity(audio_session_t session);
     bool getAndClearRouteChanged(audio_session_t session); // also clears the changed flag
     void log(const char* caption);
-
+    audio_devices_t getActiveDeviceForStream(audio_stream_type_t streamType,
+                                             const DeviceVector& availableDevices);
     // Specify an Output(Sink) route by passing SessionRoute::SOURCE_TYPE_NA in the
     // source argument.
     // Specify an Input(Source) route by passing SessionRoute::AUDIO_STREAM_DEFAULT
diff --git a/services/audiopolicy/common/managerdefinitions/src/SessionRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/SessionRoute.cpp
index 8edd4d1..d34214b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/SessionRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/SessionRoute.cpp
@@ -82,7 +82,7 @@
 void SessionRouteMap::log(const char* caption)
 {
     ALOGI("%s ----", caption);
-    for(size_t index = 0; index < size(); index++) {
+    for (size_t index = 0; index < size(); index++) {
         valueAt(index)->log("  ");
     }
 }
@@ -119,4 +119,22 @@
     }
 }
 
+audio_devices_t SessionRouteMap::getActiveDeviceForStream(audio_stream_type_t streamType,
+                                                          const DeviceVector& availableDevices)
+{
+    audio_devices_t device = AUDIO_DEVICE_NONE;
+
+    for (size_t index = 0; index < size(); index++) {
+        sp<SessionRoute> route = valueAt(index);
+        if (streamType == route->mStreamType && route->isActiveOrChanged()
+                && route->mDeviceDescriptor != 0) {
+            device = route->mDeviceDescriptor->type();
+            if (!availableDevices.getDevicesFromType(device).isEmpty()) {
+                break;
+            }
+        }
+    }
+    return device;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 23c020d..3e13e50 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -453,6 +453,12 @@
             }
             // Use both Bluetooth SCO and phone default output when ringing in normal mode
             if (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION] == AUDIO_POLICY_FORCE_BT_SCO) {
+                if ((strategy == STRATEGY_SONIFICATION) &&
+                        (device & AUDIO_DEVICE_OUT_SPEAKER) &&
+                        (availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER_SAFE)) {
+                    device |= AUDIO_DEVICE_OUT_SPEAKER_SAFE;
+                    device &= ~AUDIO_DEVICE_OUT_SPEAKER;
+                }
                 if (device2 != AUDIO_DEVICE_NONE) {
                     device |= device2;
                     break;
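
The new speaker-safe substitution is a plain bitmask swap: when sonification would hit the regular speaker and a "safe" speaker is available, the safe bit replaces the speaker bit. A stand-alone sketch of that swap (plain C++; the bit values below are illustrative placeholders, not the real audio_devices_t constants):

#include <cstdint>
#include <cstdio>

// Illustrative device bits (placeholders for AUDIO_DEVICE_OUT_SPEAKER and
// AUDIO_DEVICE_OUT_SPEAKER_SAFE).
constexpr uint32_t OUT_SPEAKER      = 1u << 1;
constexpr uint32_t OUT_SPEAKER_SAFE = 1u << 22;

// Route sonification to the safe speaker when it exists, mirroring the
// substitution added in Engine.cpp above.
static uint32_t preferSpeakerSafe(uint32_t device, uint32_t availableDevices) {
    if ((device & OUT_SPEAKER) && (availableDevices & OUT_SPEAKER_SAFE)) {
        device |= OUT_SPEAKER_SAFE;
        device &= ~OUT_SPEAKER;
    }
    return device;
}

int main() {
    std::printf("0x%x\n", preferSpeakerSafe(OUT_SPEAKER, OUT_SPEAKER_SAFE));
    return 0;
}
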
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index ee16e07..3775551 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -368,6 +368,9 @@
                                                       const char *device_name)
 {
     status_t status;
+    String8 reply;
+    AudioParameter param;
+    int isReconfigA2dpSupported = 0;
 
     ALOGV("handleDeviceConfigChange(() device: 0x%X, address %s name %s",
           device, device_address, device_name);
@@ -384,6 +387,26 @@
         return NO_ERROR;
     }
 
+    // For offloaded A2DP, HW modules may have the capability to
+    // configure codecs. Check if any of the loaded HW modules
+    // supports this.
+    // If supported, send a setParameters request to configure the A2DP
+    // codecs and return. There is no need to toggle the device state.
+    if (device & AUDIO_DEVICE_OUT_ALL_A2DP) {
+        reply = mpClientInterface->getParameters(
+                    AUDIO_IO_HANDLE_NONE,
+                    String8(AudioParameter::keyReconfigA2dpSupported));
+        AudioParameter repliedParameters(reply);
+        repliedParameters.getInt(
+                String8(AudioParameter::keyReconfigA2dpSupported), isReconfigA2dpSupported);
+        if (isReconfigA2dpSupported) {
+            const String8 key(AudioParameter::keyReconfigA2dp);
+            param.add(key, String8("true"));
+            mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString());
+            return NO_ERROR;
+        }
+    }
+
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status = setDeviceConnectionState(device,
@@ -1134,7 +1157,9 @@
         }
     } else if (mOutputRoutes.getAndClearRouteChanged(session)) {
         newDevice = getNewOutputDevice(outputDesc, false /*fromCache*/);
-        checkStrategyRoute(getStrategy(stream), output);
+        if (newDevice != outputDesc->device()) {
+            checkStrategyRoute(getStrategy(stream), output);
+        }
     } else {
         newDevice = AUDIO_DEVICE_NONE;
     }
@@ -1385,6 +1410,7 @@
                         (newDevice != desc->device())) {
                     audio_devices_t newDevice2 = getNewOutputDevice(desc, false /*fromCache*/);
                     bool force = desc->device() != newDevice2;
+
                     setOutputDevice(desc,
                                     newDevice2,
                                     force,
@@ -2788,9 +2814,32 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getAudioPort(struct audio_port *port __unused)
+status_t AudioPolicyManager::getAudioPort(struct audio_port *port)
 {
-    return NO_ERROR;
+    if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    sp<DeviceDescriptor> dev = mAvailableOutputDevices.getDeviceFromId(port->id);
+    if (dev != 0) {
+        dev->toAudioPort(port);
+        return NO_ERROR;
+    }
+    dev = mAvailableInputDevices.getDeviceFromId(port->id);
+    if (dev != 0) {
+        dev->toAudioPort(port);
+        return NO_ERROR;
+    }
+    sp<SwAudioOutputDescriptor> out = mOutputs.getOutputFromId(port->id);
+    if (out != 0) {
+        out->toAudioPort(port);
+        return NO_ERROR;
+    }
+    sp<AudioInputDescriptor> in = mInputs.getInputFromId(port->id);
+    if (in != 0) {
+        in->toAudioPort(port);
+        return NO_ERROR;
+    }
+    return BAD_VALUE;
 }
 
 status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch,
@@ -4790,6 +4839,20 @@
         }
     }
 
+    // Check if an explicit routing request exists for an active stream on this output and,
+    // if so, give it priority over any other rule.
+    for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
+        if (outputDesc->isStreamActive((audio_stream_type_t)stream)) {
+            audio_devices_t forcedDevice =
+                    mOutputRoutes.getActiveDeviceForStream(
+                            (audio_stream_type_t)stream, mAvailableOutputDevices);
+
+            if (forcedDevice != AUDIO_DEVICE_NONE) {
+                return forcedDevice;
+            }
+        }
+    }
+
     // check the following by order of priority to request a routing change if necessary:
     // 1: the strategy enforced audible is active and enforced on the output:
     //      use device for strategy enforced audible
@@ -4997,19 +5060,16 @@
 audio_devices_t AudioPolicyManager::getDeviceForStrategy(routing_strategy strategy,
                                                          bool fromCache)
 {
-    // Routing
-    // see if we have an explicit route
-    // scan the whole RouteMap, for each entry, convert the stream type to a strategy
-    // (getStrategy(stream)).
-    // if the strategy from the stream type in the RouteMap is the same as the argument above,
-    // and activity count is non-zero and the device in the route descriptor is available
-    // then select this device.
-    for (size_t routeIndex = 0; routeIndex < mOutputRoutes.size(); routeIndex++) {
-        sp<SessionRoute> route = mOutputRoutes.valueAt(routeIndex);
-        routing_strategy routeStrategy = getStrategy(route->mStreamType);
-        if ((routeStrategy == strategy) && route->isActiveOrChanged() &&
-                (mAvailableOutputDevices.indexOf(route->mDeviceDescriptor) >= 0)) {
-            return route->mDeviceDescriptor->type();
+    // Check if an explicit routing request exists for a stream type corresponding to the
+    // specified strategy and, if so, give it priority over the default routing rules.
+    for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
+        if (getStrategy((audio_stream_type_t)stream) == strategy) {
+            audio_devices_t forcedDevice =
+                    mOutputRoutes.getActiveDeviceForStream(
+                            (audio_stream_type_t)stream, mAvailableOutputDevices);
+            if (forcedDevice != AUDIO_DEVICE_NONE) {
+                return forcedDevice;
+            }
         }
     }
 
@@ -5579,7 +5639,10 @@
     }
 
     float volumeDb = computeVolume(stream, index, device);
-    if (outputDesc->isFixedVolume(device)) {
+    if (outputDesc->isFixedVolume(device) ||
+            // Force VoIP volume to max (0 dB) for Bluetooth SCO
+            ((stream == AUDIO_STREAM_VOICE_CALL || stream == AUDIO_STREAM_BLUETOOTH_SCO) &&
+             (device & AUDIO_DEVICE_OUT_ALL_SCO) != 0)) {
         volumeDb = 0.0f;
     }
 
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 9592b6a..7337f04 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -408,6 +408,7 @@
         client->active = false;
         client->isConcurrent = false;
         client->isVirtualDevice = false; //TODO : update from APM->getInputForAttr()
+        client->deviceId = *selectedDeviceId;
         mAudioRecordClients.add(*portId, client);
     }
 
@@ -434,7 +435,8 @@
     return rawbuffer;
 }
 
-static std::string audioConcurrencyString(AudioPolicyInterface::concurrency_type__mask_t concurrency)
+static std::string audioConcurrencyString(
+        AudioPolicyInterface::concurrency_type__mask_t concurrency)
 {
     char buffer[64]; // oversized
     if (concurrency & AudioPolicyInterface::API_INPUT_CONCURRENCY_ALL) {
@@ -450,6 +452,17 @@
     return &buffer[1];
 }
 
+std::string AudioPolicyService::getDeviceTypeStrForPortId(audio_port_handle_t portId) {
+    std::string typeStr;
+    struct audio_port port = {};
+    port.id = portId;
+    status_t status = mAudioPolicyManager->getAudioPort(&port);
+    if (status == NO_ERROR && port.type == AUDIO_PORT_TYPE_DEVICE) {
+        deviceToString(port.ext.device.type, typeStr);
+    }
+    return typeStr;
+}
+
 status_t AudioPolicyService::startInput(audio_port_handle_t portId, bool *silenced)
 {
     if (mAudioPolicyManager == NULL) {
@@ -488,8 +501,8 @@
 
     }
 
-    // XXX log them all for a while, during some dogfooding.
-    if (1 || status != NO_ERROR) {
+    // only log failures; including successes gets very verbose
+    if (status != NO_ERROR) {
 
         static constexpr char kAudioPolicy[] = "audiopolicy";
 
@@ -498,9 +511,14 @@
         static constexpr char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
         static constexpr char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
         static constexpr char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
+        static constexpr char kAudioPolicyRqstDevice[] =
+                "android.media.audiopolicy.rqst.device";
         static constexpr char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
         static constexpr char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
-        static constexpr char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
+        static constexpr char kAudioPolicyActiveSession[] =
+                "android.media.audiopolicy.active.session";
+        static constexpr char kAudioPolicyActiveDevice[] =
+                "android.media.audiopolicy.active.device";
 
         MediaAnalyticsItem *item = new MediaAnalyticsItem(kAudioPolicy);
         if (item != NULL) {
@@ -508,10 +526,15 @@
             item->setCString(kAudioPolicyReason, audioConcurrencyString(concurrency).c_str());
             item->setInt32(kAudioPolicyStatus, status);
 
-            item->setCString(kAudioPolicyRqstSrc, audioSourceString(client->attributes.source).c_str());
-            item->setCString(kAudioPolicyRqstPkg, std::string(String8(client->opPackageName).string()).c_str());
+            item->setCString(kAudioPolicyRqstSrc,
+                             audioSourceString(client->attributes.source).c_str());
+            item->setCString(kAudioPolicyRqstPkg,
+                             std::string(String8(client->opPackageName).string()).c_str());
             item->setInt32(kAudioPolicyRqstSession, client->session);
 
+            item->setCString(
+                    kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
+
             // figure out who is active
             // NB: might the other party have given up the microphone since then? how sure.
             // perhaps could have given up on it.
@@ -527,8 +550,11 @@
                         // keeps the last of the clients marked active
                         item->setCString(kAudioPolicyActiveSrc,
                                          audioSourceString(other->attributes.source).c_str());
-                        item->setCString(kAudioPolicyActivePkg, std::string(String8(other->opPackageName).string()).c_str());
+                        item->setCString(kAudioPolicyActivePkg,
+                                     std::string(String8(other->opPackageName).string()).c_str());
                         item->setInt32(kAudioPolicyActiveSession, other->session);
+                        item->setCString(kAudioPolicyActiveDevice,
+                                         getDeviceTypeStrForPortId(other->deviceId).c_str());
                     }
                 }
             }
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 3e179c0..407d7a5 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -263,6 +263,8 @@
     // Prints the shell command help
     status_t printHelp(int out);
 
+    std::string getDeviceTypeStrForPortId(audio_port_handle_t portId);
+
     // If recording we need to make sure the UID is allowed to do that. If the UID is idle
     // then it cannot record and gets buffers with zeros - silence. As soon as the UID
     // transitions to an active state we will start reporting buffers with data. This approach
@@ -643,7 +645,8 @@
         const audio_session_t session;       // audio session ID
         bool active;                   // Capture is active or inactive
         bool isConcurrent;             // is allowed to concurrent capture
-        bool isVirtualDevice;          // uses vitual device: updated by APM::getInputForAttr()
+        bool isVirtualDevice;          // uses virtual device: updated by APM::getInputForAttr()
+        audio_port_handle_t deviceId;  // selected input device port ID
     };
 
     // A class automatically clearing and restoring binder caller identity inside
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c49de8e..98d0534 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -624,11 +624,19 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    if (!checkPhysicalCameraId(physicalCameraId)) {
-        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
+    if (physicalCameraId.size() > 0) {
+        std::vector<std::string> physicalCameraIds;
+        std::string physicalId(physicalCameraId.string());
+        bool logicalCamera =
+                CameraProviderManager::isLogicalCamera(mDevice->info(), &physicalCameraIds);
+        if (!logicalCamera ||
+                std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalId) ==
+                physicalCameraIds.end()) {
+            String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
                     mCameraIdStr.string(), physicalCameraId.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
     }
     std::vector<sp<Surface>> surfaces;
     std::vector<sp<IBinder>> binders;
@@ -1144,43 +1152,6 @@
     return binder::Status::ok();
 }
 
-bool CameraDeviceClient::checkPhysicalCameraId(const String8& physicalCameraId) {
-    if (0 == physicalCameraId.size()) {
-        return true;
-    }
-
-    CameraMetadata staticInfo = mDevice->info();
-    camera_metadata_entry_t entryCap;
-    bool isLogicalCam = false;
-
-    entryCap = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-    for (size_t i = 0; i < entryCap.count; ++i) {
-        uint8_t capability = entryCap.data.u8[i];
-        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
-            isLogicalCam = true;
-        }
-    }
-    if (!isLogicalCam) {
-        return false;
-    }
-
-    camera_metadata_entry_t entryIds = staticInfo.find(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);
-    const uint8_t* ids = entryIds.data.u8;
-    size_t start = 0;
-    for (size_t i = 0; i < entryIds.count; ++i) {
-        if (ids[i] == '\0') {
-            if (start != i) {
-                String8 currentId((const char*)ids+start);
-                if (currentId == physicalCameraId) {
-                    return true;
-                }
-            }
-            start = i+1;
-        }
-    }
-    return false;
-}
-
 bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
         int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
         /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 66e9196..43f1a91 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -98,9 +98,14 @@
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     std::vector<std::string> deviceIds;
     for (auto& provider : mProviders) {
-        for (auto& id : provider->mUniqueAPI1CompatibleCameraIds) {
-            deviceIds.push_back(id);
-        }
+        std::vector<std::string> providerDeviceIds = provider->mUniqueAPI1CompatibleCameraIds;
+
+        // API1 apps don't handle logical and physical camera devices well. So
+        // for each [logical, physical1, physical2, ...] id combo, only take the
+        // first id advertised by the HAL and filter out the rest.
+        filterLogicalCameraIdsLocked(providerDeviceIds);
+
+        deviceIds.insert(deviceIds.end(), providerDeviceIds.begin(), providerDeviceIds.end());
     }
 
     std::sort(deviceIds.begin(), deviceIds.end(),
@@ -172,11 +177,7 @@
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
         CameraMetadata* characteristics) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
-    auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
-    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
-
-    return deviceInfo->getCameraCharacteristics(characteristics);
+    return getCameraCharacteristicsLocked(id, characteristics);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -391,6 +392,37 @@
     return ret;
 }
 
+bool CameraProviderManager::isLogicalCamera(const CameraMetadata& staticInfo,
+        std::vector<std::string>* physicalCameraIds) {
+    bool isLogicalCam = false;
+    camera_metadata_ro_entry_t entryCap;
+
+    entryCap = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    for (size_t i = 0; i < entryCap.count; ++i) {
+        uint8_t capability = entryCap.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
+            isLogicalCam = true;
+            break;
+        }
+    }
+    if (!isLogicalCam) {
+        return false;
+    }
+
+    camera_metadata_ro_entry_t entryIds = staticInfo.find(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);
+    const uint8_t* ids = entryIds.data.u8;
+    size_t start = 0;
+    for (size_t i = 0; i < entryIds.count; ++i) {
+        if (ids[i] == '\0') {
+            if (start != i) {
+                physicalCameraIds->push_back((const char*)ids+start);
+            }
+            start = i+1;
+        }
+    }
+    return true;
+}
+
 status_t CameraProviderManager::addProviderLocked(const std::string& newProvider, bool expected) {
     for (const auto& providerInfo : mProviders) {
         if (providerInfo->mProviderName == newProvider) {
@@ -599,7 +631,7 @@
 
     mUniqueCameraIds.insert(id);
     if (isAPI1Compatible) {
-        mUniqueAPI1CompatibleCameraIds.insert(id);
+        mUniqueAPI1CompatibleCameraIds.push_back(id);
     }
 
     if (parsedId != nullptr) {
@@ -613,7 +645,9 @@
         if ((*it)->mId == id) {
             mUniqueCameraIds.erase(id);
             if ((*it)->isAPI1Compatible()) {
-                mUniqueAPI1CompatibleCameraIds.erase(id);
+                mUniqueAPI1CompatibleCameraIds.erase(std::remove(
+                        mUniqueAPI1CompatibleCameraIds.begin(),
+                        mUniqueAPI1CompatibleCameraIds.end(), id));
             }
             mDevices.erase(it);
             break;
@@ -1417,5 +1451,51 @@
     return OK;
 }
 
+status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
+        CameraMetadata* characteristics) const {
+    auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    return deviceInfo->getCameraCharacteristics(characteristics);
+}
+
+void CameraProviderManager::filterLogicalCameraIdsLocked(
+        std::vector<std::string>& deviceIds) const
+{
+    std::unordered_set<std::string> removedIds;
+
+    for (auto& deviceId : deviceIds) {
+        CameraMetadata info;
+        status_t res = getCameraCharacteristicsLocked(deviceId, &info);
+        if (res != OK) {
+            ALOGE("%s: Failed to getCameraCharacteristics for id %s", __FUNCTION__,
+                    deviceId.c_str());
+            return;
+        }
+
+        // idCombo contains the ids of a logical camera and its physical cameras
+        std::vector<std::string> idCombo;
+        bool logicalCamera = CameraProviderManager::isLogicalCamera(info, &idCombo);
+        if (!logicalCamera) {
+            continue;
+        }
+        idCombo.push_back(deviceId);
+
+        for (auto& id : deviceIds) {
+            auto foundId = std::find(idCombo.begin(), idCombo.end(), id);
+            if (foundId == idCombo.end()) {
+                continue;
+            }
+
+            idCombo.erase(foundId);
+            removedIds.insert(idCombo.begin(), idCombo.end());
+            break;
+        }
+    }
+
+    deviceIds.erase(std::remove_if(deviceIds.begin(), deviceIds.end(),
+            [&removedIds](const std::string& s) {return removedIds.find(s) != removedIds.end();}),
+            deviceIds.end());
+}
 
 } // namespace android
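
The physical-id list parsed by isLogicalCamera() is packed by the HAL as NUL-separated strings inside one metadata byte array. A stand-alone sketch of that parse, detached from camera_metadata (plain C++; splitPhysicalIds is an illustrative helper, not part of CameraProviderManager):

#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// Split a NUL-separated id blob (e.g. "2\0" "3\0") into individual ids,
// skipping empty segments, the same walk isLogicalCamera() performs above.
static std::vector<std::string> splitPhysicalIds(const uint8_t* ids, size_t count) {
    std::vector<std::string> out;
    size_t start = 0;
    for (size_t i = 0; i < count; ++i) {
        if (ids[i] == '\0') {
            if (start != i) {
                out.emplace_back(reinterpret_cast<const char*>(ids) + start);
            }
            start = i + 1;
        }
    }
    return out;
}

int main() {
    const uint8_t blob[] = {'2', '\0', '3', '\0'};
    for (const auto& id : splitPhysicalIds(blob, sizeof(blob))) {
        std::printf("physical id: %s\n", id.c_str());
    }
    return 0;
}
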
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index bbe6789..b8b8b8c 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -230,6 +230,13 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
+    /*
+     * Check whether the camera described by staticInfo is a logical camera
+     * and, if so, return the physical camera ids.
+     */
+    static bool isLogicalCamera(const CameraMetadata& staticInfo,
+            std::vector<std::string>* physicalCameraIds);
+
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
@@ -314,7 +321,7 @@
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
         int mUniqueDeviceCount;
-        std::unordered_set<std::string> mUniqueAPI1CompatibleCameraIds;
+        std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
 
         // HALv1-specific camera fields, including the actual device interface
         struct DeviceInfo1 : public DeviceInfo {
@@ -414,6 +421,9 @@
     static const char* torchStatusToString(
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
+    status_t getCameraCharacteristicsLocked(const std::string &id,
+            CameraMetadata* characteristics) const;
+    void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 6b958a8..d9bcba3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -77,7 +77,8 @@
         mNextShutterFrameNumber(0),
         mNextReprocessShutterFrameNumber(0),
         mListener(NULL),
-        mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID)
+        mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
+        mLastTemplateId(-1)
 {
     ATRACE_CALL();
     camera3_callback_ops::notify = &sNotify;
@@ -1597,6 +1598,18 @@
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
+    // In case the client doesn't include any session parameter, try a
+    // speculative configuration using the values from the last cached
+    // default request.
+    if (sessionParams.isEmpty() &&
+            ((mLastTemplateId > 0) && (mLastTemplateId < CAMERA3_TEMPLATE_COUNT)) &&
+            (!mRequestTemplateCache[mLastTemplateId].isEmpty())) {
+        ALOGV("%s: Speculative session param configuration with template id: %d", __func__,
+                mLastTemplateId);
+        return filterParamsAndConfigureLocked(mRequestTemplateCache[mLastTemplateId],
+                operatingMode);
+    }
+
     return filterParamsAndConfigureLocked(sessionParams, operatingMode);
 }
 
@@ -1673,6 +1686,7 @@
 
         if (!mRequestTemplateCache[templateId].isEmpty()) {
             *request = mRequestTemplateCache[templateId];
+            mLastTemplateId = templateId;
             return OK;
         }
     }
@@ -1697,6 +1711,7 @@
         mRequestTemplateCache[templateId].acquire(rawRequest);
 
         *request = mRequestTemplateCache[templateId];
+        mLastTemplateId = templateId;
     }
     return OK;
 }
@@ -4045,6 +4060,7 @@
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
         mPrepareVideoStream(false),
+        mConstrainedMode(false),
         mRequestLatency(kRequestLatencyBinSize),
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()) {
@@ -4068,6 +4084,7 @@
     mLatestSessionParams = sessionParams;
     // Prepare video stream for high speed recording.
     mPrepareVideoStream = isConstrainedHighSpeed;
+    mConstrainedMode = isConstrainedHighSpeed;
 }
 
 status_t Camera3Device::RequestThread::queueRequestList(
@@ -4482,6 +4499,17 @@
     return maxExpectedDuration;
 }
 
+bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
+        const camera_metadata_ro_entry_t& newEntry, const camera_metadata_entry_t& currentEntry) {
+    if (mConstrainedMode && (ANDROID_CONTROL_AE_TARGET_FPS_RANGE == tag) &&
+            (newEntry.count == currentEntry.count) && (currentEntry.count == 2) &&
+            (currentEntry.data.i32[1] == newEntry.data.i32[1])) {
+        return true;
+    }
+
+    return false;
+}
+
 bool Camera3Device::RequestThread::updateSessionParameters(const CameraMetadata& settings) {
     ATRACE_CALL();
     bool updatesDetected = false;
@@ -4514,8 +4542,10 @@
 
             if (isDifferent) {
                 ALOGV("%s: Session parameter tag id %d changed", __FUNCTION__, tag);
+                if (!skipHFRTargetFPSUpdate(tag, entry, lastEntry)) {
+                    updatesDetected = true;
+                }
                 mLatestSessionParams.update(entry);
-                updatesDetected = true;
             }
         } else if (lastEntry.count > 0) {
             // Value has been removed
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 13b83ba..35f799d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -861,6 +861,11 @@
         // Check and update latest session parameters based on the current request settings.
         bool updateSessionParameters(const CameraMetadata& settings);
 
+        // Check whether FPS range session parameter re-configuration is needed in constrained
+        // high speed recording camera sessions.
+        bool skipHFRTargetFPSUpdate(int32_t tag, const camera_metadata_ro_entry_t& newEntry,
+                const camera_metadata_entry_t& currentEntry);
+
         // Re-configure camera using the latest session parameters.
         bool reconfigureCamera();
 
@@ -919,6 +924,8 @@
         // Flag indicating if we should prepare video stream for video requests.
         bool               mPrepareVideoStream;
 
+        bool               mConstrainedMode;
+
         static const int32_t kRequestLatencyBinSize = 40; // in ms
         CameraLatencyHistogram mRequestLatency;
 
@@ -1184,6 +1191,9 @@
 
     metadata_vendor_id_t mVendorTagId;
 
+    // Cached last requested template id
+    int mLastTemplateId;
+
     /**
      * Static callback forwarding methods from HAL to instance
      */
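
The skipHFRTargetFPSUpdate() declaration above boils down to: in constrained high-speed mode, an AE target FPS range change that only moves the lower bound does not force a session reconfiguration. A minimal sketch of that comparison, detached from CameraMetadata (plain C++; canSkipFpsReconfig is an illustrative name):

#include <array>
#include <cstdint>
#include <cstdio>

// Skip reconfiguration only in constrained high-speed mode and only when the
// upper FPS bound is unchanged, matching the check added in Camera3Device.cpp.
static bool canSkipFpsReconfig(bool constrainedMode,
                               const std::array<int32_t, 2>& newRange,
                               const std::array<int32_t, 2>& curRange) {
    return constrainedMode && newRange[1] == curRange[1];
}

int main() {
    std::printf("%d\n", canSkipFpsReconfig(true, {30, 120}, {120, 120}));  // 1: skip
    std::printf("%d\n", canSkipFpsReconfig(true, {30, 60},  {120, 120}));  // 0: reconfigure
    return 0;
}
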
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index f4d5a18..59ac636 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -212,7 +212,11 @@
 
     SP_LOGV("%s: Consumer wants %d buffers, Producer wants %zu", __FUNCTION__,
             maxConsumerBuffers, mMaxHalBuffers);
-    size_t totalBufferCount = maxConsumerBuffers + mMaxHalBuffers;
+    // The output slot count requirement can change depending on the current number
+    // of outputs and the incoming buffer consumption rate. To avoid any issues with
+    // insufficient slots, set the count to the maximum supported. Output surface
+    // buffer allocation is disabled, so no real buffers will be allocated.
+    size_t totalBufferCount = BufferQueue::NUM_BUFFER_SLOTS;
     res = native_window_set_buffer_count(outputQueue.get(),
             totalBufferCount);
     if (res != OK) {