Merge "Select haptic output when haptic-generating effect exists."
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index df1d88e..8fe48c2 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,5 +1,5 @@
 [Hook Scripts]
-mainline_hook = tools/mainline_hook.sh ${PREUPLOAD_COMMIT} "."
+mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
 
 [Builtin Hooks]
 clang_format = true
diff --git a/apex/testing/test_manifest.json b/apex/testing/test_manifest.json
index ddd642e..e1295a2 100644
--- a/apex/testing/test_manifest.json
+++ b/apex/testing/test_manifest.json
@@ -1,4 +1,4 @@
 {
   "name": "com.android.media",
-  "version": 300000000
+  "version": 2147483647
 }
diff --git a/camera/TEST_MAPPING b/camera/TEST_MAPPING
new file mode 100644
index 0000000..683e183
--- /dev/null
+++ b/camera/TEST_MAPPING
@@ -0,0 +1,11 @@
+{
+  "postsubmit": [
+    {
+      "name": "CtsCameraTestCases"
+    },
+    {
+      "name": "CtsCameraTestCases",
+      "keywords": ["primary-device"]
+    }
+  ]
+}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index dc7f88a..09a333b 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -37,7 +37,7 @@
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
-    compile_multilib: "32",
+    compile_multilib: "prefer32",
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 2f2299f..d937865 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -270,7 +270,15 @@
     // sequence id -> last frame number holder map
     struct RequestLastFrameNumberHolder {
         int64_t lastFrameNumber;
+        // Whether the current sequence is completed (all capture results have
+        // been generated). The entry may have this set to true but still stay
+        // in the map if not all inflight requests in the sequence have
+        // completed yet.
         bool isSequenceCompleted = false;
+        // Whether all inflight requests in the sequence are completed
+        // (capture results and buffers have been generated). The entry may
+        // have this set to true but still stay in the map if the capture
+        // results haven't been delivered to the app yet.
         bool isInflightCompleted = false;
         RequestLastFrameNumberHolder(int64_t lastFN) :
                 lastFrameNumber(lastFN) {}
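
A minimal sketch of how these two flags could gate removal of an entry from the sequence map; the SeqHolder struct and maybeRemoveSequence() helper below are hypothetical simplifications, not the actual ACameraDevice bookkeeping:

    #include <cstdint>
    #include <map>

    // Hypothetical, simplified mirror of the holder above.
    struct SeqHolder {
        int64_t lastFrameNumber;
        bool isSequenceCompleted = false;
        bool isInflightCompleted = false;
    };

    // Erase a sequence entry only once both completion flags are set and the
    // capture results have been delivered to the app.
    void maybeRemoveSequence(std::map<int, SeqHolder> &sequences, int sequenceId,
                             bool resultsDeliveredToApp) {
        auto it = sequences.find(sequenceId);
        if (it == sequences.end()) {
            return;
        }
        const SeqHolder &h = it->second;
        if (h.isSequenceCompleted && h.isInflightCompleted && resultsDeliveredToApp) {
            sequences.erase(it);
        }
    }

    int main() {
        std::map<int, SeqHolder> sequences;
        sequences[7] = SeqHolder{42, true, true};
        maybeRemoveSequence(sequences, 7, /*resultsDeliveredToApp=*/true);  // entry erased
        return sequences.empty() ? 0 : 1;
    }
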
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 072bb02..a840bd1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_CAMERA_METADATA_H
 #define _NDK_CAMERA_METADATA_H
 
+#include <stdbool.h>
 #include <stdint.h>
 #include <sys/cdefs.h>
 
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 7db6a4b..96dc541 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -6156,10 +6156,11 @@
      * </ul></p>
      *
      * <p>The accuracy of the frame timestamp synchronization determines the physical cameras'
-     * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED,
-     * the physical camera sensors usually run in master-slave mode so that their shutter
-     * time is synchronized. For APPROXIMATE sensorSyncType, the camera sensors usually run in
-     * master-master mode, and there could be offset between their start of exposure.</p>
+     * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED, the
+     * physical camera sensors usually run in leader/follower mode where one sensor generates a
+     * timing signal for the other, so that their shutter time is synchronized. For APPROXIMATE
+     * sensorSyncType, the camera sensors usually run in leader/leader mode, where both sensors
+     * use their own timing generator, and there could be an offset between their start of exposure.</p>
      * <p>In both cases, all images generated for a particular capture request still carry the same
      * timestamps, so that they can be used to look up the matching frame number and
      * onCaptureStarted callback.</p>
@@ -8190,19 +8191,35 @@
      * <li>ACAMERA_LENS_POSE_REFERENCE</li>
      * <li>ACAMERA_LENS_DISTORTION</li>
      * </ul>
-     * <p>The field of view of all non-RAW physical streams must be the same or as close as
-     * possible to that of non-RAW logical streams. If the requested FOV is outside of the
-     * range supported by the physical camera, the physical stream for that physical camera
-     * will use either the maximum or minimum scaler crop region, depending on which one is
-     * closer to the requested FOV. For example, for a logical camera with wide-tele lens
-     * configuration where the wide lens is the default, if the logical camera's crop region
-     * is set to maximum, the physical stream for the tele lens will be configured to its
-     * maximum crop region. On the other hand, if the logical camera has a normal-wide lens
-     * configuration where the normal lens is the default, when the logical camera's crop
-     * region is set to maximum, the FOV of the logical streams will be that of the normal
-     * lens. The FOV of the physical streams for the wide lens will be the same as the
-     * logical stream, by making the crop region smaller than its active array size to
-     * compensate for the smaller focal length.</p>
+     * <p>The field of view of non-RAW physical streams must not be smaller than that of the
+     * non-RAW logical streams, or the maximum field-of-view of the physical camera,
+     * whichever is smaller. The application should check the physical capture result
+     * metadata for how the physical streams are cropped or zoomed. More specifically, given
+     * the physical camera result metadata, the effective horizontal field-of-view of the
+     * physical camera is:</p>
+     * <pre><code>fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)
+     * </code></pre>
+     * <p>where the equation parameters are the physical camera's crop region width, physical
+     * sensor width, zoom ratio, active array width, and focal length respectively. Typically
+     * the physical stream of the active physical camera has the same field-of-view as the
+     * logical streams. However, the same may not be true for physical streams from
+     * non-active physical cameras. For example, if the logical camera has a wide-ultrawide
+     * configuration where the wide lens is the default, when the crop region is set to the
+     * logical camera's active array size (and the zoom ratio is set to 1.0, starting from
+     * Android 11), a physical stream for the ultrawide camera may prefer outputting images
+     * with larger field-of-view than that of the wide camera for better stereo matching
+     * margin or more robust motion tracking. At the same time, the physical non-RAW streams'
+     * field of view must not be smaller than the requested crop region and zoom ratio, as
+     * long as it's within the physical lens' capability. For example, for a logical camera
+     * with wide-tele lens configuration where the wide lens is the default, if the logical
+     * camera's crop region is set to maximum size, and zoom ratio set to 1.0, the physical
+     * stream for the tele lens will be configured to its maximum size crop region (no zoom).</p>
+     * <p><em>Deprecated:</em> Prior to Android 11, the field of view of all non-RAW physical streams
+     * could not be larger than that of non-RAW logical streams. If the logical camera has a
+     * wide-ultrawide lens configuration where the wide lens is the default, when the logical
+     * camera's crop region is set to maximum size, the FOV of the physical streams for the
+     * ultrawide lens will be the same as the logical stream, by making the crop region
+     * smaller than its active array size to compensate for the smaller focal length.</p>
      * <p>Even if the underlying physical cameras have different RAW characteristics (such as
      * size or CFA pattern), a logical camera can still advertise RAW capability. In this
      * case, when the application configures a RAW stream, the camera device will make sure
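
A minimal sketch of evaluating the effective horizontal field-of-view formula quoted above; the numbers stand in for values that would come from the physical camera's capture result and characteristics (crop region width, physical sensor width, zoom ratio, active array width, focal length) and are purely illustrative:

    #include <cmath>
    #include <cstdio>

    int main() {
        // Hypothetical physical-camera values (pixels for widths, millimeters for
        // sensor width and focal length).
        double cropW = 4000.0;         // crop region width from the physical result
        double activeArrayW = 4000.0;  // active array width
        double sensorW = 6.4;          // physical sensor width, mm
        double zoomRatio = 1.0;        // zoom ratio from the physical result
        double focalLength = 4.3;      // focal length, mm

        // fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)
        double fovRad = 2.0 * std::atan2(
                cropW * sensorW / (2.0 * zoomRatio * activeArrayW), focalLength);
        std::printf("effective horizontal FOV: %.1f degrees\n",
                    fovRad * 180.0 / std::acos(-1.0));
        return 0;
    }
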
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index b68e6c2..fcd291f 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -43,7 +43,7 @@
         "-Werror",
     ],
 
-    compile_multilib: "32",
+    compile_multilib: "prefer32",
 
     init_rc: ["drmserver.rc"],
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 3ecf6d5..1495703 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -148,14 +148,17 @@
     // Calculate the output buffer size and determine if any subsamples are
     // encrypted.
     size_t destSize = 0;
+    size_t srcSize = 0;
     bool haveEncryptedSubsamples = false;
     for (size_t i = 0; i < subSamples.size(); i++) {
         const SubSample &subSample = subSamples[i];
-        if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize)) {
+        if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize) ||
+            __builtin_add_overflow(srcSize, subSample.numBytesOfClearData, &srcSize)) {
             _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample clear size overflow");
             return Void();
         }
-        if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize)) {
+        if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize) ||
+            __builtin_add_overflow(srcSize, subSample.numBytesOfEncryptedData, &srcSize)) {
             _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample encrypted size overflow");
             return Void();
         }
@@ -164,7 +167,7 @@
         }
     }
 
-    if (destSize > destBuffer.size) {
+    if (destSize > destBuffer.size || srcSize > source.size) {
         _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample sum too large");
         return Void();
     }
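
For reference, a self-contained sketch of the overflow-checked accumulation pattern used above: both the clear and encrypted byte counts are summed with __builtin_add_overflow (a GCC/Clang builtin), and the totals are then checked against the source and destination buffer sizes. The SubSample struct and the sizes in main() are simplified stand-ins:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    struct SubSample {
        size_t numBytesOfClearData;
        size_t numBytesOfEncryptedData;
    };

    // Returns false if the summed sizes overflow or exceed the buffer bounds.
    bool checkSubSampleSizes(const std::vector<SubSample> &subSamples,
                             size_t sourceSize, size_t destSize) {
        size_t srcTotal = 0;
        size_t dstTotal = 0;
        for (const SubSample &s : subSamples) {
            if (__builtin_add_overflow(dstTotal, s.numBytesOfClearData, &dstTotal) ||
                __builtin_add_overflow(srcTotal, s.numBytesOfClearData, &srcTotal) ||
                __builtin_add_overflow(dstTotal, s.numBytesOfEncryptedData, &dstTotal) ||
                __builtin_add_overflow(srcTotal, s.numBytesOfEncryptedData, &srcTotal)) {
                return false;  // size_t overflow
            }
        }
        return srcTotal <= sourceSize && dstTotal <= destSize;
    }

    int main() {
        std::vector<SubSample> subSamples = {{16, 4096}, {0, 65536}};
        std::printf("%s\n",
                    checkSubSampleSizes(subSamples, 1 << 20, 1 << 20) ? "ok" : "too large");
        return 0;
    }
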
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 206f87f..b006f38 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -48,7 +48,7 @@
         }
     ],
 
-    "staged-platinum-postsubmit": [
+    "platinum-postsubmit": [
         // runs regularly, independent of changes in this tree.
         // signals if changes elsewhere break media functionality
         {
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index f39620e..677f316 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -89,11 +89,18 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
-                .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+                .withFields({C2F(mChannelCount, value).inRange(1, MAX_CHANNEL_COUNT)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
+                DefineParam(mMaxChannelCount, C2_PARAMKEY_MAX_CHANNEL_COUNT)
+                .withDefault(new C2StreamMaxChannelCountInfo::input(0u, MAX_CHANNEL_COUNT))
+                .withFields({C2F(mMaxChannelCount, value).inRange(1, MAX_CHANNEL_COUNT)})
+                .withSetter(Setter<decltype(*mMaxChannelCount)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                 .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
                 .withFields({C2F(mBitrate, value).inRange(8000, 960000)})
@@ -225,6 +232,7 @@
     int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. + 0.5; }
     int32_t getDrcEffectType() const { return mDrcEffectType->value; }
     int32_t getDrcAlbumMode() const { return mDrcAlbumMode->value; }
+    u_int32_t getMaxChannelCount() const { return mMaxChannelCount->value; }
     int32_t getDrcOutputLoudness() const { return (mDrcOutputLoudness->value <= 0 ? -mDrcOutputLoudness->value * 4. + 0.5 : -1); }
 
 private:
@@ -241,6 +249,7 @@
     std::shared_ptr<C2StreamDrcAttenuationFactorTuning::input> mDrcAttenuationFactor;
     std::shared_ptr<C2StreamDrcEffectTypeTuning::input> mDrcEffectType;
     std::shared_ptr<C2StreamDrcAlbumModeTuning::input> mDrcAlbumMode;
+    std::shared_ptr<C2StreamMaxChannelCountInfo::input> mMaxChannelCount;
     std::shared_ptr<C2StreamDrcOutputLoudnessTuning::output> mDrcOutputLoudness;
     // TODO Add : C2StreamAacSbrModeTuning
 };
@@ -366,9 +375,10 @@
     ALOGV("AAC decoder using MPEG-D DRC album mode %d", albumMode);
     aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE, albumMode);
 
-    // By default, the decoder creates a 5.1 channel downmix signal.
-    // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
-    aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+    // AAC_PCM_MAX_OUTPUT_CHANNELS
+    u_int32_t maxChannelCount = mIntf->getMaxChannelCount();
+    ALOGV("AAC decoder using maximum output channel count %u", maxChannelCount);
+    aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, maxChannelCount);
 
     return status;
 }
@@ -707,6 +717,11 @@
         ALOGV("AAC decoder using MPEG-D DRC album mode %d", albumMode);
         aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE, albumMode);
 
+        // AAC_PCM_MAX_OUTPUT_CHANNELS
+        int32_t maxChannelCount = mIntf->getMaxChannelCount();
+        ALOGV("AAC decoder using maximum output channel count %d", maxChannelCount);
+        aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, maxChannelCount);
+
         mDrcWrap.update();
 
         UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
@@ -776,7 +791,6 @@
 
                 // After an error, replace bufferSize with the sum of the
                 // decodedSizes to resynchronize the in/out lists.
-                inInfo.decodedSizes.pop_back();
                 inInfo.bufferSize = std::accumulate(
                         inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0);
 
@@ -847,6 +861,51 @@
                     ALOGE("Getting output loudness failed");
                 }
             }
+
+            // update config with values used for decoding:
+            //    Album mode, target reference level, DRC effect type, DRC attenuation and boost
+            //    factor, DRC compression mode, encoder target level and max channel count
+            // using the input values, since the decoder did not modify them
+
+            C2StreamDrcAttenuationFactorTuning::input currentAttenuationFactor(0u,
+                    (C2FloatValue) (attenuationFactor/127.));
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentAttenuationFactor));
+
+            C2StreamDrcBoostFactorTuning::input currentBoostFactor(0u,
+                    (C2FloatValue) (boostFactor/127.));
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentBoostFactor));
+
+            C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
+                    (C2Config::drc_compression_mode_t) compressMode);
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentCompressMode));
+
+            C2StreamDrcEncodedTargetLevelTuning::input currentEncodedTargetLevel(0u,
+                    (C2FloatValue) (encTargetLevel*-0.25));
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentEncodedTargetLevel));
+
+            C2StreamDrcAlbumModeTuning::input currentAlbumMode(0u,
+                    (C2Config::drc_album_mode_t) albumMode);
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentAlbumMode));
+
+            C2StreamDrcTargetReferenceLevelTuning::input currentTargetRefLevel(0u,
+                    (float) (targetRefLevel*-0.25));
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentTargetRefLevel));
+
+            C2StreamDrcEffectTypeTuning::input currentEffectype(0u,
+                    (C2Config::drc_effect_type_t) effectType);
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentEffectype));
+
+            C2StreamMaxChannelCountInfo::input currentMaxChannelCnt(0u, maxChannelCount);
+            work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(currentMaxChannelCnt));
+
         } while (decoderErr == AAC_DEC_OK);
     }
 
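A minimal sketch of the value conversions implied by the getters and the config-update block above, assuming the same scaling: DRC attenuation/boost factors travel as floats in [0, 1] and map to the decoder's 0..127 integer range, while target/encoded levels are quarter-dB steps mapped to negative dB. The helper names below are illustrative only:

    #include <cstdint>
    #include <cstdio>

    // C2 float factor [0, 1] -> decoder integer 0..127 (as in getDrcAttenuationFactor()).
    static int32_t toDecoderFactor(float c2Factor) {
        return static_cast<int32_t>(c2Factor * 127. + 0.5);
    }

    // Decoder integer 0..127 -> C2 float factor (as in the config-update path).
    static float toC2Factor(int32_t decoderFactor) {
        return decoderFactor / 127.f;
    }

    // Decoder level in quarter-dB steps -> C2 float dB value (e.g. 64 -> -16 dB).
    static float quarterDbToDb(int32_t quarterDbSteps) {
        return quarterDbSteps * -0.25f;
    }

    int main() {
        float attenuation = 0.5f;                       // hypothetical C2 attenuation factor
        int32_t decVal = toDecoderFactor(attenuation);  // 64
        std::printf("factor %.2f -> %d -> %.3f\n", attenuation, decVal, toC2Factor(decVal));
        std::printf("target ref level 64 -> %.2f dB\n", quarterDbToDb(64));
        return 0;
    }
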
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 4db94f5..2e85915 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -293,6 +293,30 @@
     return OK;
 }
 
+static void MaybeLogTimestampWarning(
+        long long lastFrameEndTimestampUs, long long inputTimestampUs) {
+    using Clock = std::chrono::steady_clock;
+    thread_local Clock::time_point sLastLogTimestamp{};
+    thread_local int32_t sOverlapCount = -1;
+    if (Clock::now() - sLastLogTimestamp > std::chrono::minutes(1) || sOverlapCount < 0) {
+        AString countMessage = "";
+        if (sOverlapCount > 0) {
+            countMessage = AStringPrintf(
+                    "(%d overlapping timestamp detected since last log)", sOverlapCount);
+        }
+        ALOGI("Correcting overlapping timestamp: last frame ended at %lldus but "
+                "current frame is starting at %lldus. Using the last frame's end timestamp %s",
+                lastFrameEndTimestampUs, inputTimestampUs, countMessage.c_str());
+        sLastLogTimestamp = Clock::now();
+        sOverlapCount = 0;
+    } else {
+        ALOGV("Correcting overlapping timestamp: last frame ended at %lldus but "
+                "current frame is starting at %lldus. Using the last frame's end timestamp",
+                lastFrameEndTimestampUs, inputTimestampUs);
+        ++sOverlapCount;
+    }
+}
+
 void C2SoftAacEnc::process(
         const std::unique_ptr<C2Work> &work,
         const std::shared_ptr<C2BlockPool> &pool) {
@@ -366,9 +390,7 @@
     }
     c2_cntr64_t inputTimestampUs = work->input.ordinal.timestamp;
     if (inputTimestampUs < mLastFrameEndTimestampUs.value_or(inputTimestampUs)) {
-        ALOGW("Correcting overlapping timestamp: last frame ended at %lldus but "
-              "current frame is starting at %lldus. Using the last frame's end timestamp",
-              mLastFrameEndTimestampUs->peekll(), inputTimestampUs.peekll());
+        MaybeLogTimestampWarning(mLastFrameEndTimestampUs->peekll(), inputTimestampUs.peekll());
         inputTimestampUs = *mLastFrameEndTimestampUs;
     }
     if (capacity > 0) {
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
index 5c019f3..29740d1 100644
--- a/media/codec2/components/base/SimpleC2Interface.cpp
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -39,6 +39,16 @@
     setDerivedInstance(this);
 
     addParameter(
+            DefineParam(mApiFeatures, C2_PARAMKEY_API_FEATURES)
+            .withConstValue(new C2ApiFeaturesSetting(C2Config::api_feature_t(
+                    API_REFLECTION |
+                    API_VALUES |
+                    API_CURRENT_VALUES |
+                    API_DEPENDENCY |
+                    API_SAME_INPUT_BUFFER)))
+            .build());
+
+    addParameter(
             DefineParam(mName, C2_PARAMKEY_COMPONENT_NAME)
             .withConstValue(AllocSharedString<C2ComponentNameSetting>(name.c_str()))
             .build());
@@ -305,7 +315,6 @@
     Clients need to handle the following base params due to custom dependency.
 
     std::shared_ptr<C2ApiLevelSetting> mApiLevel;
-    std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
     std::shared_ptr<C2ComponentAttributesSetting> mAttrib;
 
     std::shared_ptr<C2PortSuggestedBufferCountTuning::input> mSuggestedInputBufferCount;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 74e105e..7e9090f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -460,8 +460,8 @@
 
     const C2ConstGraphicBlock inBuffer =
         inputBuffer->data().graphicBlocks().front();
-    if (inBuffer.width() != mSize->width ||
-        inBuffer.height() != mSize->height) {
+    if (inBuffer.width() < mSize->width ||
+        inBuffer.height() < mSize->height) {
         ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
               inBuffer.width(), mSize->width, inBuffer.height(),
               mSize->height);
@@ -472,8 +472,8 @@
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
     vpx_image_t raw_frame;
     const C2PlanarLayout &layout = rView->layout();
-    uint32_t width = rView->width();
-    uint32_t height = rView->height();
+    uint32_t width = mSize->width;
+    uint32_t height = mSize->height;
     if (width > 0x8000 || height > 0x8000) {
         ALOGE("Image too big: %u x %u", width, height);
         work->result = C2_BAD_VALUE;
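
The relaxed check above accepts input buffers whose dimensions are at least the configured size (for example, due to stride or alignment padding) and encodes using the configured width and height. A minimal sketch of reading only the configured region out of a larger plane, with hypothetical dimensions:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Copy a width x height region out of a plane whose row stride may be larger
    // than the configured width (e.g. a 1920x1080 frame stored with a 2048-byte stride).
    void copyConfiguredRegion(const uint8_t *src, size_t srcStride,
                              uint8_t *dst, uint32_t width, uint32_t height) {
        for (uint32_t row = 0; row < height; ++row) {
            std::memcpy(dst + row * width, src + row * srcStride, width);
        }
    }

    int main() {
        const uint32_t width = 1920, height = 1080;
        const size_t stride = 2048;  // hypothetical padded row stride
        std::vector<uint8_t> padded(stride * height), packed(size_t{width} * height);
        copyConfiguredRegion(padded.data(), stride, packed.data(), width, height);
        return 0;
    }
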
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index a41c2dc..0251ec2 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -92,7 +92,10 @@
         for (size_t i = 0; i < updates.size(); ++i) {
             C2Param* param = updates[i].get();
             if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
-                csd = true;
+                C2StreamInitDataInfo::output* csdBuffer =
+                        (C2StreamInitDataInfo::output*)(param);
+                size_t csdSize = csdBuffer->flexCount();
+                if (csdSize > 0) csd = true;
             } else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
                        (param->index() == C2StreamChannelCountInfo::output::PARAM_TYPE) ||
                        (param->index() == C2StreamPictureSizeInfo::output::PARAM_TYPE)) {
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 74088dd..12ed725 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -109,6 +109,7 @@
         mFramesReceived = 0;
         mTimestampUs = 0u;
         mWorkResult = C2_OK;
+        mReorderDepth = -1;
         mTimestampDevTest = false;
         mMd5Offset = 0;
         mMd5Enable = false;
@@ -211,34 +212,46 @@
         for (std::unique_ptr<C2Work>& work : workItems) {
             if (!work->worklets.empty()) {
                 // For decoder components current timestamp always exceeds
-                // previous timestamp
+                // previous timestamp if output is in display order
                 typedef std::unique_lock<std::mutex> ULock;
                 mWorkResult |= work->result;
                 bool codecConfig = ((work->worklets.front()->output.flags &
                                      C2FrameData::FLAG_CODEC_CONFIG) != 0);
                 if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
-                    EXPECT_GE((work->worklets.front()->output.ordinal.timestamp.peeku()),
-                              mTimestampUs);
-                    mTimestampUs = work->worklets.front()->output.ordinal.timestamp.peeku();
-
-                    ULock l(mQueueLock);
-                    if (mTimestampDevTest) {
-                        bool tsHit = false;
-                        std::list<uint64_t>::iterator it = mTimestampUslist.begin();
-                        while (it != mTimestampUslist.end()) {
-                            if (*it == mTimestampUs) {
-                                mTimestampUslist.erase(it);
-                                tsHit = true;
-                                break;
-                            }
-                            it++;
+                    if (mReorderDepth < 0) {
+                        C2PortReorderBufferDepthTuning::output reorderBufferDepth;
+                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
+                                          nullptr);
+                        mReorderDepth = reorderBufferDepth.value;
+                        if (mReorderDepth > 0) {
+                            // TODO: Add validation for reordered output
+                            mTimestampDevTest = false;
                         }
-                        if (tsHit == false) {
-                            if (mTimestampUslist.empty() == false) {
-                                EXPECT_EQ(tsHit, true) << "TimeStamp not recognized";
-                            } else {
-                                std::cout << "[   INFO   ] Received non-zero "
-                                             "output / TimeStamp not recognized \n";
+                    }
+                    if (mTimestampDevTest) {
+                        EXPECT_GE((work->worklets.front()->output.ordinal.timestamp.peeku()),
+                                  mTimestampUs);
+                        mTimestampUs = work->worklets.front()->output.ordinal.timestamp.peeku();
+
+                        ULock l(mQueueLock);
+                        {
+                            bool tsHit = false;
+                            std::list<uint64_t>::iterator it = mTimestampUslist.begin();
+                            while (it != mTimestampUslist.end()) {
+                                if (*it == mTimestampUs) {
+                                    mTimestampUslist.erase(it);
+                                    tsHit = true;
+                                    break;
+                                }
+                                it++;
+                            }
+                            if (tsHit == false) {
+                                if (mTimestampUslist.empty() == false) {
+                                    EXPECT_EQ(tsHit, true) << "TimeStamp not recognized";
+                                } else {
+                                    std::cout << "[   INFO   ] Received non-zero "
+                                                 "output / TimeStamp not recognized \n";
+                                }
                             }
                         }
                     }
@@ -281,6 +294,7 @@
     standardComp mCompName;
 
     int32_t mWorkResult;
+    int32_t mReorderDepth;
     uint32_t mFramesReceived;
     C2BlockPool::local_id_t mBlockPoolId;
     std::shared_ptr<C2BlockPool> mLinearPool;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 9e425d2..ecaf3a8 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -510,12 +510,10 @@
         ASSERT_TRUE(false);
     }
 
-    if (!mCsd && (mCompName != vp8 && mCompName != vp9)) {
-        ASSERT_TRUE(false) << "CSD Buffer not received";
-    }
-
-    if (mCsd && (mCompName == vp8 || mCompName == vp9)) {
-        ASSERT_TRUE(false) << "CSD Buffer not expected";
+    if (mCompName == vp8 || mCompName == h263) {
+        ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
+    } else if (mCompName != vp9) {
+        ASSERT_TRUE(mCsd) << "CSD Buffer not received";
     }
 
     if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 05e086f..73b3857 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1868,7 +1868,7 @@
                 config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
             }
             mChannel->onWorkDone(
-                    std::move(work), changed ? config->mOutputFormat : nullptr,
+                    std::move(work), changed ? config->mOutputFormat->dup() : nullptr,
                     initData.hasChanged() ? initData.update().get() : nullptr);
             break;
         }
@@ -1958,11 +1958,98 @@
             inputSurface->getHalInterface()));
 }
 
-static void MaybeLogUnrecognizedName(const char *func, const std::string &name) {
-    thread_local std::set<std::string> sLogged{};
-    if (sLogged.insert(name).second) {
-        ALOGW("%s: Unrecognized interface name: %s", func, name.c_str());
+class IntfCache {
+public:
+    IntfCache() = default;
+
+    status_t init(const std::string &name) {
+        std::shared_ptr<Codec2Client::Interface> intf{
+            Codec2Client::CreateInterfaceByName(name.c_str())};
+        if (!intf) {
+            ALOGW("IntfCache [%s]: Unrecognized interface name", name.c_str());
+            mInitStatus = NO_INIT;
+            return NO_INIT;
+        }
+        const static C2StreamUsageTuning::input sUsage{0u /* stream id */};
+        mFields.push_back(C2FieldSupportedValuesQuery::Possible(
+                C2ParamField{&sUsage, &sUsage.value}));
+        c2_status_t err = intf->querySupportedValues(mFields, C2_MAY_BLOCK);
+        if (err != C2_OK) {
+            ALOGW("IntfCache [%s]: failed to query usage supported value (err=%d)",
+                    name.c_str(), err);
+            mFields[0].status = err;
+        }
+        std::vector<std::unique_ptr<C2Param>> params;
+        err = intf->query(
+                {&mApiFeatures},
+                {C2PortAllocatorsTuning::input::PARAM_TYPE},
+                C2_MAY_BLOCK,
+                &params);
+        if (err != C2_OK && err != C2_BAD_INDEX) {
+            ALOGW("IntfCache [%s]: failed to query api features (err=%d)",
+                    name.c_str(), err);
+        }
+        while (!params.empty()) {
+            C2Param *param = params.back().release();
+            params.pop_back();
+            if (!param) {
+                continue;
+            }
+            if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
+                mInputAllocators.reset(
+                        C2PortAllocatorsTuning::input::From(param));
+            }
+        }
+        mInitStatus = OK;
+        return OK;
     }
+
+    status_t initCheck() const { return mInitStatus; }
+
+    const C2FieldSupportedValuesQuery &getUsageSupportedValues() const {
+        CHECK_EQ(1u, mFields.size());
+        return mFields[0];
+    }
+
+    const C2ApiFeaturesSetting &getApiFeatures() const {
+        return mApiFeatures;
+    }
+
+    const C2PortAllocatorsTuning::input &getInputAllocators() const {
+        static std::unique_ptr<C2PortAllocatorsTuning::input> sInvalidated = []{
+            std::unique_ptr<C2PortAllocatorsTuning::input> param =
+                C2PortAllocatorsTuning::input::AllocUnique(0);
+            param->invalidate();
+            return param;
+        }();
+        return mInputAllocators ? *mInputAllocators : *sInvalidated;
+    }
+
+private:
+    status_t mInitStatus{NO_INIT};
+
+    std::vector<C2FieldSupportedValuesQuery> mFields;
+    C2ApiFeaturesSetting mApiFeatures;
+    std::unique_ptr<C2PortAllocatorsTuning::input> mInputAllocators;
+};
+
+static const IntfCache &GetIntfCache(const std::string &name) {
+    static IntfCache sNullIntfCache;
+    static std::mutex sMutex;
+    static std::map<std::string, IntfCache> sCache;
+    std::unique_lock<std::mutex> lock{sMutex};
+    auto it = sCache.find(name);
+    if (it == sCache.end()) {
+        lock.unlock();
+        IntfCache intfCache;
+        status_t err = intfCache.init(name);
+        if (err != OK) {
+            return sNullIntfCache;
+        }
+        lock.lock();
+        it = sCache.insert({name, std::move(intfCache)}).first;
+    }
+    return it->second;
 }
 
 static status_t GetCommonAllocatorIds(
@@ -1980,24 +2067,16 @@
     }
     bool firstIteration = true;
     for (const std::string &name : names) {
-        std::shared_ptr<Codec2Client::Interface> intf{
-            Codec2Client::CreateInterfaceByName(name.c_str())};
-        if (!intf) {
-            MaybeLogUnrecognizedName(__FUNCTION__, name);
+        const IntfCache &intfCache = GetIntfCache(name);
+        if (intfCache.initCheck() != OK) {
             continue;
         }
-        std::vector<std::unique_ptr<C2Param>> params;
-        c2_status_t err = intf->query(
-                {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, &params);
+        const C2PortAllocatorsTuning::input &allocators = intfCache.getInputAllocators();
         if (firstIteration) {
             firstIteration = false;
-            if (err == C2_OK && params.size() == 1u) {
-                C2PortAllocatorsTuning::input *allocators =
-                    C2PortAllocatorsTuning::input::From(params[0].get());
-                if (allocators && allocators->flexCount() > 0) {
-                    ids->insert(allocators->m.values,
-                                allocators->m.values + allocators->flexCount());
-                }
+            if (allocators && allocators.flexCount() > 0) {
+                ids->insert(allocators.m.values,
+                            allocators.m.values + allocators.flexCount());
             }
             if (ids->empty()) {
                 // The component does not advertise allocators. Use default.
@@ -2006,24 +2085,20 @@
             continue;
         }
         bool filtered = false;
-        if (err == C2_OK && params.size() == 1u) {
-            C2PortAllocatorsTuning::input *allocators =
-                C2PortAllocatorsTuning::input::From(params[0].get());
-            if (allocators && allocators->flexCount() > 0) {
-                filtered = true;
-                for (auto it = ids->begin(); it != ids->end(); ) {
-                    bool found = false;
-                    for (size_t j = 0; j < allocators->flexCount(); ++j) {
-                        if (allocators->m.values[j] == *it) {
-                            found = true;
-                            break;
-                        }
+        if (allocators && allocators.flexCount() > 0) {
+            filtered = true;
+            for (auto it = ids->begin(); it != ids->end(); ) {
+                bool found = false;
+                for (size_t j = 0; j < allocators.flexCount(); ++j) {
+                    if (allocators.m.values[j] == *it) {
+                        found = true;
+                        break;
                     }
-                    if (found) {
-                        ++it;
-                    } else {
-                        it = ids->erase(it);
-                    }
+                }
+                if (found) {
+                    ++it;
+                } else {
+                    it = ids->erase(it);
                 }
             }
         }
@@ -2055,23 +2130,16 @@
     *minUsage = 0;
     *maxUsage = ~0ull;
     for (const std::string &name : names) {
-        std::shared_ptr<Codec2Client::Interface> intf{
-            Codec2Client::CreateInterfaceByName(name.c_str())};
-        if (!intf) {
-            MaybeLogUnrecognizedName(__FUNCTION__, name);
+        const IntfCache &intfCache = GetIntfCache(name);
+        if (intfCache.initCheck() != OK) {
             continue;
         }
-        std::vector<C2FieldSupportedValuesQuery> fields;
-        fields.push_back(C2FieldSupportedValuesQuery::Possible(
-                C2ParamField{&sUsage, &sUsage.value}));
-        c2_status_t err = intf->querySupportedValues(fields, C2_MAY_BLOCK);
-        if (err != C2_OK) {
+        const C2FieldSupportedValuesQuery &usageSupportedValues =
+            intfCache.getUsageSupportedValues();
+        if (usageSupportedValues.status != C2_OK) {
             continue;
         }
-        if (fields[0].status != C2_OK) {
-            continue;
-        }
-        const C2FieldSupportedValues &supported = fields[0].values;
+        const C2FieldSupportedValues &supported = usageSupportedValues.values;
         if (supported.type != C2FieldSupportedValues::FLAGS) {
             continue;
         }
@@ -2092,6 +2160,17 @@
 // static
 status_t CCodec::CanFetchLinearBlock(
         const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible) {
+    for (const std::string &name : names) {
+        const IntfCache &intfCache = GetIntfCache(name);
+        if (intfCache.initCheck() != OK) {
+            continue;
+        }
+        const C2ApiFeaturesSetting &features = intfCache.getApiFeatures();
+        if (features && !(features.value & API_SAME_INPUT_BUFFER)) {
+            *isCompatible = false;
+            return OK;
+        }
+    }
     uint64_t minUsage = usage.expected;
     uint64_t maxUsage = ~0ull;
     std::set<C2Allocator::id_t> allocators;
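
GetIntfCache() above follows a lazily populated, name-keyed cache: the lock is released while the potentially slow interface query runs, then re-taken to insert the result (an entry already inserted by a racing thread is kept). A minimal generic sketch of the same pattern, with a hypothetical Entry type and loadEntry() step:

    #include <map>
    #include <mutex>
    #include <string>

    struct Entry {
        bool valid = false;
        // ... cached query results would live here
    };

    static Entry loadEntry(const std::string &name) {
        // Stand-in for the slow per-name query (e.g. querying a codec interface).
        Entry e;
        e.valid = !name.empty();
        return e;
    }

    const Entry &getCachedEntry(const std::string &name) {
        static const Entry sInvalid;           // returned when loading fails
        static std::mutex sMutex;
        static std::map<std::string, Entry> sCache;
        std::unique_lock<std::mutex> lock{sMutex};
        auto it = sCache.find(name);
        if (it == sCache.end()) {
            lock.unlock();                     // don't hold the lock during the slow load
            Entry entry = loadEntry(name);
            if (!entry.valid) {
                return sInvalid;
            }
            lock.lock();
            it = sCache.emplace(name, std::move(entry)).first;  // keeps existing entry on race
        }
        return it->second;
    }
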
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 907aa39..0626c8d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "CCodecBufferChannel"
 #include <utils/Log.h>
 
+#include <algorithm>
+#include <list>
 #include <numeric>
 
 #include <C2AllocatorGralloc.h>
@@ -616,13 +618,14 @@
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
-    if (mInputMetEos ||
-           mOutput.lock()->buffers->hasPending() ||
-           mPipelineWatcher.lock()->pipelineFull()) {
+    if (mInputMetEos || mPipelineWatcher.lock()->pipelineFull()) {
         return;
-    } else {
+    }
+    {
         Mutexed<Output>::Locked output(mOutput);
-        if (!output->buffers || output->buffers->numClientBuffers() >= output->numSlots) {
+        if (!output->buffers ||
+                output->buffers->hasPending() ||
+                output->buffers->numClientBuffers() >= output->numSlots) {
             return;
         }
     }
@@ -729,6 +732,9 @@
     std::shared_ptr<const C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
         std::static_pointer_cast<const C2StreamHdr10PlusInfo::output>(
                 c2Buffer->getInfo(C2StreamHdr10PlusInfo::output::PARAM_TYPE));
+    if (hdr10PlusInfo && hdr10PlusInfo->flexCount() == 0) {
+        hdr10PlusInfo.reset();
+    }
 
     {
         Mutexed<OutputSurface>::Locked output(mOutputSurface);
@@ -780,7 +786,7 @@
                     .maxLuminance = hdrStaticInfo->mastering.maxLuminance,
                     .minLuminance = hdrStaticInfo->mastering.minLuminance,
                 };
-                hdr.validTypes = HdrMetadata::SMPTE2086;
+                hdr.validTypes |= HdrMetadata::SMPTE2086;
                 hdr.smpte2086 = smpte2086_meta;
             }
             // If the content light level fields are 0, do not use them, it
@@ -914,6 +920,12 @@
 
     if (inputFormat != nullptr) {
         bool graphic = (iStreamFormat.value == C2BufferData::GRAPHIC);
+        C2Config::api_feature_t apiFeatures = C2Config::api_feature_t(
+                API_REFLECTION |
+                API_VALUES |
+                API_CURRENT_VALUES |
+                API_DEPENDENCY |
+                API_SAME_INPUT_BUFFER);
         std::shared_ptr<C2BlockPool> pool;
         {
             Mutexed<BlockPools>::Locked pools(mBlockPools);
@@ -925,14 +937,15 @@
             // query C2PortAllocatorsTuning::input from component. If an allocator ID is obtained
             // from component, create the input block pool with given ID. Otherwise, use default IDs.
             std::vector<std::unique_ptr<C2Param>> params;
-            err = mComponent->query({ },
+            C2ApiFeaturesSetting featuresSetting{apiFeatures};
+            err = mComponent->query({ &featuresSetting },
                                     { C2PortAllocatorsTuning::input::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
             if ((err != C2_OK && err != C2_BAD_INDEX) || params.size() != 1) {
                 ALOGD("[%s] Query input allocators returned %zu params => %s (%u)",
                         mName, params.size(), asString(err), err);
-            } else if (err == C2_OK && params.size() == 1) {
+            } else if (params.size() == 1) {
                 C2PortAllocatorsTuning::input *inputAllocators =
                     C2PortAllocatorsTuning::input::From(params[0].get());
                 if (inputAllocators && inputAllocators->flexCount() > 0) {
@@ -947,6 +960,9 @@
                     }
                 }
             }
+            if (featuresSetting) {
+                apiFeatures = featuresSetting.value;
+            }
 
             // TODO: use C2Component wrapper to associate this pool with ourselves
             if ((poolMask >> pools->inputAllocatorId) & 1) {
@@ -980,7 +996,10 @@
         input->numSlots = numInputSlots;
         input->extraBuffers.flush();
         input->numExtraSlots = 0u;
-        if (!buffersBoundToCodec) {
+        bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
+        // For encrypted content, the framework decrypts the source buffer (ashmem) into
+        // C2Buffers. Thus non-conforming codecs can process these.
+        if (!buffersBoundToCodec && (hasCryptoOrDescrambler() || conforming)) {
             input->buffers.reset(new SlotInputBuffers(mName));
         } else if (graphic) {
             if (mInputSurface) {
@@ -1242,62 +1261,98 @@
         return UNKNOWN_ERROR;
     }
     size_t numInputSlots = mInput.lock()->numSlots;
-    std::vector<sp<MediaCodecBuffer>> toBeQueued;
-    for (size_t i = 0; i < numInputSlots; ++i) {
+
+    struct ClientInputBuffer {
         size_t index;
         sp<MediaCodecBuffer> buffer;
-        {
-            Mutexed<Input>::Locked input(mInput);
-            if (!input->buffers->requestNewBuffer(&index, &buffer)) {
-                if (i == 0) {
-                    ALOGW("[%s] start: cannot allocate memory at all", mName);
-                    return NO_MEMORY;
-                } else {
-                    ALOGV("[%s] start: cannot allocate memory, only %zu buffers allocated",
-                            mName, i);
-                }
+        size_t capacity;
+    };
+    std::list<ClientInputBuffer> clientInputBuffers;
+
+    {
+        Mutexed<Input>::Locked input(mInput);
+        while (clientInputBuffers.size() < numInputSlots) {
+            ClientInputBuffer clientInputBuffer;
+            if (!input->buffers->requestNewBuffer(&clientInputBuffer.index,
+                                                  &clientInputBuffer.buffer)) {
                 break;
             }
+            clientInputBuffer.capacity = clientInputBuffer.buffer->capacity();
+            clientInputBuffers.emplace_back(std::move(clientInputBuffer));
         }
-        if (buffer) {
-            Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
-            ALOGV("[%s] input buffer %zu available", mName, index);
-            bool post = true;
-            if (!configs->empty()) {
+    }
+    if (clientInputBuffers.empty()) {
+        ALOGW("[%s] start: cannot allocate memory at all", mName);
+        return NO_MEMORY;
+    } else if (clientInputBuffers.size() < numInputSlots) {
+        ALOGD("[%s] start: cannot allocate memory for all slots, "
+              "only %zu buffers allocated",
+              mName, clientInputBuffers.size());
+    } else {
+        ALOGV("[%s] %zu initial input buffers available",
+              mName, clientInputBuffers.size());
+    }
+    // Sort input buffers by their capacities in increasing order.
+    clientInputBuffers.sort(
+            [](const ClientInputBuffer& a, const ClientInputBuffer& b) {
+                return a.capacity < b.capacity;
+            });
+
+    {
+        Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
+        if (!configs->empty()) {
+            while (!configs->empty()) {
                 sp<ABuffer> config = configs->front();
                 configs->pop_front();
-                if (buffer->capacity() >= config->size()) {
-                    memcpy(buffer->base(), config->data(), config->size());
-                    buffer->setRange(0, config->size());
-                    buffer->meta()->clear();
-                    buffer->meta()->setInt64("timeUs", 0);
-                    buffer->meta()->setInt32("csd", 1);
-                    post = false;
-                } else {
-                    ALOGD("[%s] buffer capacity too small for the config (%zu < %zu)",
-                            mName, buffer->capacity(), config->size());
+                // Find the smallest input buffer that can fit the config.
+                auto i = std::find_if(
+                        clientInputBuffers.begin(),
+                        clientInputBuffers.end(),
+                        [cfgSize = config->size()](const ClientInputBuffer& b) {
+                            return b.capacity >= cfgSize;
+                        });
+                if (i == clientInputBuffers.end()) {
+                    ALOGW("[%s] no input buffer large enough for the config "
+                          "(%zu bytes)",
+                          mName, config->size());
+                    return NO_MEMORY;
                 }
-            } else if (oStreamFormat.value == C2BufferData::LINEAR && i == 0
-                        && (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
-                // WORKAROUND: Some apps expect CSD available without queueing
-                //             any input. Queue an empty buffer to get the CSD.
-                buffer->setRange(0, 0);
+                sp<MediaCodecBuffer> buffer = i->buffer;
+                memcpy(buffer->base(), config->data(), config->size());
+                buffer->setRange(0, config->size());
                 buffer->meta()->clear();
                 buffer->meta()->setInt64("timeUs", 0);
-                post = false;
+                buffer->meta()->setInt32("csd", 1);
+                if (queueInputBufferInternal(buffer) != OK) {
+                    ALOGW("[%s] Error while queueing a flushed config",
+                          mName);
+                    return UNKNOWN_ERROR;
+                }
+                clientInputBuffers.erase(i);
             }
-            if (post) {
-                mCallback->onInputBufferAvailable(index, buffer);
-            } else {
-                toBeQueued.emplace_back(buffer);
+        } else if (oStreamFormat.value == C2BufferData::LINEAR &&
+                   (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
+            sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+            // WORKAROUND: Some apps expect CSD available without queueing
+            //             any input. Queue an empty buffer to get the CSD.
+            buffer->setRange(0, 0);
+            buffer->meta()->clear();
+            buffer->meta()->setInt64("timeUs", 0);
+            if (queueInputBufferInternal(buffer) != OK) {
+                ALOGW("[%s] Error while queueing an empty buffer to get CSD",
+                      mName);
+                return UNKNOWN_ERROR;
             }
+            clientInputBuffers.pop_front();
         }
     }
-    for (const sp<MediaCodecBuffer> &buffer : toBeQueued) {
-        if (queueInputBufferInternal(buffer) != OK) {
-            ALOGV("[%s] Error while queueing initial buffers", mName);
-        }
+
+    for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
+        mCallback->onInputBufferAvailable(
+                clientInputBuffer.index,
+                clientInputBuffer.buffer);
     }
+
     return OK;
 }
 
@@ -1732,7 +1787,7 @@
                     realloc(c2Buffer);
             output.unlock();
             mCCodecCallback->onOutputBuffersChanged();
-            return;
+            break;
         case OutputBuffers::RETRY:
             ALOGV("[%s] sendOutputBuffers: unable to register output buffer",
                   mName);
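
The start logic above sorts the freshly allocated client input buffers by capacity and, for each flushed config, queues the smallest buffer that can hold it, leaving the remaining buffers for the client. A minimal sketch of that selection over plain capacities, with hypothetical sizes:

    #include <algorithm>
    #include <cstdio>
    #include <list>

    int main() {
        // Hypothetical input-buffer capacities, sorted in increasing order as above.
        std::list<size_t> capacities = {4096, 8192, 65536};
        capacities.sort();

        size_t configSize = 6000;  // hypothetical flushed-config size
        auto it = std::find_if(capacities.begin(), capacities.end(),
                               [configSize](size_t cap) { return cap >= configSize; });
        if (it == capacities.end()) {
            std::printf("no buffer large enough for %zu bytes\n", configSize);
        } else {
            std::printf("using %zu-byte buffer for a %zu-byte config\n", *it, configSize);
            capacities.erase(it);  // that buffer is consumed by the queued config
        }
        return 0;
    }
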
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 60d3ae1..c036bb5 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -10,4 +10,11 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 49c9567..440065f 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -8,4 +8,10 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    }
 }
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 826c1a0..2593000 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -21,4 +21,12 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index 0a70815..a9fc7e4 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -17,13 +17,8 @@
  *****************************************************************************
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
-
-cc_library {
-    name: "libextractorfuzzerbase",
-
-    srcs: [
-        "ExtractorFuzzerBase.cpp",
-    ],
+cc_defaults {
+    name: "extractor-fuzzerbase-defaults",
 
     local_include_dirs: [
         "include",
@@ -43,16 +38,10 @@
     shared_libs: [
         "libutils",
         "libbinder",
+        "libbase",
+        "libcutils",
     ],
 
-    /* GETEXTRACTORDEF is not defined as extractor library is not linked in the
-     * base class. It will be included when the extractor fuzzer binary is
-     * generated.
-     */
-    allow_undefined_symbols: true,
-
-    host_supported: true,
-
     target: {
         darwin: {
             enabled: false,
@@ -60,8 +49,64 @@
     },
 }
 
+cc_defaults {
+    name: "extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzerbase-defaults"],
+
+    static_libs: [
+        "libextractorfuzzerbase",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_defaults {
+    name: "mpeg2-extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzer-defaults"],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mpeg2",
+        "frameworks/av/media/libstagefright",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation_without_imemory",
+        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2extractor",
+        "libstagefright_esds",
+        "libmpeg2extractor",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libcrypto",
+        "libhidlmemory",
+        "libhidlbase",
+    ],
+}
+
+cc_library_static {
+    name: "libextractorfuzzerbase",
+    defaults: ["extractor-fuzzerbase-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "ExtractorFuzzerBase.cpp",
+    ],
+}
+
 cc_fuzz {
     name: "mp4_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "mp4_extractor_fuzzer.cpp",
@@ -76,35 +121,18 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmediandk_format",
-        "libmedia_ndkformatpriv",
-        "libextractorfuzzerbase",
         "libstagefright_id3",
         "libstagefright_esds",
         "libmp4extractor",
     ],
 
-    shared_libs: [
-        "libutils",
-        "libbinder",
-    ],
-
     dictionary: "mp4_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
-
-    host_supported: true,
 }
 
 cc_fuzz {
     name: "wav_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "wav_extractor_fuzzer.cpp",
@@ -115,32 +143,19 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libfifo",
         "libwavextractor",
     ],
 
     shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
         "libbinder_ndk",
-        "libbase",
     ],
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "amr_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "amr_extractor_fuzzer.cpp",
@@ -151,31 +166,16 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libamrextractor",
     ],
 
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-    ],
-
     dictionary: "amr_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "mkv_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "mkv_extractor_fuzzer.cpp",
@@ -186,10 +186,6 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libwebm",
         "libstagefright_flacdec",
         "libstagefright_metadatautils",
@@ -197,24 +193,13 @@
         "libFLAC",
     ],
 
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-    ],
-
     dictionary: "mkv_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "ogg_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "ogg_extractor_fuzzer.cpp",
@@ -225,129 +210,44 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libstagefright_metadatautils",
         "libvorbisidec",
         "liboggextractor",
     ],
 
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-    ],
-
     dictionary: "ogg_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "mpeg2ps_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
 
     srcs: [
         "mpeg2_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mpeg2",
-        "frameworks/av/media/libstagefright",
-    ],
-
-    static_libs: [
-        "liblog",
-        "libstagefright_foundation_without_imemory",
-        "libmedia",
-        "libextractorfuzzerbase",
-        "libstagefright_mpeg2support",
-        "libstagefright_mpeg2extractor",
-        "libstagefright_esds",
-        "libmpeg2extractor",
-    ],
-
     cflags: [
         "-DMPEG2PS",
     ],
 
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-        "android.hardware.cas@1.0",
-        "android.hardware.cas.native@1.0",
-        "android.hidl.token@1.0-utils",
-        "android.hidl.allocator@1.0",
-        "libcrypto",
-        "libhidlmemory",
-        "libhidlbase",
-    ],
-
     dictionary: "mpeg2ps_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "mpeg2ts_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
 
     srcs: [
         "mpeg2_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mpeg2",
-        "frameworks/av/media/libstagefright",
-    ],
-
-    static_libs: [
-        "liblog",
-        "libstagefright_foundation_without_imemory",
-        "libmedia",
-        "libextractorfuzzerbase",
-        "libstagefright_mpeg2support",
-        "libstagefright_mpeg2extractor",
-        "libstagefright_esds",
-        "libmpeg2extractor",
-    ],
-
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-        "android.hardware.cas@1.0",
-        "android.hardware.cas.native@1.0",
-        "android.hidl.token@1.0-utils",
-        "android.hidl.allocator@1.0",
-        "libcrypto",
-        "libhidlmemory",
-        "libhidlbase",
-    ],
-
     dictionary: "mpeg2ts_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "mp3_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "mp3_extractor_fuzzer.cpp",
@@ -358,31 +258,16 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libfifo",
         "libmp3extractor",
         "libstagefright_id3",
     ],
-
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-    ],
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "aac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "aac_extractor_fuzzer.cpp",
@@ -393,30 +278,15 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libaacextractor",
         "libstagefright_metadatautils",
     ],
-
-    shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
-    ],
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
 
 cc_fuzz {
     name: "flac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
 
     srcs: [
         "flac_extractor_fuzzer.cpp",
@@ -427,29 +297,14 @@
     ],
 
     static_libs: [
-        "liblog",
-        "libstagefright_foundation",
-        "libmedia",
-        "libextractorfuzzerbase",
         "libstagefright_metadatautils",
         "libFLAC",
         "libflacextractor",
     ],
 
     shared_libs: [
-        "libutils",
-        "libmediandk",
-        "libbinder",
         "libbinder_ndk",
-        "libbase",
     ],
 
     dictionary: "flac_extractor_fuzzer.dict",
-
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
 }
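
The fuzzer modules above now pull their shared static_libs, shared_libs and fuzz_config from the
extractor-fuzzer-defaults / mpeg2-extractor-fuzzer-defaults blocks instead of repeating them per
module. Purely as an illustration (not part of this change), such harnesses are built around the
standard libFuzzer entry point; parseWithExtractor() below is a hypothetical stand-in for the
per-format plumbing that libextractorfuzzerbase provides.

    // Hedged sketch of a libFuzzer harness; only LLVMFuzzerTestOneInput is a fixed
    // interface here, everything else is illustrative.
    #include <cstddef>
    #include <cstdint>

    namespace {
    // Hypothetical stand-in: a real harness hands the bytes to an extractor
    // plugin (wav, amr, mkv, ...) and lets it parse tracks and samples.
    void parseWithExtractor(const uint8_t *data, size_t size) {
        (void)data;
        (void)size;
    }
    }  // namespace

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
        if (size == 0) {
            return 0;  // nothing to parse
        }
        parseWithExtractor(data, size);
        return 0;  // always return 0; crashes and sanitizer reports are the signal
    }
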
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 7ad8cc1..330d4fe 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -21,4 +21,12 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 102ac81..7d70548 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -13,4 +13,11 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 7c85952..7d16fb4 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -150,6 +150,7 @@
 
     bool mIsHeif;
     bool mIsAudio;
+    bool mIsUsac = false;
     sp<ItemTable> mItemTable;
 
     /* Shift start offset (move to earlier time) when media_time > 0,
@@ -2879,6 +2880,21 @@
             break;
         }
 
+        case FOURCC("pasp"):
+        {
+            *offset += chunk_size;
+            // this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
+            // ignore otherwise
+            if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
+                status_t err = parsePaspBox(data_offset, chunk_data_size);
+                if (err != OK) {
+                    return err;
+                }
+            }
+
+            break;
+        }
+
         case FOURCC("titl"):
         case FOURCC("perf"):
         case FOURCC("auth"):
@@ -4050,6 +4066,26 @@
     return OK;
 }
 
+status_t MPEG4Extractor::parsePaspBox(off64_t offset, size_t size) {
+    if (size < 8 || size == SIZE_MAX || mLastTrack == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t data[2]; // hSpacing, vSpacing
+    if (mDataSource->readAt(offset, data, 8) < 8) {
+        return ERROR_IO;
+    }
+    uint32_t hSpacing = ntohl(data[0]);
+    uint32_t vSpacing = ntohl(data[1]);
+
+    if (hSpacing != 0 && vSpacing != 0) {
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_WIDTH, hSpacing);
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_HEIGHT, vSpacing);
+    }
+
+    return OK;
+}
+
 status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
     if (size < 4 || size == SIZE_MAX) {
         return ERROR_MALFORMED;
@@ -4476,7 +4512,7 @@
     //AOT_SLS              = 38, /**< SLS                                       */
     //AOT_ER_AAC_ELD       = 39, /**< AAC Enhanced Low Delay                    */
 
-    //AOT_USAC             = 42, /**< USAC                                      */
+    AOT_USAC               = 42, /**< USAC                                      */
     //AOT_SAOC             = 43, /**< SAOC                                      */
     //AOT_LD_MPEGS         = 44, /**< Low Delay MPEG Surround                   */
 
@@ -4624,7 +4660,7 @@
     ABitReader br(csd, csd_size);
     uint32_t objectType = br.getBits(5);
 
-    if (objectType == 31) {  // AAC-ELD => additional 6 bits
+    if (objectType == AOT_ESCAPE) {  // escape value => extended object type in 6 more bits
         objectType = 32 + br.getBits(6);
     }
 
@@ -5000,6 +5036,12 @@
     mIsPcm = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
 
+    int32_t aacObjectType = -1;
+
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_AAC_PROFILE, &aacObjectType)) {
+        mIsUsac = (aacObjectType == AOT_USAC);
+    }
+
     if (mIsPcm) {
         int32_t numChannels = 0;
         int32_t bitsPerSample = 0;
@@ -5993,10 +6035,10 @@
             }
 
             uint32_t syncSampleIndex = sampleIndex;
-            // assume every audio sample is a sync sample. This works around
+            // assume every non-USAC audio sample is a sync sample. This works around
             // seek issues with files that were incorrectly written with an
             // empty or single-sample stss block for the audio track
-            if (err == OK && !mIsAudio) {
+            if (err == OK && (!mIsAudio || mIsUsac)) {
                 err = mSampleTable->findSyncSampleNear(
                         sampleIndex, &syncSampleIndex, findFlags);
             }
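
The new parsePaspBox() above records the 'pasp' box's hSpacing/vSpacing values as
AMEDIAFORMAT_KEY_SAR_WIDTH / AMEDIAFORMAT_KEY_SAR_HEIGHT. As a hedged illustration only (not part
of the extractor), a consumer of those keys could derive the display width from the coded width as
sketched below; the helper name is hypothetical.

    #include <cstdint>

    // Scale the coded width by the sample aspect ratio (hSpacing/vSpacing).
    static int32_t displayWidthFromSar(int32_t codedWidth,
                                       uint32_t sarWidth, uint32_t sarHeight) {
        if (sarWidth == 0 || sarHeight == 0) {
            return codedWidth;  // no valid pixel aspect ratio; assume square pixels
        }
        // 64-bit intermediate avoids overflow for large coded widths.
        return static_cast<int32_t>(
                (static_cast<int64_t>(codedWidth) * sarWidth) / sarHeight);
    }
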
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 1e49d50..bafc7f5 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -160,6 +160,7 @@
     status_t parseChunk(off64_t *offset, int depth);
     status_t parseITunesMetaData(off64_t offset, size_t size);
     status_t parseColorInfo(off64_t offset, size_t size);
+    status_t parsePaspBox(off64_t offset, size_t size);
     status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
     void parseID3v2MetaData(off64_t offset, uint64_t size);
     status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index bc8632c..0eee254 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -19,7 +19,7 @@
         "libaudioclient_headers",
         "libbase_headers",
         "libstagefright_headers",
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -37,7 +37,7 @@
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
         "libstagefright_mpeg2extractor",
-        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2support_nocrypto",
         "libutils",
     ],
 
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index 7aed683..579065e 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -20,4 +20,11 @@
         "libvorbisidec",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
index a18b122..13d4cf6 100644
--- a/media/extractors/tests/ExtractorUnitTest.cpp
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -50,8 +50,29 @@
 constexpr int32_t kRandomSeed = 700;
 constexpr int32_t kUndefined = -1;
 
+enum inputID {
+    // audio streams
+    AAC_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    GSM_1,
+    MIDI_1,
+    MP3_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    HEVC_1,
+    MPEG2_PS_1,
+    MPEG2_TS_1,
+    MPEG4_1,
+    VP9_1,
+    UNKNOWN_ID,
+};
+
 // LookUpTable of clips and metadata for component testing
 static const struct InputData {
+    inputID inpId;
     string mime;
     string inputFile;
     int32_t firstParam;
@@ -59,26 +80,34 @@
     int32_t profile;
     int32_t frameRate;
 } kInputData[] = {
-        {MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC,
          kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined,
          kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
-        {MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288, MPEG2ProfileMain,
-         30},
-        {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
-         MPEG4ProfileSimple, 30},
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined,
+         kUndefined},
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined,
+         kUndefined},
+        {GSM_1, MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined,
+         kUndefined},
+        {MIDI_1, MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
+        {MP3_1, MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined,
+         kUndefined},
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+         kUndefined},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+         kUndefined},
+
         // Test (b/151677264) for MP4 extractor
-        {MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240, HEVCProfileMain,
-         25},
-        {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
-        {MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136, MPEG2ProfileMain, 12},
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240,
+         HEVCProfileMain, 25},
+        {MPEG2_PS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136,
+         MPEG2ProfileMain, 12},
+        {MPEG2_TS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288,
+         MPEG2ProfileMain, 30},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
+         MPEG4ProfileSimple, 30},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
 };
 
 static ExtractorUnitTestEnvironment *gEnv = nullptr;
@@ -148,14 +177,23 @@
     MediaExtractorPluginHelper *mExtractor;
 };
 
-class ExtractorFunctionalityTest : public ExtractorUnitTest,
-                                   public ::testing::TestWithParam<pair<string, string>> {
+class ExtractorFunctionalityTest
+    : public ExtractorUnitTest,
+      public ::testing::TestWithParam<tuple<string /* container */, string /* InputFile */,
+                                            int32_t /* numTracks */, bool /* seekSupported */>> {
   public:
-    virtual void SetUp() override { setupExtractor(GetParam().first); }
+    virtual void SetUp() override {
+        tuple<string, string, int32_t, bool> params = GetParam();
+        mContainer = get<0>(params);
+        mNumTracks = get<2>(params);
+        setupExtractor(mContainer);
+    }
+    string mContainer;
+    int32_t mNumTracks;
 };
 
 class ConfigParamTest : public ExtractorUnitTest,
-                        public ::testing::TestWithParam<pair<string, int32_t>> {
+                        public ::testing::TestWithParam<pair<string, inputID>> {
   public:
     virtual void SetUp() override { setupExtractor(GetParam().first); }
 
@@ -169,7 +207,7 @@
         int32_t frameRate;
     };
 
-    void getFileProperties(int32_t inputIdx, string &inputFile, configFormat &configParam);
+    void getFileProperties(inputID inputId, string &inputFile, configFormat &configParam);
 };
 
 int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
@@ -228,9 +266,16 @@
     return 0;
 }
 
-void ConfigParamTest::getFileProperties(int32_t inputIdx, string &inputFile,
+void ConfigParamTest::getFileProperties(inputID inputId, string &inputFile,
                                         configFormat &configParam) {
-    if (inputIdx >= sizeof(kInputData) / sizeof(kInputData[0])) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t inputIdx = 0;
+    for (; inputIdx < inputDataSize; inputIdx++) {
+        if (inputId == kInputData[inputIdx].inpId) {
+            break;
+        }
+    }
+    if (inputIdx == inputDataSize) {
         return;
     }
     inputFile += kInputData[inputIdx].inputFile;
@@ -316,16 +361,17 @@
     if (mDisableTest) return;
 
     ALOGV("Checks if a valid extractor is created for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
-    ASSERT_EQ(setDataSource(inputFileName), 0)
-            << "SetDataSource failed for" << GetParam().first << "extractor";
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
-    ASSERT_EQ(createExtractor(), 0)
-            << "Extractor creation failed for" << GetParam().first << "extractor";
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
-    // A valid extractor instace should return success for following calls
-    ASSERT_GT(mExtractor->countTracks(), 0);
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     AMediaFormat *format = AMediaFormat_new();
     ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -337,17 +383,18 @@
 TEST_P(ExtractorFunctionalityTest, ExtractorTest) {
     if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Validates %s Extractor for a given input file", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     for (int32_t idx = 0; idx < numTracks; idx++) {
         MediaTrackHelper *track = mExtractor->getTrack(idx);
@@ -388,16 +435,17 @@
     if (mDisableTest) return;
 
     ALOGV("Validates Extractor's meta data for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     AMediaFormat *extractorFormat = AMediaFormat_new();
     ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -462,17 +510,18 @@
 TEST_P(ExtractorFunctionalityTest, MultipleStartStopTest) {
     if (mDisableTest) return;
 
-    ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Test %s extractor for multiple start and stop calls", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     // start/stop the tracks multiple times
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -504,22 +553,25 @@
 TEST_P(ExtractorFunctionalityTest, SeekTest) {
     if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Validates %s Extractor behaviour for different seek modes", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     uint32_t seekFlag = mExtractor->flags();
-    if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
-        cout << "[   WARN   ] Test Skipped. " << GetParam().first
-             << " Extractor doesn't support seek\n";
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
         return;
     }
 
@@ -563,7 +615,7 @@
         // next/previous sync frames but not to samples between two sync frames.
         getSeekablePoints(seekablePoints, track);
         ASSERT_GT(seekablePoints.size(), 0)
-                << "Failed to get seekable points for " << GetParam().first << " extractor";
+                << "Failed to get seekable points for " << mContainer << " extractor";
 
         AMediaFormat *trackFormat = AMediaFormat_new();
         ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
@@ -664,23 +716,148 @@
     seekablePoints.clear();
 }
 
+// Tests the extractors for seek points outside the range (0, ClipDuration)
+TEST_P(ExtractorFunctionalityTest, MonkeySeekTest) {
+    if (mDisableTest) return;
+    // TODO(b/155630778): Enable test for wav extractors
+    if (mExtractorName == WAV) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid seek points", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
+
+    uint32_t seekFlag = mExtractor->flags();
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
+        return;
+    }
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        AMediaFormat *trackMeta = AMediaFormat_new();
+        ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                trackMeta, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+        int64_t clipDuration = 0;
+        AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+        ASSERT_GT(clipDuration, 0) << "Invalid clip duration ";
+        AMediaFormat_delete(trackMeta);
+
+        int64_t seekToTimeStampUs[] = {-clipDuration, clipDuration / 2, clipDuration,
+                                       clipDuration * 2};
+        for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+             mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+            for (int64_t seekTimeUs : seekToTimeStampUs) {
+                MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                        mode | CMediaTrackReadOptions::SEEK, seekTimeUs);
+                ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+                MediaBufferHelper *buffer = nullptr;
+                status = track->read(&buffer, options);
+                if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                    delete options;
+                    continue;
+                }
+                if (buffer) {
+                    AMediaFormat *metaData = buffer->meta_data();
+                    int64_t timeStamp;
+                    AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                    ALOGV("Seeked to timestamp : %lld, requested : %lld", (long long)timeStamp,
+                          (long long)seekTimeUs);
+                    buffer->release();
+                }
+                delete options;
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+}
+
+// Tests extractors for invalid track indices
+TEST_P(ExtractorFunctionalityTest, SanityTest) {
+    if (mDisableTest) return;
+    // TODO(b/155626946): Enable test for MPEG2 TS/PS extractors
+    if (mExtractorName == MPEG2TS || mExtractorName == MPEG2PS) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid tracks", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
+
+    int32_t trackIdx[] = {-1, numTracks};
+    for (int32_t idx : trackIdx) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_EQ(track, nullptr) << "Expected a null track for invalid index " << idx << "\n";
+
+        AMediaFormat *extractorFormat = AMediaFormat_new();
+        ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                extractorFormat, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_NE(OK, status) << "getTrackMetaData should return error for invalid index " << idx;
+        AMediaFormat_delete(extractorFormat);
+    }
+
+    // Validate Extractor's getTrackMetaData for null format
+    AMediaFormat *mediaFormat = nullptr;
+    status = mExtractor->getTrackMetaData(mediaFormat, 0,
+                                          MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+    ASSERT_NE(OK, status) << "getTrackMetaData should return error for null Media format";
+}
+
 // This test validates config params for a given input file.
 // For this test we only take single track files since the focus of this test is
 // to validate the file properties reported by Extractor and not multi-track behavior
 TEST_P(ConfigParamTest, ConfigParamValidation) {
     if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor for input's file properties", GetParam().first.c_str());
+    string container = GetParam().first;
+    ALOGV("Validates %s Extractor for input's file properties", container.c_str());
     string inputFileName = gEnv->getRes();
-    int32_t inputFileIdx = GetParam().second;
+    inputID inputFileId = GetParam().second;
     configFormat configParam;
-    getFileProperties(inputFileIdx, inputFileName, configParam);
+    getFileProperties(inputFileId, inputFileName, configParam);
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << container << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << container << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
     ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
@@ -907,50 +1084,55 @@
                                            make_pair("loudsoftaac.aac", "loudsoftaac.mkv")));
 
 INSTANTIATE_TEST_SUITE_P(ConfigParamTestAll, ConfigParamTest,
-                         ::testing::Values(make_pair("aac", 0),
-                                           make_pair("amr", 1),
-                                           make_pair("amr", 2),
-                                           make_pair("ogg", 3),
-                                           make_pair("wav", 4),
-                                           make_pair("flac", 5),
-                                           make_pair("ogg", 6),
-                                           make_pair("mp3", 7),
-                                           make_pair("midi", 8),
-                                           make_pair("mpeg2ts", 9),
-                                           make_pair("mkv", 10),
-                                           make_pair("mpeg4", 11),
-                                           make_pair("mkv", 12),
-                                           make_pair("mpeg2ps", 13)));
+                         ::testing::Values(make_pair("aac", AAC_1),
+                                           make_pair("amr", AMR_NB_1),
+                                           make_pair("amr", AMR_WB_1),
+                                           make_pair("flac", FLAC_1),
+                                           make_pair("wav", GSM_1),
+                                           make_pair("midi", MIDI_1),
+                                           make_pair("mp3", MP3_1),
+                                           make_pair("ogg", OPUS_1),
+                                           make_pair("ogg", VORBIS_1),
 
-INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorFunctionalityTest,
-                         ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
-                                           make_pair("amr", "testamr.amr"),
-                                           make_pair("amr", "amrwb.wav"),
-                                           make_pair("ogg", "john_cage.ogg"),
-                                           make_pair("wav", "monotestgsm.wav"),
-                                           make_pair("mpeg2ts", "segment000001.ts"),
-                                           make_pair("mpeg2ts", "testac3ts.ts"),
-                                           make_pair("mpeg2ts", "testac4ts.ts"),
-                                           make_pair("mpeg2ts", "testeac3ts.ts"),
-                                           make_pair("flac", "sinesweepflac.flac"),
-                                           make_pair("ogg", "testopus.opus"),
-                                           make_pair("ogg", "sinesweepoggalbumart.ogg"),
-                                           make_pair("midi", "midi_a.mid"),
-                                           make_pair("mkv", "sinesweepvorbis.mkv"),
-                                           make_pair("mkv", "sinesweepmp3lame.mkv"),
-                                           make_pair("mkv", "loudsoftaac.mkv"),
-                                           make_pair("mpeg4", "sinesweepoggmp4.mp4"),
-                                           make_pair("mp3", "sinesweepmp3lame.mp3"),
-                                           make_pair("mp3", "id3test10.mp3"),
-                                           make_pair("mkv", "swirl_144x136_vp9.webm"),
-                                           make_pair("mkv", "swirl_144x136_vp8.webm"),
-                                           make_pair("mkv", "swirl_144x136_avc.mkv"),
-                                           make_pair("mkv", "withoutcues.mkv"),
-                                           make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
-                                           make_pair("mpeg2ps", "programstream.mpeg"),
-                                           make_pair("mpeg4", "testac3mp4.mp4"),
-                                           make_pair("mpeg4", "testeac3mp4.mp4"),
-                                           make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+                                           make_pair("mpeg4", HEVC_1),
+                                           make_pair("mpeg2ps", MPEG2_PS_1),
+                                           make_pair("mpeg2ts", MPEG2_TS_1),
+                                           make_pair("mkv", MPEG4_1),
+                                           make_pair("mkv", VP9_1)));
+
+// Validate extractors for container format, input file and supports seek flag
+INSTANTIATE_TEST_SUITE_P(
+        ExtractorUnitTestAll, ExtractorFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", "loudsoftaac.aac", 1, true),
+                make_tuple("amr", "testamr.amr", 1, true),
+                make_tuple("amr", "amrwb.wav", 1, true),
+                make_tuple("flac", "sinesweepflac.flac", 1, true),
+                make_tuple("midi", "midi_a.mid", 1, true),
+                make_tuple("mkv", "sinesweepvorbis.mkv", 1, true),
+                make_tuple("mkv", "sinesweepmp3lame.mkv", 1, true),
+                make_tuple("mkv", "loudsoftaac.mkv", 1, true),
+                make_tuple("mp3", "sinesweepmp3lame.mp3", 1, true),
+                make_tuple("mp3", "id3test10.mp3", 1, true),
+                make_tuple("mpeg2ts", "segment000001.ts", 2, false),
+                make_tuple("mpeg2ts", "testac3ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testac4ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testeac3ts.ts", 1, false),
+                make_tuple("mpeg4", "sinesweepoggmp4.mp4", 1, true),
+                make_tuple("mpeg4", "testac3mp4.mp4", 1, true),
+                make_tuple("mpeg4", "testeac3mp4.mp4", 1, true),
+                make_tuple("ogg", "john_cage.ogg", 1, true),
+                make_tuple("ogg", "testopus.opus", 1, true),
+                make_tuple("ogg", "sinesweepoggalbumart.ogg", 1, true),
+                make_tuple("wav", "monotestgsm.wav", 1, true),
+
+                make_tuple("mkv", "swirl_144x136_avc.mkv", 1, true),
+                make_tuple("mkv", "withoutcues.mkv", 2, true),
+                make_tuple("mkv", "swirl_144x136_vp9.webm", 1, true),
+                make_tuple("mkv", "swirl_144x136_vp8.webm", 1, true),
+                make_tuple("mpeg2ps", "swirl_144x136_mpeg2.mpg", 1, false),
+                make_tuple("mpeg2ps", "programstream.mpeg", 2, false),
+                make_tuple("mpeg4", "swirl_132x130_mpeg4.mp4", 1, true)));
 
 int main(int argc, char **argv) {
     gEnv = new ExtractorUnitTestEnvironment();
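
ExtractorFunctionalityTest above is now parameterized over a
(container, input file, track count, seek supported) tuple instead of a (container, file) pair,
and each test checks the exact track count and the expected seekability. The following
self-contained GoogleTest sketch mirrors the same tuple-unpacking pattern; the fixture, test and
suite names are illustrative, not part of ExtractorUnitTest.cpp.

    #include <cstdint>
    #include <string>
    #include <tuple>

    #include <gtest/gtest.h>

    class TupleParamDemo
        : public ::testing::TestWithParam<std::tuple<std::string /* container */,
                                                     int32_t /* numTracks */,
                                                     bool /* seekSupported */>> {};

    TEST_P(TupleParamDemo, UnpacksParams) {
        std::string container = std::get<0>(GetParam());
        int32_t numTracks = std::get<1>(GetParam());
        bool seekSupported = std::get<2>(GetParam());
        EXPECT_FALSE(container.empty());
        EXPECT_GT(numTracks, 0);   // mirrors the ASSERT_EQ(numTracks, mNumTracks) checks above
        (void)seekSupported;       // consulted before exercising seek in the real tests
    }

    INSTANTIATE_TEST_SUITE_P(Demo, TupleParamDemo,
                             ::testing::Values(std::make_tuple("mp3", 1, true),
                                               std::make_tuple("mpeg2ts", 2, false)));
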
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 5d38a81..85d4cce 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -19,4 +19,11 @@
         "libfifo",
         "libstagefright_foundation",
     ],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a47f189..7aaf908 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -29,6 +29,8 @@
 #ifndef AAUDIO_AAUDIO_H
 #define AAUDIO_AAUDIO_H
 
+#include <stdbool.h>
+#include <stdint.h>
 #include <time.h>
 
 #ifdef __cplusplus
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index bc973bd..983887b 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -141,6 +141,7 @@
         case AAUDIO_STREAM_STATE_PAUSED:
         case AAUDIO_STREAM_STATE_STOPPING:
         case AAUDIO_STREAM_STATE_STOPPED:
+        case AAUDIO_STREAM_STATE_FLUSHING:
         case AAUDIO_STREAM_STATE_FLUSHED:
             break; // Proceed with starting.
 
@@ -314,8 +315,11 @@
 
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
+    if (state == mState) {
+        return; // no change
+    }
     // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED && mState != state) {
+    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
                 .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
@@ -323,18 +327,18 @@
     }
     // CLOSED is a final state
     if (mState == AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
     // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
                && !(state == AAUDIO_STREAM_STATE_CLOSING
                    || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
         mState = state;
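
setState() above now returns early on no-op transitions and only warns (instead of logging an
error) when a transition out of a terminal state is rejected. As a hedged, standalone sketch (not
part of AudioStream.cpp), the rules it enforces can be written as a predicate over the public
aaudio_stream_state_t values:

    #include <aaudio/AAudio.h>

    // Illustrative restatement of the transition rules enforced by setState().
    static bool isTransitionAllowed(aaudio_stream_state_t from, aaudio_stream_state_t to) {
        if (from == to) {
            return true;   // no-op transition, accepted silently
        }
        if (from == AAUDIO_STREAM_STATE_CLOSED) {
            return false;  // CLOSED is a final state
        }
        if (from == AAUDIO_STREAM_STATE_CLOSING) {
            return to == AAUDIO_STREAM_STATE_CLOSED;  // CLOSING may only finish closing
        }
        if (from == AAUDIO_STREAM_STATE_DISCONNECTED) {
            return to == AAUDIO_STREAM_STATE_CLOSING || to == AAUDIO_STREAM_STATE_CLOSED;
        }
        return true;       // all other transitions are accepted
    }
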
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 6e5110f..3bfa2b7 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -212,9 +212,6 @@
     setSamplesPerFrame(mAudioRecord->channelCount());
 
     int32_t actualSampleRate = mAudioRecord->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
@@ -345,13 +342,17 @@
     // Enable callback before starting AudioRecord to avoid shutting
     // down because of a race condition.
     mCallbackEnabled.store(true);
+    aaudio_stream_state_t originalState = getState();
+    // Set before starting the callback so that we are in the correct state
+    // before updateStateMachine() can be called by the callback.
+    setState(AAUDIO_STREAM_STATE_STARTING);
     mFramesWritten.reset32(); // service writes frames
     mTimestampPosition.reset32();
     status_t err = mAudioRecord->start(); // resets position to zero
     if (err != OK) {
+        mCallbackEnabled.store(false);
+        setState(originalState);
         return AAudioConvert_androidToAAudioResult(err);
-    } else {
-        setState(AAUDIO_STREAM_STATE_STARTING);
     }
     return AAUDIO_OK;
 }
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index ea08361..0427220 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -195,9 +195,6 @@
     setDeviceFormat(mAudioTrack->format());
 
     int32_t actualSampleRate = mAudioTrack->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
@@ -240,11 +237,11 @@
 
     setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
 
-    // Log warning if we did not get what we asked for.
-    ALOGW_IF(actualFlags != flags,
+    // Log if we did not get what we asked for.
+    ALOGD_IF(actualFlags != flags,
              "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
-    ALOGW_IF(actualPerformanceMode != perfMode,
+    ALOGD_IF(actualPerformanceMode != perfMode,
              "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
@@ -307,11 +304,15 @@
     // Enable callback before starting AudioTrack to avoid shutting
     // down because of a race condition.
     mCallbackEnabled.store(true);
+    aaudio_stream_state_t originalState = getState();
+    // Set before starting the callback so that we are in the correct state
+    // before updateStateMachine() can be called by the callback.
+    setState(AAUDIO_STREAM_STATE_STARTING);
     err = mAudioTrack->start();
     if (err != OK) {
+        mCallbackEnabled.store(false);
+        setState(originalState);
         return AAudioConvert_androidToAAudioResult(err);
-    } else {
-        setState(AAUDIO_STREAM_STATE_STARTING);
     }
     return AAUDIO_OK;
 }
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index 1c26615..a20c799 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -124,7 +124,7 @@
 }
 
 enum FunctionToCall {
-    CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH
+    CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH, CALL_RELEASE
 };
 
 void checkStateTransition(aaudio_performance_mode_t perfMode,
@@ -177,11 +177,27 @@
             } else if (originalState == AAUDIO_STREAM_STATE_PAUSED) {
                 ASSERT_EQ(AAUDIO_OK, AAudioStream_requestPause(aaudioStream));
                 inputState = AAUDIO_STREAM_STATE_PAUSING;
+            } else if (originalState == AAUDIO_STREAM_STATE_FLUSHING) {
+                ASSERT_EQ(AAUDIO_OK, AAudioStream_requestPause(aaudioStream));
+                // We can only flush() after pause is complete.
+                ASSERT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                                 AAUDIO_STREAM_STATE_PAUSING,
+                                                                 &state,
+                                                                 1000 * NANOS_PER_MILLISECOND));
+                ASSERT_EQ(AAUDIO_STREAM_STATE_PAUSED, state);
+                ASSERT_EQ(AAUDIO_OK, AAudioStream_requestFlush(aaudioStream));
+                // That will put the stream into the FLUSHING state.
+                // The FLUSHING state will persist until we process functionToCall.
+                // That is because the transition to FLUSHED is caused by the callback,
+                // or by calling write() or waitForStateChange(). But those will not
+                // occur.
+            } else if (originalState == AAUDIO_STREAM_STATE_CLOSING) {
+                ASSERT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
             }
         }
     }
 
-    // Wait until past transitional state.
+    // Wait until we get past the transitional state if requested.
     if (inputState != AAUDIO_STREAM_STATE_UNINITIALIZED) {
         ASSERT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
                                                              inputState,
@@ -208,12 +224,20 @@
             EXPECT_EQ(expectedResult, AAudioStream_requestFlush(aaudioStream));
             transitionalState = AAUDIO_STREAM_STATE_FLUSHING;
             break;
+        case FunctionToCall::CALL_RELEASE:
+            EXPECT_EQ(expectedResult, AAudioStream_release(aaudioStream));
+            // Set to UNINITIALIZED so the waitForStateChange() below
+            // will return immediately with the current state.
+            transitionalState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+            break;
     }
 
-    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
-                                                         transitionalState,
-                                                         &state,
-                                                         1000 * NANOS_PER_MILLISECOND));
+    EXPECT_EQ(AAUDIO_OK,
+            AAudioStream_waitForStateChange(aaudioStream,
+                    transitionalState,
+                    &state,
+                    1000 * NANOS_PER_MILLISECOND));
+
     // We should not change state when a function fails.
     if (expectedResult != AAUDIO_OK) {
         ASSERT_EQ(originalState, expectedState);
@@ -493,6 +517,88 @@
         AAUDIO_STREAM_STATE_FLUSHED);
 }
 
+// FLUSHING ================================================================
+TEST(test_various, aaudio_state_lowlat_flushing_start) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_STREAM_STATE_FLUSHING,
+        FunctionToCall::CALL_START,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_STARTED);
+}
+
+TEST(test_various, aaudio_state_none_flushing_start) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_STREAM_STATE_FLUSHING,
+        FunctionToCall::CALL_START,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_STARTED);
+}
+
+TEST(test_various, aaudio_state_lowlat_flushing_release) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_STREAM_STATE_FLUSHING,
+        FunctionToCall::CALL_RELEASE,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_flushing_release) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_STREAM_STATE_FLUSHING,
+        FunctionToCall::CALL_RELEASE,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_lowlat_starting_release) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_STREAM_STATE_STARTING,
+        FunctionToCall::CALL_RELEASE,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_starting_release) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_STREAM_STATE_STARTING,
+        FunctionToCall::CALL_RELEASE,
+        AAUDIO_OK,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+// CLOSING ================================================================
+TEST(test_various, aaudio_state_lowlat_closing_start) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_STREAM_STATE_CLOSING,
+        FunctionToCall::CALL_START,
+        AAUDIO_ERROR_INVALID_STATE,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_closing_start) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_STREAM_STATE_CLOSING,
+        FunctionToCall::CALL_START,
+        AAUDIO_ERROR_INVALID_STATE,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_lowlat_closing_stop) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_STREAM_STATE_CLOSING,
+        FunctionToCall::CALL_STOP,
+        AAUDIO_ERROR_INVALID_STATE,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_closing_stop) {
+checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_STREAM_STATE_CLOSING,
+        FunctionToCall::CALL_STOP,
+        AAUDIO_ERROR_INVALID_STATE,
+        AAUDIO_STREAM_STATE_CLOSING);
+}
+
 // ==========================================================================
 TEST(test_various, aaudio_set_buffer_size) {
 
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 7efa67c..df47def 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -742,8 +742,6 @@
     void *iMemPointer;
     audio_track_cblk_t* cblk;
     status_t status;
-    std::string flagsAsString;
-    std::string originalFlagsAsString;
 
     if (audioFlinger == 0) {
         ALOGE("%s(%d): Could not get audioflinger", __func__, mPortId);
@@ -922,15 +920,13 @@
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
 
-    InputFlagConverter::toString(mFlags, flagsAsString);
-    InputFlagConverter::toString(mOrigFlags, originalFlagsAsString);
     mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD) + std::to_string(mPortId);
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
         .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
         // the following are immutable (at least until restore)
-        .set(AMEDIAMETRICS_PROP_FLAGS, flagsAsString.c_str())
-        .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, originalFlagsAsString.c_str())
+        .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
+        .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId)
         .set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 005d358..32129f0 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1713,16 +1713,12 @@
     // is the first log of the AudioTrack and must be present before
     // any AudioTrack client logs will be accepted.
 
-    std::string flagsAsString;
-    OutputFlagConverter::toString(mFlags, flagsAsString);
-    std::string originalFlagsAsString;
-    OutputFlagConverter::toString(mOrigFlags, originalFlagsAsString);
     mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) + std::to_string(mPortId);
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
         // the following are immutable
-        .set(AMEDIAMETRICS_PROP_FLAGS, flagsAsString.c_str())
-        .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, originalFlagsAsString.c_str())
+        .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
+        .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId) // dup from key
         .set(AMEDIAMETRICS_PROP_CONTENTTYPE, toString(mAttributes.content_type).c_str())
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 16d2232..6d79aba 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -1001,7 +1001,7 @@
             break;
     }
 
-    // Whitelist of relevant events to trigger log merging.
+    // List of relevant events that trigger log merging.
     // Log merging should activate during audio activity of any kind. These are considered the
     // most relevant events.
     // TODO should select more wisely the items from the list
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index 5fd3912..f6a554b 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -19,6 +19,7 @@
 
 #include <audio_utils/clock.h>
 #include <audio_utils/PowerLog.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <system/audio.h>
 
diff --git a/media/libeffects/data/audio_effects.xml b/media/libeffects/data/audio_effects.xml
index 2e5f529..93a2181 100644
--- a/media/libeffects/data/audio_effects.xml
+++ b/media/libeffects/data/audio_effects.xml
@@ -21,6 +21,7 @@
         <library name="downmix" path="libdownmix.so"/>
         <library name="loudness_enhancer" path="libldnhncr.so"/>
         <library name="dynamics_processing" path="libdynproc.so"/>
+        <library name="haptic_generator" path="libhapticgenerator.so"/>
     </libraries>
 
     <!-- list of effects to load.
@@ -58,6 +59,7 @@
         <effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
         <effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
         <effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
+        <effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
     </effects>
 
     <!-- Audio pre processor configurations.
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
new file mode 100644
index 0000000..ac40e33
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -0,0 +1,49 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// HapticGenerator library
+cc_library_shared {
+    name: "libhapticgenerator",
+
+    vendor: true,
+
+    srcs: [
+        "EffectHapticGenerator.cpp",
+        "Processors.cpp",
+    ],
+
+    cflags: [
+        "-O2", // Turning on the optimization in order to reduce effect processing time.
+               // The latency is around 1/5 less than without the optimization.
+        "-Wall",
+        "-Werror",
+        "-ffast-math", // This is needed for the non-zero coefficients optimization for
+                       // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+                       // with/without `-ffast-math` for more context.
+        "-fvisibility=hidden",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libutils",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
+
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..3d3fce8
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -0,0 +1,485 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "EffectHapticGenerator.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include <errno.h>
+#include <inttypes.h>
+
+#include <audio_effects/effect_hapticgenerator.h>
+#include <audio_utils/format.h>
+#include <system/audio.h>
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "HapticGenerator Library",
+        .implementor = "The Android Open Source Project",
+        .create_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Create,
+        .release_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Release,
+        .get_descriptor = android::audio_effect::haptic_generator::HapticGeneratorLib_GetDescriptor,
+};
+
+namespace android::audio_effect::haptic_generator {
+
+// effect_handle_t interface implementation for haptic generator effect
+const struct effect_interface_s gHapticGeneratorInterface = {
+        HapticGenerator_Process,
+        HapticGenerator_Command,
+        HapticGenerator_GetDescriptor,
+        nullptr /* no process_reverse function, no reference stream needed */
+};
+
+//-----------------------------------------------------------------------------
+// Effect Descriptor
+//-----------------------------------------------------------------------------
+
+// UUIDs for effect types have been generated from http://www.itu.int/ITU-T/asn1/uuid.html
+// Haptic Generator
+static const effect_descriptor_t gHgDescriptor = {
+        FX_IID_HAPTICGENERATOR_, // type
+        {0x97c4acd1, 0x8b82, 0x4f2f, 0x832e, {0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+        0, // FIXME what value should be reported? // cpu load
+        0, // FIXME what value should be reported? // memory usage
+        "Haptic Generator",
+        "The Android Open Source Project"
+};
+
+//-----------------------------------------------------------------------------
+// Internal functions
+//-----------------------------------------------------------------------------
+
+namespace {
+
+int HapticGenerator_Init(struct HapticGeneratorContext *context) {
+    context->itfe = &gHapticGeneratorInterface;
+
+    context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.inputCfg.samplingRate = 0;
+    context->config.inputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.cookie = nullptr;
+    context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.outputCfg.samplingRate = 0;
+    context->config.outputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.cookie = nullptr;
+    context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    memset(&context->param, 0, sizeof(struct HapticGeneratorParam));
+
+    context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+    return 0;
+}
+
+void addBiquadFilter(
+        std::vector<std::function<void(float *, const float *, size_t)>> &processingChain,
+        struct HapticGeneratorProcessorsRecord &processorsRecord,
+        std::shared_ptr<BiquadFilter> filter) {
+    // The processing chain captures the shared pointer of the filter in the lambda.
+    // The processors record also keeps a shared pointer to the filter so that it is possible
+    // to access the filter outside of the processing chain.
+    processorsRecord.filters.push_back(filter);
+    processingChain.push_back([filter](float *out, const float *in, size_t frameCount) {
+            filter->process(out, in, frameCount);
+    });
+}
+
+/**
+ * \brief Build the haptic generator processing chain.
+ *
+ * \param processingChain the chain of processing functions that the processors are appended to
+ * \param processorsRecord a structure to cache all the shared pointers for processors
+ * \param sampleRate the audio sampling rate. Use a float here as it may be used to create filters
+ * \param channelCount haptic channel count
+ */
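+//
+// The chain is, in order: a 2nd-order HPF at 100 Hz, a 2nd-order LPF at 3 kHz, a Ramp
+// (half-wave rectifier), a 2nd-order HPF at 60 Hz, a 2nd-order LPF at 700 Hz, a SlowEnvelope
+// normalizer (5 Hz corner, power -0.3), 2nd-order LPFs at 400 Hz and 500 Hz, cascaded
+// first-order APFs at 400/200 Hz and 100/50 Hz plus a single APF at 25 Hz, a resonant BPF
+// at 150 Hz (Q = 1), and finally a BSF at 150 Hz (zero Q = 8, pole Q = 4).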
+void HapticGenerator_buildProcessingChain(
+        std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        struct HapticGeneratorProcessorsRecord& processorsRecord,
+        float sampleRate, size_t channelCount) {
+    float highPassCornerFrequency = 100.0f;
+    auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    float lowPassCornerFrequency = 3000.0f;
+    auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto ramp = std::make_shared<Ramp>(channelCount);
+    // The processing chain captures the shared pointer of the ramp in the lambda.
+    // The processors record also keeps a shared pointer to the ramp so that it is possible to
+    // access the ramp outside of the processing chain.
+    processorsRecord.ramps.push_back(ramp);
+    processingChain.push_back([ramp](float *out, const float *in, size_t frameCount) {
+            ramp->process(out, in, frameCount);
+    });
+
+    highPassCornerFrequency = 60.0f;
+    hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    lowPassCornerFrequency = 700.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    lowPassCornerFrequency = 5.0f;
+    float normalizationPower = -0.3f;
+    // The processing chain captures the shared pointer of the slow envelope in the lambda.
+    // The processors record also keeps a shared pointer to the slow envelope so that it is
+    // possible to access the slow envelope outside of the processing chain.
+    auto slowEnv = std::make_shared<SlowEnvelope>(
+            lowPassCornerFrequency, sampleRate, normalizationPower, channelCount);
+    processorsRecord.slowEnvs.push_back(slowEnv);
+    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+            slowEnv->process(out, in, frameCount);
+    });
+
+    lowPassCornerFrequency = 400.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+    lowPassCornerFrequency = 500.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto apf = createAPF2(400.0f, 200.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    apf = createAPF2(100.0f, 50.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    float allPassCornerFrequency = 25.0f;
+    apf = createAPF(allPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+
+    float resonantFrequency = 150.0f;
+    float bandpassQ = 1.0f;
+    auto bpf = createBPF(resonantFrequency, bandpassQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bpf);
+
+    float zeroQ = 8.0f;
+    float poleQ = 4.0f;
+    auto bsf = createBSF(resonantFrequency, zeroQ, poleQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bsf);
+}
+
+int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
+    if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
+        config->inputCfg.format != config->outputCfg.format ||
+        config->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT ||
+        config->inputCfg.channels != config->outputCfg.channels ||
+        config->inputCfg.buffer.frameCount != config->outputCfg.buffer.frameCount) {
+        return -EINVAL;
+    }
+    if (&context->config != config) {
+        context->processingChain.clear();
+        context->processorsRecord.filters.clear();
+        context->processorsRecord.ramps.clear();
+        context->processorsRecord.slowEnvs.clear();
+        memcpy(&context->config, config, sizeof(effect_config_t));
+        context->param.audioChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
+        context->param.hapticChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->outputCfg.channels) & AUDIO_CHANNEL_HAPTIC_ALL);
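+        // For example, a channel mask of AUDIO_CHANNEL_OUT_STEREO | AUDIO_CHANNEL_OUT_HAPTIC_A
+        // yields audioChannelCount = 2 and hapticChannelCount = 1.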
+        ALOG_ASSERT(context->param.hapticChannelCount <= 2,
+                    "haptic channel count(%u) is too large",
+                    context->param.hapticChannelCount);
+        context->audioDataBytesPerFrame = audio_bytes_per_frame(
+                context->param.audioChannelCount, (audio_format_t) config->inputCfg.format);
+        for (size_t i = 0; i < context->param.hapticChannelCount; ++i) {
+            // By default, use the first audio channel to generate haptic channels.
+            context->param.hapticChannelSource[i] = 0;
+        }
+
+        HapticGenerator_buildProcessingChain(context->processingChain,
+                                             context->processorsRecord,
+                                             config->inputCfg.samplingRate,
+                                             context->param.hapticChannelCount);
+    }
+    return 0;
+}
+
+int HapticGenerator_Reset(struct HapticGeneratorContext *context) {
+    for (auto& filter : context->processorsRecord.filters) {
+        filter->clear();
+    }
+    for (auto& slowEnv : context->processorsRecord.slowEnvs) {
+        slowEnv->clear();
+    }
+    return 0;
+}
+
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context __unused,
+                                 int32_t param __unused,
+                                 uint32_t size __unused,
+                                 void *value __unused) {
+    ALOGW("SetParameter is not implemented in HapticGenerator");
+    return -ENOSYS;
+}
+
+/**
+ * \brief Run the processing chain to generate haptic data from audio data.
+ *
+ * \param processingChain the processing chain for generating haptic data
+ * \param buf1 a buffer that contains the raw audio data
+ * \param buf2 a second buffer that is large enough to keep all the data
+ * \param frameCount frame count of the data
+ * \return a pointer to the buffer that holds the final output
+ */
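+// The chain "ping-pongs" between buf1 and buf2: each stage writes into the current output
+// buffer and then the input/output pointers are swapped, so the final result ends up in buf1
+// after an even number of stages and in buf2 after an odd number; the returned pointer is
+// whichever buffer holds the last stage's output.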
+float* HapticGenerator_runProcessingChain(
+        const std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        float* buf1, float* buf2, size_t frameCount) {
+    float *in = buf1;
+    float *out = buf2;
+    for (const auto& processingFunc : processingChain) {
+        processingFunc(out, in, frameCount);
+        std::swap(in, out);
+    }
+    return in;
+}
+
+} // namespace (anonymous)
+
+//-----------------------------------------------------------------------------
+// Effect API Implementation
+//-----------------------------------------------------------------------------
+
+/*--- Effect Library Interface Implementation ---*/
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId __unused,
+                                  int32_t ioId __unused,
+                                  effect_handle_t *handle) {
+    if (handle == nullptr || uuid == nullptr) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) != 0) {
+        return -EINVAL;
+    }
+
+    HapticGeneratorContext *context = new HapticGeneratorContext;
+    HapticGenerator_Init(context);
+
+    *handle = (effect_handle_t) context;
+    ALOGV("%s context is %p", __func__, context);
+    return 0;
+}
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) handle;
+    delete context;
+    return 0;
+}
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor) {
+
+    if (descriptor == nullptr || uuid == nullptr) {
+        ALOGE("%s() called with NULL pointer", __func__);
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) == 0) {
+        *descriptor = gHgDescriptor;
+        return 0;
+    }
+
+    return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (inBuffer == nullptr || inBuffer->raw == nullptr
+            || outBuffer == nullptr || outBuffer->raw == nullptr) {
+        return 0;
+    }
+
+    // The audio data must not be modified; it is just written to the
+    // output buffer according to the access mode.
+    size_t audioBytes = context->audioDataBytesPerFrame * inBuffer->frameCount;
+    size_t audioSampleCount = inBuffer->frameCount * context->param.audioChannelCount;
+    if (inBuffer->raw != outBuffer->raw) {
+        if (context->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < audioSampleCount; ++i) {
+                outBuffer->f32[i] += inBuffer->f32[i];
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, audioBytes);
+        }
+    }
+
+    if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+        ALOGE("State(%d) is not HAPTICGENERATOR_STATE_ACTIVE when calling %s",
+                context->state, __func__);
+        return -ENODATA;
+    }
+
+    // Resize buffer if the haptic sample count is greater than buffer size.
+    size_t hapticSampleCount = inBuffer->frameCount * context->param.hapticChannelCount;
+    if (hapticSampleCount > context->inputBuffer.size()) {
+        // The context->inputBuffer and context->outputBuffer must have the same size,
+        // which must be at least the haptic sample count.
+        context->inputBuffer.resize(hapticSampleCount);
+        context->outputBuffer.resize(hapticSampleCount);
+    }
+
+    // Construct input buffer according to haptic channel source
+    for (size_t i = 0; i < inBuffer->frameCount; ++i) {
+        for (size_t j = 0; j < context->param.hapticChannelCount; ++j) {
+            context->inputBuffer[i * context->param.hapticChannelCount + j] =
+                    inBuffer->f32[i * context->param.audioChannelCount
+                            + context->param.hapticChannelSource[j]];
+        }
+    }
+
+    float* hapticOutBuffer = HapticGenerator_runProcessingChain(
+            context->processingChain, context->inputBuffer.data(),
+            context->outputBuffer.data(), inBuffer->frameCount);
+
+    // For haptic data, the haptic playback thread will copy the data from the effect input
+    // buffer, which contains haptic data at the end of the buffer, directly to the sink buffer.
+    // Because of that, copy the haptic data to the input buffer instead of the output buffer.
+    // Note: this may not work with rpc/binder calls.
+    memcpy_by_audio_format(static_cast<char*>(inBuffer->raw) + audioBytes,
+                           static_cast<audio_format_t>(context->config.outputCfg.format),
+                           hapticOutBuffer,
+                           AUDIO_FORMAT_PCM_FLOAT,
+                           hapticSampleCount);
+
+    return 0;
+}
+
+int32_t HapticGenerator_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+                                void *cmdData, uint32_t *replySize, void *replyData) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    ALOGV("HapticGenerator_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+        case EFFECT_CMD_INIT:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Init(context);
+            break;
+
+        case EFFECT_CMD_SET_CONFIG:
+            if (cmdData == nullptr || cmdSize != sizeof(effect_config_t)
+                || replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Configure(
+                    context, (effect_config_t *) cmdData);
+            break;
+
+        case EFFECT_CMD_RESET:
+            HapticGenerator_Reset(context);
+            break;
+
+        case EFFECT_CMD_GET_PARAM:
+            ALOGV("HapticGenerator_Command EFFECT_CMD_GET_PARAM cmdData %p,"
+                  "*replySize %u, replyData: %p",
+                  cmdData, *replySize, replyData);
+            break;
+
+        case EFFECT_CMD_SET_PARAM: {
+            ALOGV("HapticGenerator_Command EFFECT_CMD_SET_PARAM cmdSize %d cmdData %p, "
+                  "*replySize %u, replyData %p", cmdSize, cmdData, *replySize, replyData);
+            if (cmdData == nullptr || (cmdSize < (int) (sizeof(effect_param_t) + sizeof(int32_t)))
+                || replyData == nullptr || replySize == nullptr ||
+                *replySize != (int) sizeof(int32_t)) {
+                return -EINVAL;
+            }
+            effect_param_t *cmd = (effect_param_t *) cmdData;
+            *(int *) replyData = HapticGenerator_SetParameter(
+                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+        }
+            break;
+
+        case EFFECT_CMD_ENABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_INITIALIZED) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_ACTIVE;
+            ALOGV("EFFECT_CMD_ENABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_DISABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+            ALOGV("EFFECT_CMD_DISABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_SET_VOLUME:
+        case EFFECT_CMD_SET_DEVICE:
+        case EFFECT_CMD_SET_AUDIO_MODE:
+            break;
+
+        default:
+            ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
+            return -EINVAL;
+    }
+
+    return 0;
+}
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self, effect_descriptor_t *descriptor) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr ||
+        context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(descriptor, &gHgDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
new file mode 100644
index 0000000..4a2308b
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTHAPTICGENERATOR_H_
+#define ANDROID_EFFECTHAPTICGENERATOR_H_
+
+#include <functional>
+#include <vector>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+
+#include "Processors.h"
+
+namespace android::audio_effect::haptic_generator {
+
+//-----------------------------------------------------------------------------
+// Definition
+//-----------------------------------------------------------------------------
+
+enum hapticgenerator_state_t {
+    HAPTICGENERATOR_STATE_UNINITIALIZED,
+    HAPTICGENERATOR_STATE_INITIALIZED,
+    HAPTICGENERATOR_STATE_ACTIVE,
+};
+
+// parameters for each haptic generator
+struct HapticGeneratorParam {
+    uint32_t hapticChannelSource[2]; // The audio channels used to generate haptic channels.
+                                     // The first entry is used to generate HAPTIC_A and the
+                                     // second entry is used to generate HAPTIC_B. Each value
+                                     // is the offset of the source audio channel.
+    uint32_t audioChannelCount;
+    uint32_t hapticChannelCount;
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+    std::vector<std::shared_ptr<BiquadFilter>> filters;
+    std::vector<std::shared_ptr<Ramp>> ramps;
+    std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+};
+
+// A structure to keep all the context for HapticGenerator.
+struct HapticGeneratorContext {
+    const struct effect_interface_s *itfe;
+    effect_config_t config;
+    hapticgenerator_state_t state;
+    struct HapticGeneratorParam param;
+    size_t audioDataBytesPerFrame;
+
+    // A cache for all shared pointers of the HapticGenerator
+    struct HapticGeneratorProcessorsRecord processorsRecord;
+
+    // A vector of functions recording the processing chain for the haptic-generating algorithm.
+    // The three parameters of the processing functions are pointer to output buffer, pointer to
+    // input buffer and frame count.
+    std::vector<std::function<void(float*, const float*, size_t)>> processingChain;
+
+    // inputBuffer holds the input data for the generating algorithm. It is constructed
+    // according to HapticGeneratorParam.hapticChannelSource.
+    std::vector<float> inputBuffer;
+
+    // outputBuffer has the same length as inputBuffer. It can be used as an intermediate
+    // buffer in the generating algorithm.
+    std::vector<float> outputBuffer;
+};
+
+//-----------------------------------------------------------------------------
+// Effect API
+//-----------------------------------------------------------------------------
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId,
+                                  int32_t ioId,
+                                  effect_handle_t *handle);
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle);
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor);
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer,
+                                audio_buffer_t *outBuffer);
+
+int32_t HapticGenerator_Command(effect_handle_t self,
+                                uint32_t cmdCode,
+                                uint32_t cmdSize,
+                                void *cmdData,
+                                uint32_t *replySize,
+                                void *replyData);
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self,
+                                      effect_descriptor_t *descriptor);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // ANDROID_EFFECTHAPTICGENERATOR_H_
diff --git a/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2 b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
new file mode 100644
index 0000000..179b5dc
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG_Processors"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <assert.h>
+
+#include <cmath>
+
+#include "Processors.h"
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#ifndef USE_NEON
+#define USE_NEON (true)
+#endif
+#else
+#define USE_NEON (false)
+#endif
+#if USE_NEON
+#include <arm_neon.h>
+#endif
+
+namespace android::audio_effect::haptic_generator {
+
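+// getRealPoleZ maps an analog first-order pole at s = -2*pi*f to the z-plane with the
+// matched-z (pole-zero matching) transform, z = exp(s / sampleRate). For example, a 100 Hz
+// corner at a 48 kHz sample rate gives s ≈ -628.3 rad/s and a z-plane pole at
+// exp(-628.3 / 48000) ≈ 0.987.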
+float getRealPoleZ(float cornerFrequency, float sampleRate) {
+    // This will be a pole of a first order filter.
+    float realPoleS = -2 * M_PI * cornerFrequency;
+    return exp(realPoleS / sampleRate); // zero-pole matching
+}
+
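+// getComplexPoleZ does the same for the complex pole pair of 1/(s^2 + s/q + 1). For example,
+// ringingFrequency = 150 Hz with q = 1 at 48 kHz gives a pole radius of
+// exp(-2*pi*150 / (2 * 48000)) ≈ 0.990 and an angle of 2*pi*150 / 48000 ≈ 0.0196 rad,
+// i.e. a pole at roughly 0.9900 + 0.0194i (plus its complex conjugate).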
+std::pair<float, float> getComplexPoleZ(float ringingFrequency, float q, float sampleRate) {
+    // This is the pole for 1/(s^2 + s/q + 1) in normalized frequency. The other pole is
+    // the complex conjugate of this.
+    float poleImagS = 2 * M_PI * ringingFrequency;
+    float poleRealS = -poleImagS / (2 * q);
+    float poleRadius = exp(poleRealS / sampleRate);
+    float poleImagZ = poleRadius * sin(poleImagS / sampleRate);
+    float poleRealZ = poleRadius * cos(poleImagS / sampleRate);
+    return {poleRealZ, poleImagZ};
+}
+
+// Implementation of Ramp
+
+Ramp::Ramp(size_t channelCount) : mChannelCount(channelCount) {}
+
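+// Half-wave rectification: any negative sample becomes 0. The NEON path below handles two
+// samples per iteration with vmax against zero; the scalar loop finishes whatever is left.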
+void Ramp::process(float *out, const float *in, size_t frameCount) {
+    size_t i = 0;
+#if USE_NEON
+    size_t sampleCount = frameCount * mChannelCount;
+    float32x2_t allZero = vdup_n_f32(0.0f);
+    while (i + 1 < sampleCount) {
+        vst1_f32(out, vmax_f32(vld1_f32(in), allZero));
+        in += 2;
+        out += 2;
+        i += 2;
+    }
+#endif // USE_NEON
+    for (; i < frameCount * mChannelCount; ++i) {
+        *out = *in >= 0.0f ? *in : 0.0f;
+        out++;
+        in++;
+    }
+}
+
+// Implementation of SlowEnvelope
+
+SlowEnvelope::SlowEnvelope(
+        float cornerFrequency,
+        float sampleRate,
+        float normalizationPower,
+        size_t channelCount)
+        : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
+          mNormalizationPower(normalizationPower),
+          mChannelCount(channelCount),
+          mEnv(0.25 * (sampleRate / (2 * M_PI * cornerFrequency))) {}
+
+void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfInBuffer.size()) {
+        mLpfInBuffer.resize(sampleCount, mEnv);
+        mLpfOutBuffer.resize(sampleCount);
+    }
+    mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
+    for (size_t i = 0; i < sampleCount; ++i) {
+        *out = *in * pow(mLpfOutBuffer[i], mNormalizationPower);
+        out++;
+        in++;
+    }
+}
+
+void SlowEnvelope::clear() {
+    mLpf->clear();
+}
+
+// Implementation of helper functions
+
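+// Cascading two first-order sections (b0 + b1*z^-1) / (1 + a1*z^-1) multiplies the transfer
+// functions: the numerator product is b0*b0' + (b0*b1' + b1*b0')*z^-1 + b1*b1'*z^-2 and the
+// denominator product is 1 + (a1 + a1')*z^-1 + a1*a1'*z^-2, which is exactly the biquad
+// coefficient set returned below.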
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                   const BiquadFilterCoefficients &coefs2) {
+    assert(coefs1[2] == 0.0f);
+    assert(coefs2[2] == 0.0f);
+    assert(coefs1[4] == 0.0f);
+    assert(coefs2[4] == 0.0f);
+    return {coefs1[0] * coefs2[0],
+            coefs1[0] * coefs2[1] + coefs1[1] * coefs2[0],
+            coefs1[1] * coefs2[1],
+            coefs1[3] + coefs2[3],
+            coefs1[3] * coefs2[3]};
+}
+
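+// With b0 = b1 = 0.5 * (1 - p) and a1 = -p, the DC gain (z = 1) is (b0 + b1) / (1 + a1) = 1
+// and the gain at Nyquist (z = -1) is (b0 - b1) / (1 - a1) = 0, so the section has unity gain
+// at DC and a zero at Nyquist.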
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    // This is a zero at Nyquist.
+    coefficient[0] = 0.5f * (1 - realPoleZ);
+    coefficient[1] = coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ; // This is the traditional 1/(s+1) filter.
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
+
+std::shared_ptr<BiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<BiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<BiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    // Note: this is valid only when the corner frequency is less than Nyquist / 2.
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+
+    // Note: this is a zero at DC
+    coefficient[0] = 0.5f * (1 + realPoleZ);
+    coefficient[1] = -coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+BiquadFilterCoefficients apfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    float zeroZ = 1.0f / realPoleZ;
+    coefficient[0] = (1.0f - realPoleZ) / (1.0f - zeroZ);
+    coefficient[1] = -coefficient[0] * zeroZ;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
+
+std::shared_ptr<BiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = apfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<BiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefs1 = apfCoefs(cornerFrequency1, sampleRate);
+    BiquadFilterCoefficients coefs2 = apfCoefs(cornerFrequency2, sampleRate);
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefs1, coefs2));
+}
+
+std::shared_ptr<BiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+    // Note: this is not a standard cookbook BPF, but a low-pass filter with a zero at DC.
+    coefficient[0] = 1.0f;
+    coefficient[1] = -1.0f;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -2 * real;
+    coefficient[4] = real * real + img * img;
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
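+// The band-stop filter pairs a complex zero pair and a complex pole pair at the same ringing
+// frequency but with different Q values; norm scales the numerator so that the DC gain
+// norm * (1 + zeroCoeff1 + zeroCoeff2) / (1 + poleCoeff1 + poleCoeff2) is exactly 1.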
+std::shared_ptr<BiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+    float zeroCoeff1 = -2 * zeroReal;
+    float zeroCoeff2 = zeroReal * zeroReal + zeroImg * zeroImg;
+    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+    float poleCoeff1 = -2 * poleReal;
+    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+    coefficient[0] = 1.0f * norm;
+    coefficient[1] = zeroCoeff1 * norm;
+    coefficient[2] = zeroCoeff2 * norm;
+    coefficient[3] = poleCoeff1;
+    coefficient[4] = poleCoeff2;
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
new file mode 100644
index 0000000..e14458b
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+#define _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include <audio_utils/BiquadFilter.h>
+
+using android::audio_utils::BiquadFilter;
+using BiquadFilterCoefficients = std::array<float, android::audio_utils::kBiquadNumCoefs>;
+
+namespace android::audio_effect::haptic_generator {
+
+// A class providing a process function that makes input data non-negative.
+class Ramp {
+public:
+    explicit Ramp(size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+private:
+    const size_t mChannelCount;
+};
+
+
+class SlowEnvelope {
+public:
+    SlowEnvelope(float cornerFrequency, float sampleRate,
+                 float normalizationPower, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void clear();
+
+private:
+    const std::shared_ptr<BiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    std::vector<float> mLpfOutBuffer;
+    const float mNormalizationPower;
+    const size_t mChannelCount;
+    const float mEnv;
+};
+
+// Helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                  const BiquadFilterCoefficients &coefs2);
+
+std::shared_ptr<BiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded LPFs with the same corner frequency.
+std::shared_ptr<BiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+// Create two cascaded HPFs with the same corner frequency.
+std::shared_ptr<BiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded APFs with two different corner frequencies.
+std::shared_ptr<BiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c87635f..16cd0ad 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -8,12 +8,6 @@
 
     srcs: ["PreProcessing.cpp"],
 
-    include_dirs: [
-        "external/webrtc",
-        "external/webrtc/webrtc/modules/include",
-        "external/webrtc/webrtc/modules/audio_processing/include",
-    ],
-
     shared_libs: [
         "libwebrtc_audio_preprocessing",
         "libspeexresampler",
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 818548d..4ad780c 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -49,10 +49,8 @@
     path: "aidl",
 }
 
-aidl_interface {
-    name: "resourcemanager_aidl_interface",
-    unstable: true,
-    local_include_dir: "aidl",
+filegroup {
+    name: "resourcemanager_aidl",
     srcs: [
         "aidl/android/media/IResourceManagerClient.aidl",
         "aidl/android/media/IResourceManagerService.aidl",
@@ -61,6 +59,16 @@
         "aidl/android/media/MediaResourceParcel.aidl",
         "aidl/android/media/MediaResourcePolicyParcel.aidl",
     ],
+    path: "aidl",
+}
+
+aidl_interface {
+    name: "resourcemanager_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":resourcemanager_aidl",
+    ],
 }
 
 cc_library_shared {
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 0906411..84388c9 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -147,6 +147,7 @@
 #define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
 #define AMEDIAMETRICS_PROP_THROTTLEMS     "throttleMs"     // double
 #define AMEDIAMETRICS_PROP_TRACKID        "trackId"        // int32 port id of track/record
+#define AMEDIAMETRICS_PROP_TRAITS         "traits"         // string
 #define AMEDIAMETRICS_PROP_TYPE           "type"           // string (thread type)
 #define AMEDIAMETRICS_PROP_UNDERRUN       "underrun"       // int32
 #define AMEDIAMETRICS_PROP_UNDERRUNFRAMES "underrunFrames" // int64_t from Thread
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index e6bb2e1..439dbe8 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -432,7 +432,7 @@
             }
             if (mDataSource == nullptr) {
                 ALOGD("FileSource local");
-                mDataSource = new PlayerServiceFileSource(mFd.get(), mOffset, mLength);
+                mDataSource = new PlayerServiceFileSource(dup(mFd.get()), mOffset, mLength);
             }
         }
 
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index b6dca8d..29ed65a 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -25,6 +25,7 @@
         "aidl/android/media/ITranscodingClientCallback.aidl",
         "aidl/android/media/TranscodingErrorCode.aidl",
         "aidl/android/media/TranscodingJobPriority.aidl",
+        "aidl/android/media/TranscodingJobStats.aidl",
         "aidl/android/media/TranscodingType.aidl",
         "aidl/android/media/TranscodingVideoCodecType.aidl",
         "aidl/android/media/TranscodingVideoTrackFormat.aidl",
@@ -48,6 +49,7 @@
         "TranscodingClientManager.cpp",
         "TranscodingJobScheduler.cpp",
         "TranscodingUidPolicy.cpp",
+        "TranscoderWrapper.cpp",
     ],
 
     shared_libs: [
@@ -57,6 +59,7 @@
         "libutils",
         "libmediatranscoder",
         "libbinder",
+        "libmediandk",
     ],
 
     export_include_dirs: ["include"],
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
new file mode 100644
index 0000000..aaa15c4
--- /dev/null
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -0,0 +1,456 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscoderWrapper"
+
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/TranscoderWrapper.h>
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingVideoCodecType;
+using ::aidl::android::media::TranscodingVideoTrackFormat;
+
+static TranscodingErrorCode toTranscodingError(media_status_t status) {
+    switch (status) {
+    case AMEDIA_OK:
+        return TranscodingErrorCode::kNoError;
+    case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE:  // FALLTHRU
+    case AMEDIACODEC_ERROR_RECLAIMED:
+        return TranscodingErrorCode::kInsufficientResources;
+    case AMEDIA_ERROR_MALFORMED:
+        return TranscodingErrorCode::kMalformed;
+    case AMEDIA_ERROR_UNSUPPORTED:
+        return TranscodingErrorCode::kUnsupported;
+    case AMEDIA_ERROR_INVALID_OBJECT:  // FALLTHRU
+    case AMEDIA_ERROR_INVALID_PARAMETER:
+        return TranscodingErrorCode::kInvalidParameter;
+    case AMEDIA_ERROR_INVALID_OPERATION:
+        return TranscodingErrorCode::kInvalidOperation;
+    case AMEDIA_ERROR_IO:
+        return TranscodingErrorCode::kErrorIO;
+    case AMEDIA_ERROR_UNKNOWN:  // FALLTHRU
+    default:
+        return TranscodingErrorCode::kUnknown;
+    }
+}
+
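+// Builds the video format override passed to the transcoder. For example, a request for
+// kHevc on a track whose original MIME is video/avc produces a format that only overrides
+// the MIME (and the bitrate, if one was requested); a request that already matches the
+// source returns nullptr so the track is passed through unchanged.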
+static AMediaFormat* getVideoFormat(
+        const char* originalMime,
+        const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
+    if (requestedFormat == std::nullopt) {
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    bool changed = false;
+    if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
+        strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+        changed = true;
+    } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
+               strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        changed = true;
+    }
+    if (requestedFormat->bitrateBps > 0) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+        changed = true;
+    }
+    // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
+    // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
+    if (!changed) {
+        AMediaFormat_delete(format);
+        // Use null format for passthru.
+        format = nullptr;
+    }
+    return format;
+}
+
+//static
+const char* TranscoderWrapper::toString(Event::Type type) {
+    switch (type) {
+    case Event::Start:
+        return "Start";
+    case Event::Pause:
+        return "Pause";
+    case Event::Resume:
+        return "Resume";
+    case Event::Stop:
+        return "Stop";
+    case Event::Finish:
+        return "Finish";
+    case Event::Error:
+        return "Error";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+class TranscoderWrapper::CallbackImpl : public MediaTranscoder::CallbackInterface {
+public:
+    CallbackImpl(const std::shared_ptr<TranscoderWrapper>& owner, ClientIdType clientId,
+                 JobIdType jobId)
+          : mOwner(owner), mClientId(clientId), mJobId(jobId) {}
+
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onFinish(mClientId, mJobId);
+        }
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onError(mClientId, mJobId, toTranscodingError(error));
+        }
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        ALOGV("%s: job {%lld, %d}, progress %d", __FUNCTION__, (long long)mClientId, mJobId,
+              progress);
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<const Parcel>& pausedState
+                                             __unused) override {
+        ALOGV("%s: job {%lld, %d}", __FUNCTION__, (long long)mClientId, mJobId);
+    }
+
+private:
+    std::weak_ptr<TranscoderWrapper> mOwner;
+    ClientIdType mClientId;
+    JobIdType mJobId;
+};
+
+TranscoderWrapper::TranscoderWrapper() : mCurrentClientId(0), mCurrentJobId(-1) {
+    std::thread(&TranscoderWrapper::threadLoop, this).detach();
+}
+
+void TranscoderWrapper::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+    mCallback = cb;
+}
+
+void TranscoderWrapper::start(ClientIdType clientId, JobIdType jobId,
+                              const TranscodingRequestParcel& request,
+                              const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Start, clientId, jobId, [=] {
+        TranscodingErrorCode err = handleStart(clientId, jobId, request, clientCb);
+
+        auto callback = mCallback.lock();
+        if (err != TranscodingErrorCode::kNoError) {
+            cleanup();
+
+            if (callback != nullptr) {
+                callback->onError(clientId, jobId, err);
+            }
+        } else {
+            if (callback != nullptr) {
+                callback->onStarted(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::pause(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Pause, clientId, jobId, [=] {
+        TranscodingErrorCode err = handlePause(clientId, jobId);
+
+        cleanup();
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            if (err != TranscodingErrorCode::kNoError) {
+                callback->onError(clientId, jobId, err);
+            } else {
+                callback->onPaused(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::resume(ClientIdType clientId, JobIdType jobId,
+                               const TranscodingRequestParcel& request,
+                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Resume, clientId, jobId, [=] {
+        TranscodingErrorCode err = handleResume(clientId, jobId, request, clientCb);
+
+        auto callback = mCallback.lock();
+        if (err != TranscodingErrorCode::kNoError) {
+            cleanup();
+
+            if (callback != nullptr) {
+                callback->onError(clientId, jobId, err);
+            }
+        } else {
+            if (callback != nullptr) {
+                callback->onResumed(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::stop(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Stop, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            // Cancelling the currently running job.
+            media_status_t err = mTranscoder->cancel();
+            if (err != AMEDIA_OK) {
+                ALOGE("failed to stop transcoder: %d", err);
+            } else {
+                ALOGI("transcoder stopped");
+            }
+            cleanup();
+        } else {
+            // For a job that is not currently running, release any pausedState for the job.
+            mPausedStateMap.erase(JobKeyType(clientId, jobId));
+        }
+        // No callback needed for stop.
+    });
+}
+
+void TranscoderWrapper::onFinish(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Finish, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onFinish(clientId, jobId);
+        }
+    });
+}
+
+void TranscoderWrapper::onError(ClientIdType clientId, JobIdType jobId,
+                                TranscodingErrorCode error) {
+    queueEvent(Event::Error, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onError(clientId, jobId, error);
+        }
+    });
+}
+
+TranscodingErrorCode TranscoderWrapper::setupTranscoder(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        const std::shared_ptr<const Parcel>& pausedState) {
+    if (clientCb == nullptr) {
+        ALOGE("client callback is null");
+        return TranscodingErrorCode::kInvalidParameter;
+    }
+
+    if (mTranscoder != nullptr) {
+        ALOGE("transcoder already running");
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    Status status;
+    ::ndk::ScopedFileDescriptor srcFd, dstFd;
+    status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
+    if (!status.isOk() || srcFd.get() < 0) {
+        ALOGE("failed to open source");
+        return TranscodingErrorCode::kErrorIO;
+    }
+
+    // Open the destination file with "rw", as the transcoder could potentially reuse part
+    // of it for the resume case. We might want to further differentiate and open with "w"
+    // only for start.
+    status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
+    if (!status.isOk() || dstFd.get() < 0) {
+        ALOGE("failed to open destination");
+        return TranscodingErrorCode::kErrorIO;
+    }
+
+    mCurrentClientId = clientId;
+    mCurrentJobId = jobId;
+    mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, jobId);
+    mTranscoder = MediaTranscoder::create(mTranscoderCb, pausedState);
+    if (mTranscoder == nullptr) {
+        ALOGE("failed to create transcoder");
+        return TranscodingErrorCode::kUnknown;
+    }
+
+    media_status_t err = mTranscoder->configureSource(srcFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure source: %d", err);
+        return toTranscodingError(err);
+    }
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
+    if (trackFormats.size() == 0) {
+        ALOGE("failed to get track formats!");
+        return TranscodingErrorCode::kMalformed;
+    }
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* format = nullptr;
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (!strncmp(mime, "video/", 6)) {
+            format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+        }
+
+        err = mTranscoder->configureTrackFormat(i, format);
+        if (format != nullptr) {
+            AMediaFormat_delete(format);
+        }
+        if (err != AMEDIA_OK) {
+            ALOGE("failed to configure track format for track %d: %d", i, err);
+            return toTranscodingError(err);
+        }
+    }
+
+    err = mTranscoder->configureDestination(dstFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure dest: %d", err);
+        return toTranscodingError(err);
+    }
+
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handleStart(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    ALOGI("setting up transcoder for start");
+    TranscodingErrorCode err = setupTranscoder(clientId, jobId, request, clientCb);
+    if (err != TranscodingErrorCode::kNoError) {
+        ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+        return err;
+    }
+
+    media_status_t status = mTranscoder->start();
+    if (status != AMEDIA_OK) {
+        ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, status);
+        return toTranscodingError(status);
+    }
+
+    ALOGI("%s: transcoder started", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handlePause(ClientIdType clientId, JobIdType jobId) {
+    if (mTranscoder == nullptr) {
+        ALOGE("%s: transcoder is not running", __FUNCTION__);
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    if (clientId != mCurrentClientId || jobId != mCurrentJobId) {
+        ALOGW("%s: stopping job {%lld, %d} that's not current job {%lld, %d}", __FUNCTION__,
+              (long long)clientId, jobId, (long long)mCurrentClientId, mCurrentJobId);
+    }
+
+    std::shared_ptr<const Parcel> pauseStates;
+    media_status_t err = mTranscoder->pause(&pauseStates);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
+        return toTranscodingError(err);
+    }
+    mPausedStateMap[JobKeyType(clientId, jobId)] = pauseStates;
+
+    ALOGI("%s: transcoder paused", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handleResume(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    std::shared_ptr<const Parcel> pausedState;
+    auto it = mPausedStateMap.find(JobKeyType(clientId, jobId));
+    if (it != mPausedStateMap.end()) {
+        pausedState = it->second;
+        mPausedStateMap.erase(it);
+    } else {
+        ALOGE("%s: can't find paused state", __FUNCTION__);
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    ALOGI("setting up transcoder for resume");
+    TranscodingErrorCode err = setupTranscoder(clientId, jobId, request, clientCb, pausedState);
+    if (err != TranscodingErrorCode::kNoError) {
+        ALOGE("%s: failed to setup transcoder", __FUNCTION__);
+        return err;
+    }
+
+    media_status_t status = mTranscoder->resume();
+    if (status != AMEDIA_OK) {
+        ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+        return toTranscodingError(status);
+    }
+
+    ALOGI("%s: transcoder resumed", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+void TranscoderWrapper::cleanup() {
+    mCurrentClientId = 0;
+    mCurrentJobId = -1;
+    mTranscoderCb = nullptr;
+    mTranscoder = nullptr;
+}
+
+void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                                   const std::function<void()> runnable) {
+    ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)clientId, jobId, toString(type));
+
+    std::scoped_lock lock{mLock};
+
+    mQueue.push_back({type, clientId, jobId, runnable});
+    mCondition.notify_one();
+}
+
+void TranscoderWrapper::threadLoop() {
+    std::unique_lock<std::mutex> lock{mLock};
+    // TranscoderWrapper currently lives in the transcoding service for as long as
+    // MediaTranscodingService itself does, so this loop never exits.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            mCondition.wait(lock);
+        }
+
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
+
+        ALOGD("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId, event.jobId,
+              toString(event.type));
+
+        lock.unlock();
+        event.runnable();
+        lock.lock();
+    }
+}
+
+}  // namespace android
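The queueEvent()/threadLoop() pair above is a plain mutex + condition_variable work queue: events are appended under mLock, and the loop pops one at a time and runs its runnable outside the lock. A minimal standalone sketch of the same pattern, using only the standard library (class and member names here are illustrative, not the actual TranscoderWrapper API):

#include <condition_variable>
#include <functional>
#include <list>
#include <mutex>
#include <thread>

// Sketch only: serializes posted runnables on one worker thread, the way
// TranscoderWrapper::threadLoop() serializes transcoding events.
class SerializedWorkQueue {
public:
    SerializedWorkQueue() : mThread([this] { loop(); }) { mThread.detach(); }

    void post(std::function<void()> runnable) {
        std::scoped_lock lock{mLock};
        mQueue.push_back(std::move(runnable));
        mCondition.notify_one();
    }

private:
    void loop() {
        std::unique_lock<std::mutex> lock{mLock};
        while (true) {  // never exits; assumes the owner lives as long as the process
            while (mQueue.empty()) {
                mCondition.wait(lock);
            }
            auto runnable = std::move(mQueue.front());
            mQueue.pop_front();
            lock.unlock();  // run the work outside the lock, as threadLoop() does
            runnable();
            lock.lock();
        }
    }

    std::mutex mLock;
    std::condition_variable mCondition;
    std::list<std::function<void()>> mQueue;
    std::thread mThread;
};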
diff --git a/media/libmediatranscoding/TranscodingJobScheduler.cpp b/media/libmediatranscoding/TranscodingJobScheduler.cpp
index ea07c5f..3e4f319 100644
--- a/media/libmediatranscoding/TranscodingJobScheduler.cpp
+++ b/media/libmediatranscoding/TranscodingJobScheduler.cpp
@@ -76,9 +76,11 @@
         // the topJob now.
         if (!mResourceLost) {
             if (topJob->state == Job::NOT_STARTED) {
-                mTranscoder->start(topJob->key.first, topJob->key.second, curJob->request);
+                mTranscoder->start(topJob->key.first, topJob->key.second, topJob->request,
+                                   topJob->callback.lock());
             } else if (topJob->state == Job::PAUSED) {
-                mTranscoder->resume(topJob->key.first, topJob->key.second);
+                mTranscoder->resume(topJob->key.first, topJob->key.second, topJob->request,
+                                    topJob->callback.lock());
             }
             topJob->state = Job::RUNNING;
         }
@@ -291,15 +293,16 @@
     return true;
 }
 
-void TranscodingJobScheduler::onFinish(ClientIdType clientId, JobIdType jobId) {
+void TranscodingJobScheduler::notifyClient(ClientIdType clientId, JobIdType jobId,
+                                           const char* reason,
+                                           std::function<void(const JobKeyType&)> func) {
     JobKeyType jobKey = std::make_pair(clientId, jobId);
 
-    ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
-
     std::scoped_lock lock{mLock};
 
     if (mJobMap.count(jobKey) == 0) {
-        ALOGW("ignoring finish for non-existent job");
+        ALOGW("%s: ignoring %s for job %s that doesn't exist", __FUNCTION__, reason,
+              jobToString(jobKey).c_str());
         return;
     }
 
@@ -307,90 +310,91 @@
     // to client if the job is paused. Transcoder could have posted finish when
     // we're pausing it, and the finish arrived after we changed current job.
     if (mJobMap[jobKey].state == Job::NOT_STARTED) {
-        ALOGW("ignoring finish for job that was never started");
+        ALOGW("%s: ignoring %s for job %s that was never started", __FUNCTION__, reason,
+              jobToString(jobKey).c_str());
         return;
     }
 
-    {
-        auto clientCallback = mJobMap[jobKey].callback.lock();
-        if (clientCallback != nullptr) {
-            clientCallback->onTranscodingFinished(jobId, TranscodingResultParcel({jobId, 0}));
+    ALOGV("%s: job %s %s", __FUNCTION__, jobToString(jobKey).c_str(), reason);
+    func(jobKey);
+}
+
+void TranscodingJobScheduler::onStarted(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "started", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingStarted(jobId);
         }
-    }
+    });
+}
 
-    // Remove the job.
-    removeJob_l(jobKey);
+void TranscodingJobScheduler::onPaused(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "paused", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingPaused(jobId);
+        }
+    });
+}
 
-    // Start next job.
-    updateCurrentJob_l();
+void TranscodingJobScheduler::onResumed(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "resumed", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingResumed(jobId);
+        }
+    });
+}
 
-    validateState_l();
+void TranscodingJobScheduler::onFinish(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "finish", [=](const JobKeyType& jobKey) {
+        {
+            auto clientCallback = mJobMap[jobKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        jobId, TranscodingResultParcel({jobId, -1 /*actualBitrateBps*/,
+                                                        std::nullopt /*jobStats*/}));
+            }
+        }
+
+        // Remove the job.
+        removeJob_l(jobKey);
+
+        // Start next job.
+        updateCurrentJob_l();
+
+        validateState_l();
+    });
 }
 
 void TranscodingJobScheduler::onError(ClientIdType clientId, JobIdType jobId,
                                       TranscodingErrorCode err) {
-    JobKeyType jobKey = std::make_pair(clientId, jobId);
-
-    ALOGV("%s: job %s, err %d", __FUNCTION__, jobToString(jobKey).c_str(), (int32_t)err);
-
-    std::scoped_lock lock{mLock};
-
-    if (mJobMap.count(jobKey) == 0) {
-        ALOGW("ignoring error for non-existent job");
-        return;
-    }
-
-    // Only ignore if job was never started. In particular, propagate the status
-    // to client if the job is paused. Transcoder could have posted finish when
-    // we're pausing it, and the finish arrived after we changed current job.
-    if (mJobMap[jobKey].state == Job::NOT_STARTED) {
-        ALOGW("ignoring error for job that was never started");
-        return;
-    }
-
-    {
-        auto clientCallback = mJobMap[jobKey].callback.lock();
-        if (clientCallback != nullptr) {
-            clientCallback->onTranscodingFailed(jobId, err);
+    notifyClient(clientId, jobId, "error", [=](const JobKeyType& jobKey) {
+        {
+            auto clientCallback = mJobMap[jobKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(jobId, err);
+            }
         }
-    }
 
-    // Remove the job.
-    removeJob_l(jobKey);
+        // Remove the job.
+        removeJob_l(jobKey);
 
-    // Start next job.
-    updateCurrentJob_l();
+        // Start next job.
+        updateCurrentJob_l();
 
-    validateState_l();
+        validateState_l();
+    });
 }
 
 void TranscodingJobScheduler::onProgressUpdate(ClientIdType clientId, JobIdType jobId,
                                                int32_t progress) {
-    JobKeyType jobKey = std::make_pair(clientId, jobId);
-
-    ALOGV("%s: job %s, progress %d", __FUNCTION__, jobToString(jobKey).c_str(), progress);
-
-    std::scoped_lock lock{mLock};
-
-    if (mJobMap.count(jobKey) == 0) {
-        ALOGW("ignoring progress for non-existent job");
-        return;
-    }
-
-    // Only ignore if job was never started. In particular, propagate the status
-    // to client if the job is paused. Transcoder could have posted finish when
-    // we're pausing it, and the finish arrived after we changed current job.
-    if (mJobMap[jobKey].state == Job::NOT_STARTED) {
-        ALOGW("ignoring progress for job that was never started");
-        return;
-    }
-
-    {
-        auto clientCallback = mJobMap[jobKey].callback.lock();
-        if (clientCallback != nullptr) {
-            clientCallback->onProgressUpdate(jobId, progress);
+    notifyClient(clientId, jobId, "progress", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onProgressUpdate(jobId, progress);
         }
-    }
+    });
 }
 
 void TranscodingJobScheduler::onResourceLost() {
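The notifyClient() helper introduced above centralizes the locking, the job lookup, and the NOT_STARTED check, so each event handler only supplies a reason string and a lambda that runs under mLock with a validated jobKey. A hedged sketch of how one more event would slot into the same pattern (onDropped and onTranscodingDropped are hypothetical and do not exist in this change):

// Hypothetical handler, shown only to illustrate the notifyClient() pattern.
void TranscodingJobScheduler::onDropped(ClientIdType clientId, JobIdType jobId) {
    notifyClient(clientId, jobId, "dropped", [=](const JobKeyType& jobKey) {
        auto callback = mJobMap[jobKey].callback.lock();
        if (callback != nullptr) {
            // callback->onTranscodingDropped(jobId);  // would require a new AIDL method
        }
    });
}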
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
index 1a42809..73edb95 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
@@ -41,6 +41,30 @@
 
     /**
     * Called when the transcoding associated with the jobId finished.
+    * This will only be called if the client requests to receive status updates for the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingStarted(in int jobId);
+
+    /**
+    * Called when the transcoding associated with the jobId is paused.
+    * This will only be called if the client requests to receive status updates for the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingPaused(in int jobId);
+
+    /**
+    * Called when the transcoding associated with the jobId is resumed.
+    * This will only be called if the client requests to receive status updates for the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingResumed(in int jobId);
+
+    /**
+    * Called when the transcoding associated with the jobId finished.
     *
     * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
     * @param result contains the transcoded file stats and other transcoding metrics if requested.
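Clients that only care about the terminal callbacks can stub out the three new job-event methods, which is what the test doubles later in this change do. A minimal sketch, assuming the AIDL-generated BnTranscodingClientCallback C++ binding and omitting the other required methods (onTranscodingFailed, onProgressUpdate, openFileDescriptor, and so on):

// Sketch of a client-side callback that ignores the new job-event updates.
struct MyClientCallback : public aidl::android::media::BnTranscodingClientCallback {
    ::ndk::ScopedAStatus onTranscodingStarted(int32_t /*jobId*/) override {
        return ::ndk::ScopedAStatus::ok();
    }
    ::ndk::ScopedAStatus onTranscodingPaused(int32_t /*jobId*/) override {
        return ::ndk::ScopedAStatus::ok();
    }
    ::ndk::ScopedAStatus onTranscodingResumed(int32_t /*jobId*/) override {
        return ::ndk::ScopedAStatus::ok();
    }
    // Remaining ITranscodingClientCallback methods omitted for brevity.
};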
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 7f47fdc..b044d41 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,11 +23,12 @@
  */
 @Backing(type = "int")
 enum TranscodingErrorCode {
-    kUnknown = 0,
-    kUnsupported = 1,
-    kDecoderError = 2,
-    kEncoderError = 3,
-    kExtractorError = 4,
-    kMuxerError = 5,
-    kInvalidBitstream = 6
+    kNoError = 0,
+    kUnknown = 1,
+    kMalformed = 2,
+    kUnsupported = 3,
+    kInvalidParameter = 4,
+    kInvalidOperation = 5,
+    kErrorIO = 6,
+    kInsufficientResources = 7,
 }
\ No newline at end of file
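TranscoderWrapper.cpp above converts media_status_t values from MediaTranscoder into these codes through a toTranscodingError() helper that is not part of this excerpt; the mapping below is therefore only an assumption-level sketch of how the NDK error space could fold into the new enum:

// Hedged sketch; the real helper lives in TranscoderWrapper.cpp and may differ.
static TranscodingErrorCode toTranscodingError(media_status_t status) {
    switch (status) {
    case AMEDIA_OK:                      return TranscodingErrorCode::kNoError;
    case AMEDIA_ERROR_MALFORMED:         return TranscodingErrorCode::kMalformed;
    case AMEDIA_ERROR_UNSUPPORTED:       return TranscodingErrorCode::kUnsupported;
    case AMEDIA_ERROR_INVALID_PARAMETER: return TranscodingErrorCode::kInvalidParameter;
    case AMEDIA_ERROR_INVALID_OPERATION: return TranscodingErrorCode::kInvalidOperation;
    case AMEDIA_ERROR_IO:                return TranscodingErrorCode::kErrorIO;
    case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE:
                                         return TranscodingErrorCode::kInsufficientResources;
    default:                             return TranscodingErrorCode::kUnknown;
    }
}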
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl
new file mode 100644
index 0000000..1b41b87
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingJobStats encapsulates the stats of a TranscodingJob.
+ *
+ * {@hide}
+ */
+parcelable TranscodingJobStats {
+    /**
+     * System time at which the job was created.
+     */
+    long jobCreatedTimeUs;
+
+    /**
+     * System time at which the job finished.
+     */
+    long jobFinishedTimeUs;
+
+    /**
+     * Total time spent on transcoding, excluding time spent in pause.
+     */
+    long totalProcessingTimeUs;
+
+    /**
+     * Total time spent on handling the job, including time spent in pause.
+     * totalTimeUs is the same as jobFinishedTimeUs - jobCreatedTimeUs.
+     */
+    long totalTimeUs;
+}
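The two totals differ only by the time the job spent paused; a small illustrative helper makes the relationship explicit (function and parameter names are made up for this sketch):

// Illustrative only: how the stats fields relate to one another.
TranscodingJobStats makeStats(int64_t createdUs, int64_t finishedUs, int64_t pausedUs) {
    TranscodingJobStats stats;
    stats.jobCreatedTimeUs = createdUs;
    stats.jobFinishedTimeUs = finishedUs;
    stats.totalTimeUs = finishedUs - createdUs;                   // includes paused time
    stats.totalProcessingTimeUs = stats.totalTimeUs - pausedUs;   // excludes paused time
    return stats;
}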
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 3ed9f0d..83ea707 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -58,8 +58,15 @@
 
     /**
      * Whether to receive update on progress and change of awaitNumJobs.
+     * Defaults to false.
      */
-    boolean requestUpdate;
+    boolean requestProgressUpdate = false;
+
+    /**
+     * Whether to receive updates on the job's start/stop/pause/resume.
+     * Defaults to false.
+     */
+    boolean requestJobEventUpdate = false;
 
     /**
      * Whether this request is for testing.
@@ -67,8 +74,14 @@
     boolean isForTesting = false;
 
     /**
-     * Test configuration. This is only valid when isForTesting is set to true.
+     * Test configuration. This will be available only when isForTesting is set to true.
      */
+    @nullable TranscodingTestConfig testConfig;
 
-    TranscodingTestConfig testConfig;
+    /**
+     * Whether to get the stats of the transcoding.
+     * If this is enabled, the TranscodingJobStats will be returned in TranscodingResultParcel
+     * when transcoding finishes.
+     */
+    boolean enableStats = false;
 }
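A hedged C++ sketch of how a client fills the renamed and added fields (paths and values here are placeholders, not taken from this change):

TranscodingRequestParcel request;
request.sourceFilePath = "/data/local/tmp/input.mp4";        // placeholder path
request.destinationFilePath = "/data/local/tmp/output.mp4";  // placeholder path
request.requestProgressUpdate = true;   // renamed from requestUpdate
request.requestJobEventUpdate = true;   // new: opt in to start/pause/resume callbacks
request.enableStats = true;             // new: ask for TranscodingJobStats in the result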
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index 65c49e7..a20c8b1 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.TranscodingJobStats;
+
 /**
  * Result of the transcoding.
  *
@@ -34,5 +36,9 @@
      */
     int actualBitrateBps;
 
-    // TODO(hkuang): Add more fields.
+    /**
+     * Stats of the transcoding job. This will only be available when the client requests
+     * the stats via TranscodingRequestParcel.
+     */
+    @nullable TranscodingJobStats jobStats;
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
index 8d7091a..a564799 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -23,15 +23,21 @@
   */
 parcelable TranscodingTestConfig {
     /**
+     * Whether to use SimulatedTranscoder for testing. Note that SimulatedTranscoder does not send
+     * transcoding jobs to real MediaTranscoder.
+     */
+    boolean useSimulatedTranscoder = false;
+
+    /**
      * Passthrough mode used for testing. The transcoding service will assume the destination
      * path already contains the transcoding of the source file and return it to client directly.
      */
     boolean passThroughMode = false;
 
     /**
-     * Delay of processing the job in milliseconds. Used only for testing. This comebines with
-     * passThroughMode are used to simulate the transcoding latency in transcoding without involvign
-     * MediaTranscoder.
+     * Time of processing the job in milliseconds. The service will return the job result no
+     * earlier than processingTotalTimeMs after it starts processing the job. Note that if the
+     * service uses the real MediaTranscoder to do transcoding, the time spent may be longer.
      */
-    int processingDelayMs = 0;
+    int processingTotalTimeMs = 0;
 }
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
index ef51f65..1a3f505 100644
--- a/media/libmediatranscoding/include/media/TranscoderInterface.h
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -17,12 +17,14 @@
 #ifndef ANDROID_MEDIA_TRANSCODER_INTERFACE_H
 #define ANDROID_MEDIA_TRANSCODER_INTERFACE_H
 
+#include <aidl/android/media/ITranscodingClientCallback.h>
 #include <aidl/android/media/TranscodingErrorCode.h>
 #include <aidl/android/media/TranscodingRequestParcel.h>
 #include <media/TranscodingDefs.h>
 
 namespace android {
 
+using ::aidl::android::media::ITranscodingClientCallback;
 using ::aidl::android::media::TranscodingErrorCode;
 using ::aidl::android::media::TranscodingRequestParcel;
 class TranscoderCallbackInterface;
@@ -30,13 +32,14 @@
 // Interface for the scheduler to call the transcoder to take actions.
 class TranscoderInterface {
 public:
-    // TODO(chz): determine what parameters are needed here.
-    // For now, always pass in clientId&jobId.
     virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) = 0;
     virtual void start(ClientIdType clientId, JobIdType jobId,
-                       const TranscodingRequestParcel& request) = 0;
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
     virtual void pause(ClientIdType clientId, JobIdType jobId) = 0;
-    virtual void resume(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void resume(ClientIdType clientId, JobIdType jobId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
     virtual void stop(ClientIdType clientId, JobIdType jobId) = 0;
 
 protected:
@@ -48,6 +51,9 @@
 class TranscoderCallbackInterface {
 public:
     // TODO(chz): determine what parameters are needed here.
+    virtual void onStarted(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onPaused(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onResumed(ClientIdType clientId, JobIdType jobId) = 0;
     virtual void onFinish(ClientIdType clientId, JobIdType jobId) = 0;
     virtual void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) = 0;
     virtual void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) = 0;
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
new file mode 100644
index 0000000..804119f
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TRANSCODER_WRAPPER_H
+#define ANDROID_TRANSCODER_WRAPPER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+class MediaTranscoder;
+class Parcelable;
+
+/*
+ * Wrapper class around MediaTranscoder.
+ * Implements TranscoderInterface for TranscodingJobScheduler to use.
+ */
+class TranscoderWrapper : public TranscoderInterface,
+                          public std::enable_shared_from_this<TranscoderWrapper> {
+public:
+    TranscoderWrapper();
+
+    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+    virtual void start(ClientIdType clientId, JobIdType jobId,
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void pause(ClientIdType clientId, JobIdType jobId) override;
+    virtual void resume(ClientIdType clientId, JobIdType jobId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void stop(ClientIdType clientId, JobIdType jobId) override;
+
+private:
+    class CallbackImpl;
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finish, Error } type;
+        ClientIdType clientId;
+        JobIdType jobId;
+        std::function<void()> runnable;
+    };
+    using JobKeyType = std::pair<ClientIdType, JobIdType>;
+
+    std::shared_ptr<CallbackImpl> mTranscoderCb;
+    std::shared_ptr<MediaTranscoder> mTranscoder;
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue;  // GUARDED_BY(mLock);
+    std::map<JobKeyType, std::shared_ptr<const Parcel>> mPausedStateMap;
+    ClientIdType mCurrentClientId;
+    JobIdType mCurrentJobId;
+
+    static const char* toString(Event::Type type);
+    void onFinish(ClientIdType clientId, JobIdType jobId);
+    void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode error);
+
+    TranscodingErrorCode handleStart(ClientIdType clientId, JobIdType jobId,
+                                     const TranscodingRequestParcel& request,
+                                     const std::shared_ptr<ITranscodingClientCallback>& callback);
+    TranscodingErrorCode handlePause(ClientIdType clientId, JobIdType jobId);
+    TranscodingErrorCode handleResume(ClientIdType clientId, JobIdType jobId,
+                                      const TranscodingRequestParcel& request,
+                                      const std::shared_ptr<ITranscodingClientCallback>& callback);
+    TranscodingErrorCode setupTranscoder(
+            ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+            const std::shared_ptr<ITranscodingClientCallback>& callback,
+            const std::shared_ptr<const Parcel>& pausedState = nullptr);
+
+    void cleanup();
+    void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                    const std::function<void()> runnable);
+    void threadLoop();
+};
+
+}  // namespace android
+#endif  // ANDROID_TRANSCODER_WRAPPER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingJobScheduler.h b/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
index 63001c3..5ccadad 100644
--- a/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
+++ b/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
@@ -47,6 +47,9 @@
     // ~SchedulerClientInterface
 
     // TranscoderCallbackInterface
+    void onStarted(ClientIdType clientId, JobIdType jobId) override;
+    void onPaused(ClientIdType clientId, JobIdType jobId) override;
+    void onResumed(ClientIdType clientId, JobIdType jobId) override;
     void onFinish(ClientIdType clientId, JobIdType jobId) override;
     void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) override;
     void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) override;
@@ -105,7 +108,8 @@
     void updateCurrentJob_l();
     void removeJob_l(const JobKeyType& jobKey);
     void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
-
+    void notifyClient(ClientIdType clientId, JobIdType jobId, const char* reason,
+                      std::function<void(const JobKeyType&)> func);
     // Internal state verifier (debug only)
     void validateState_l();
 
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
index 7471cb7..63de1fb 100644
--- a/media/libmediatranscoding/include/media/TranscodingRequest.h
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -38,8 +38,10 @@
         sourceFilePath = parcel.sourceFilePath;
         destinationFilePath = parcel.destinationFilePath;
         transcodingType = parcel.transcodingType;
+        requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
         priority = parcel.priority;
-        requestUpdate = parcel.requestUpdate;
+        requestProgressUpdate = parcel.requestProgressUpdate;
+        requestJobEventUpdate = parcel.requestJobEventUpdate;
         isForTesting = parcel.isForTesting;
         testConfig = parcel.testConfig;
     }
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index db171ce..1583325 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -64,6 +64,12 @@
         return Status::ok();
     }
 
+    Status onTranscodingStarted(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_jobId*/) override { return Status::ok(); }
+
     Status onTranscodingFinished(int32_t in_jobId,
                                  const TranscodingResultParcel& in_result) override {
         EXPECT_EQ(in_jobId, in_result.jobId);
@@ -179,7 +185,8 @@
             auto clientCallback = it->second.callback.lock();
             if (clientCallback != nullptr) {
                 clientCallback->onTranscodingFinished(
-                        mLastJob.second, TranscodingResultParcel({mLastJob.second, 0}));
+                        mLastJob.second,
+                        TranscodingResultParcel({mLastJob.second, 0, std::nullopt}));
             }
         }
         mJobs.erase(it);
diff --git a/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp b/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
index d4fd483..d21b595 100644
--- a/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
@@ -87,14 +87,15 @@
     // TranscoderInterface
     void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
 
-    void start(ClientIdType clientId, JobIdType jobId,
-               const TranscodingRequestParcel& /*request*/) override {
+    void start(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+               const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
         mEventQueue.push_back(Start(clientId, jobId));
     }
     void pause(ClientIdType clientId, JobIdType jobId) override {
         mEventQueue.push_back(Pause(clientId, jobId));
     }
-    void resume(ClientIdType clientId, JobIdType jobId) override {
+    void resume(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
         mEventQueue.push_back(Resume(clientId, jobId));
     }
     void stop(ClientIdType clientId, JobIdType jobId) override {
@@ -167,6 +168,12 @@
         return Status::ok();
     }
 
+    Status onTranscodingStarted(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_jobId*/) override { return Status::ok(); }
+
     Status onTranscodingFinished(int32_t in_jobId,
                                  const TranscodingResultParcel& in_result) override {
         EXPECT_EQ(in_jobId, in_result.jobId);
@@ -427,9 +434,9 @@
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
 
     // Fail running offline job, and test error code propagation.
-    mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kInvalidBitstream);
+    mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kInvalidOperation);
     EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(0)));
-    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidBitstream);
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
 
     // Duplicate fail for last job, should be ignored.
     mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kUnknown);
diff --git a/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
new file mode 100644
index 0000000..80d1ec3
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/transcoder/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
rename to media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
new file mode 100644
index 0000000..df42a15
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
new file mode 100644
index 0000000..7794b99
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/longtest_15s.mp4 b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
new file mode 100644
index 0000000..b50d8e4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
new file mode 100644
index 0000000..92dda3b
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
new file mode 100644
index 0000000..2fe37bd
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/push_assets.sh b/media/libmediatranscoding/tests/assets/push_assets.sh
new file mode 100755
index 0000000..8afc947
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/push_assets.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Pushes the assets to the /data/local/tmp.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount
+fi
+
+echo "Copying files to device"
+
+adb shell mkdir -p /data/local/tmp/TranscodingTestAssets
+
+FILES=$ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/*
+for file in $FILES
+do
+  adb push --sync $file /data/local/tmp/TranscodingTestAssets
+done
+
+echo "Copy done"
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
index 843d047..c153a42 100644
--- a/media/libmediatranscoding/transcoder/Android.bp
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -23,6 +23,7 @@
         "MediaSampleWriter.cpp",
         "MediaTrackTranscoder.cpp",
         "MediaTranscoder.cpp",
+        "NdkCommon.cpp",
         "PassthroughTrackTranscoder.cpp",
         "VideoTrackTranscoder.cpp",
     ],
@@ -33,6 +34,8 @@
         "libmediandk",
         "libnativewindow",
         "libutils",
+        // TODO: Use libbinder_ndk
+        "libbinder",
     ],
 
     export_include_dirs: [
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
index aaa2adb..91dbf78 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -26,7 +26,19 @@
 class DefaultMuxer : public MediaSampleWriterMuxerInterface {
 public:
     // MediaSampleWriterMuxerInterface
-    ssize_t addTrack(const AMediaFormat* trackFormat) override {
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        // If the track format has rotation, we need to call AMediaMuxer_setOrientationHint
+        // to set it; the muxer doesn't take rotation specified on the track format.
+        const char* mime;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) &&
+            strncmp(mime, "video/", 6) == 0) {
+            int32_t rotation;
+            if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+                (rotation != 0)) {
+                AMediaMuxer_setOrientationHint(mMuxer, rotation);
+            }
+        }
+
         return AMediaMuxer_addTrack(mMuxer, trackFormat);
     }
     media_status_t start() override { return AMediaMuxer_start(mMuxer); }
@@ -110,7 +122,12 @@
         return false;
     }
 
-    mTracks.emplace_back(sampleQueue, static_cast<size_t>(trackIndex));
+    int64_t durationUs;
+    if (!AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+        durationUs = 0;
+    }
+
+    mTracks.emplace_back(sampleQueue, static_cast<size_t>(trackIndex), durationUs);
     return true;
 }
 
@@ -188,10 +205,23 @@
                 } else if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
                     // Track reached end of stream.
                     track.mReachedEos = true;
-                    break;
+
+                    // Preserve source track duration by setting the appropriate timestamp on the
+                    // empty End-Of-Stream sample.
+                    if (track.mDurationUs > 0 && track.mFirstSampleTimeSet) {
+                        sample->info.presentationTimeUs =
+                                track.mDurationUs + track.mFirstSampleTimeUs;
+                    }
+                } else {
+                    samplesLeft = true;
                 }
 
-                samplesLeft = true;
+                // Record the first sample's timestamp in order to translate duration to EOS time
+                // for tracks that do not start at 0.
+                if (!track.mFirstSampleTimeSet) {
+                    track.mFirstSampleTimeUs = sample->info.presentationTimeUs;
+                    track.mFirstSampleTimeSet = true;
+                }
 
                 bufferInfo.offset = sample->dataOffset;
                 bufferInfo.size = sample->info.size;
@@ -205,7 +235,7 @@
                     return status;
                 }
 
-            } while (sample->info.presentationTimeUs < segmentEndTimeUs);
+            } while (sample->info.presentationTimeUs < segmentEndTimeUs && !track.mReachedEos);
         }
 
         segmentEndTimeUs += mTrackSegmentLengthUs;
@@ -213,4 +243,4 @@
 
     return AMEDIA_OK;
 }
-}  // namespace android
\ No newline at end of file
+}  // namespace android
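The duration-preserving EOS logic added above boils down to one line of arithmetic. A worked example with made-up numbers:

// A track whose first muxed sample starts at 500000us, with a 10s container duration.
int64_t firstSampleTimeUs = 500000;    // recorded when the first sample is written
int64_t durationUs = 10000000;         // AMEDIAFORMAT_KEY_DURATION from the track format
// The empty EOS sample is stamped so that (EOS time - first sample time) == duration.
int64_t eosPresentationTimeUs = firstSampleTimeUs + durationUs;  // 10500000us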
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
index 10c0c6c..b485826 100644
--- a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
@@ -19,6 +19,7 @@
 
 #include <android-base/logging.h>
 #include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
 
 namespace android {
 
@@ -101,6 +102,12 @@
     return false;
 }
 
+void MediaTrackTranscoder::notifyTrackFormatAvailable() {
+    if (auto callbacks = mTranscoderCallback.lock()) {
+        callbacks->onTrackFormatAvailable(this);
+    }
+}
+
 std::shared_ptr<MediaSampleQueue> MediaTrackTranscoder::getOutputQueue() const {
     return mOutputQueue;
 }
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
index f2f7810..bde1cf6 100644
--- a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -18,8 +18,10 @@
 #define LOG_TAG "MediaTranscoder"
 
 #include <android-base/logging.h>
+#include <binder/Parcel.h>
 #include <fcntl.h>
 #include <media/MediaSampleReaderNDK.h>
+#include <media/MediaSampleWriter.h>
 #include <media/MediaTranscoder.h>
 #include <media/PassthroughTrackTranscoder.h>
 #include <media/VideoTrackTranscoder.h>
@@ -83,6 +85,17 @@
 }
 
 void MediaTranscoder::sendCallback(media_status_t status) {
+    // If the transcoder was already cancelled explicitly, don't send any error callbacks.
+    // Tracks and the sample writer will report errors for the abort, but currently we can't
+    // tell those apart from real errors. Ideally we would still report real errors back to
+    // the client: there is a small chance that an explicit abort and a real error arrive at
+    // around the same time, and we could report the error if the abort carried a distinct
+    // error code. On the other hand, if the transcoder actually finished (status is AMEDIA_OK)
+    // at around the same time as the abort, we should still report the finish to the client.
+    if (mCancelled && status != AMEDIA_OK) {
+        return;
+    }
+
     bool expected = false;
     if (mCallbackSent.compare_exchange_strong(expected, true)) {
         if (status == AMEDIA_OK) {
@@ -92,15 +105,43 @@
         }
 
         // Transcoding is done and the callback to the client has been sent, so tear down the
-        // pipeline but do it asynchronously to avoid deadlocks. If an error occurred then
-        // automatically delete the output file.
-        const bool deleteOutputFile = status != AMEDIA_OK;
-        std::thread asyncCancelThread{
-                [self = shared_from_this(), deleteOutputFile] { self->cancel(deleteOutputFile); }};
+        // pipeline but do it asynchronously to avoid deadlocks. If an error occurred, the
+        // client should clean up the file.
+        std::thread asyncCancelThread{[self = shared_from_this()] { self->cancel(); }};
         asyncCancelThread.detach();
     }
 }
 
+void MediaTranscoder::onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) {
+    LOG(INFO) << "TrackTranscoder " << transcoder << " format available.";
+
+    std::scoped_lock lock{mTracksAddedMutex};
+
+    // Ignore duplicate format change.
+    if (mTracksAdded.count(transcoder) > 0) {
+        return;
+    }
+
+    // Add track to the writer.
+    const bool ok =
+            mSampleWriter->addTrack(transcoder->getOutputQueue(), transcoder->getOutputFormat());
+    if (!ok) {
+        LOG(ERROR) << "Unable to add track to sample writer.";
+        sendCallback(AMEDIA_ERROR_UNKNOWN);
+        return;
+    }
+
+    mTracksAdded.insert(transcoder);
+    if (mTracksAdded.size() == mTrackTranscoders.size()) {
+        LOG(INFO) << "Starting sample writer.";
+        bool started = mSampleWriter->start();
+        if (!started) {
+            LOG(ERROR) << "Unable to start sample writer.";
+            sendCallback(AMEDIA_ERROR_UNKNOWN);
+        }
+    }
+}
+
 void MediaTranscoder::onTrackFinished(const MediaTrackTranscoder* transcoder) {
     LOG(DEBUG) << "TrackTranscoder " << transcoder << " finished";
 }
@@ -115,13 +156,16 @@
     sendCallback(status);
 }
 
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks)
+      : mCallbacks(callbacks) {}
+
 std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
         const std::shared_ptr<CallbackInterface>& callbacks,
-        const std::shared_ptr<Parcel>& pausedState) {
+        const std::shared_ptr<const Parcel>& pausedState) {
     if (pausedState != nullptr) {
-        LOG(ERROR) << "Initializing from paused state is currently not supported.";
-        return nullptr;
-    } else if (callbacks == nullptr) {
+        LOG(INFO) << "Initializing from paused state.";
+    }
+    if (callbacks == nullptr) {
         LOG(ERROR) << "Callbacks cannot be null";
         return nullptr;
     }
@@ -129,15 +173,9 @@
     return std::shared_ptr<MediaTranscoder>(new MediaTranscoder(callbacks));
 }
 
-media_status_t MediaTranscoder::configureSource(const char* path) {
-    if (path == nullptr) {
-        LOG(ERROR) << "Source path cannot be null";
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    const int fd = open(path, O_RDONLY);
-    if (fd <= 0) {
-        LOG(ERROR) << "Unable to open source path: " << path;
+media_status_t MediaTranscoder::configureSource(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid source fd: " << fd;
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
@@ -145,10 +183,9 @@
     lseek(fd, 0, SEEK_SET);
 
     mSampleReader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
-    close(fd);
 
     if (mSampleReader == nullptr) {
-        LOG(ERROR) << "Unable to parse source file: " << path;
+        LOG(ERROR) << "Unable to parse source fd: " << fd;
         return AMEDIA_ERROR_UNSUPPORTED;
     }
 
@@ -239,35 +276,23 @@
     return AMEDIA_OK;
 }
 
-media_status_t MediaTranscoder::configureDestination(const char* path) {
-    if (path == nullptr || strlen(path) < 1) {
-        LOG(ERROR) << "Invalid destination path: " << path;
+media_status_t MediaTranscoder::configureDestination(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid destination fd: " << fd;
         return AMEDIA_ERROR_INVALID_PARAMETER;
-    } else if (mSampleWriter != nullptr) {
+    }
+
+    if (mSampleWriter != nullptr) {
         LOG(ERROR) << "Destination is already configured.";
         return AMEDIA_ERROR_INVALID_OPERATION;
     }
 
-    // Write-only, create file if non-existent, don't overwrite existing file.
-    static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
-    // User R+W permission.
-    static constexpr int kFileMode = S_IRUSR | S_IWUSR;
-
-    const int fd = open(path, kOpenFlags, kFileMode);
-    if (fd < 0) {
-        LOG(ERROR) << "Unable to open destination file \"" << path << "\" for writing: " << fd;
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    mDestinationPath = std::string(path);
-
     mSampleWriter = std::make_unique<MediaSampleWriter>();
     const bool initOk = mSampleWriter->init(
             fd, std::bind(&MediaTranscoder::onSampleWriterFinished, this, std::placeholders::_1));
-    close(fd);
 
     if (!initOk) {
-        LOG(ERROR) << "Unable to initialize sample writer with destination path " << path;
+        LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
         mSampleWriter.reset();
         return AMEDIA_ERROR_UNKNOWN;
     }
@@ -284,46 +309,30 @@
         return AMEDIA_ERROR_INVALID_OPERATION;
     }
 
-    // Add tracks to the writer.
-    for (auto& transcoder : mTrackTranscoders) {
-        const bool ok = mSampleWriter->addTrack(transcoder->getOutputQueue(),
-                                                transcoder->getOutputFormat());
-        if (!ok) {
-            LOG(ERROR) << "Unable to add track to sample writer.";
-            return AMEDIA_ERROR_UNKNOWN;
-        }
-    }
-
-    bool started = mSampleWriter->start();
-    if (!started) {
-        LOG(ERROR) << "Unable to start sample writer.";
-        return AMEDIA_ERROR_UNKNOWN;
-    }
-
     // Start transcoders
     for (auto& transcoder : mTrackTranscoders) {
-        started = transcoder->start();
+        bool started = transcoder->start();
         if (!started) {
             LOG(ERROR) << "Unable to start track transcoder.";
-            cancel(true);
+            cancel();
             return AMEDIA_ERROR_UNKNOWN;
         }
     }
     return AMEDIA_OK;
 }
 
-media_status_t MediaTranscoder::pause(std::shared_ptr<const Parcelable>* pausedState) {
-    (void)pausedState;
-    LOG(ERROR) << "Pause is not currently supported";
-    return AMEDIA_ERROR_UNSUPPORTED;
+media_status_t MediaTranscoder::pause(std::shared_ptr<const Parcel>* pausedState) {
+    // TODO: write internal states to parcel.
+    *pausedState = std::make_shared<Parcel>();
+    return cancel();
 }
 
 media_status_t MediaTranscoder::resume() {
-    LOG(ERROR) << "Resume is not currently supported";
-    return AMEDIA_ERROR_UNSUPPORTED;
+    // TODO: restore internal states from parcel.
+    return start();
 }
 
-media_status_t MediaTranscoder::cancel(bool deleteDestinationFile) {
+media_status_t MediaTranscoder::cancel() {
     bool expected = false;
     if (!mCancelled.compare_exchange_strong(expected, true)) {
         // Already cancelled.
@@ -335,15 +344,6 @@
         transcoder->stop();
     }
 
-    // TODO(chz): file deletion should be done by upper level from the content URI.
-    if (deleteDestinationFile && !mDestinationPath.empty()) {
-        int error = unlink(mDestinationPath.c_str());
-        if (error) {
-            LOG(ERROR) << "Unable to delete destination file " << mDestinationPath.c_str() << ": "
-                       << error;
-            return AMEDIA_ERROR_IO;
-        }
-    }
     return AMEDIA_OK;
 }
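With the path-based configure overloads removed, the caller owns both file descriptors for the whole transcoding session. A hedged usage sketch of the fd-based flow (it mirrors TranscoderWrapper::setupTranscoder() earlier in this change; callbacks, srcFd and dstFd are assumed to already exist, and error handling is trimmed):

// Sketch of the new fd-based MediaTranscoder flow.
std::shared_ptr<MediaTranscoder> transcoder =
        MediaTranscoder::create(callbacks, nullptr /* pausedState: not resuming */);
transcoder->configureSource(srcFd);            // fd stays open and owned by the caller
std::vector<std::shared_ptr<AMediaFormat>> tracks = transcoder->getTrackFormats();
for (size_t i = 0; i < tracks.size(); ++i) {
    // nullptr keeps a track as passthrough; pass a destination format to transcode it.
    transcoder->configureTrackFormat(i, nullptr);
}
transcoder->configureDestination(dstFd);       // opened "rw" so resume can reuse it
transcoder->start();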
 
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
new file mode 100644
index 0000000..67a8e10
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkCommon"
+
+#include <log/log.h>
+#include <media/NdkCommon.h>
+
+#include <cstdio>
+#include <cstring>
+#include <utility>
+
+/* TODO(b/153592281)
+ * Note: constants used by the native media tests but not available in media ndk api
+ */
+const char* AMEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+const char* AMEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char* AMEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char* AMEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char* AMEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
+const char* AMEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+const char* AMEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
+
+/* TODO(b/153592281) */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES = "max-bframes";
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
index 7806208..a4cbf33 100644
--- a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
@@ -96,6 +96,9 @@
     MediaSampleInfo info;
     std::shared_ptr<MediaSample> sample;
 
+    // Notify the track format as soon as we start. It's the same as the source format.
+    notifyTrackFormatAvailable();
+
     MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
             [bufferPool = mBufferPool](MediaSample* sample) {
                 bufferPool->returnBuffer(const_cast<uint8_t*>(sample->buffer));
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 3818545..8ee252f 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -86,6 +86,10 @@
         VideoTrackTranscoder* transcoder = static_cast<VideoTrackTranscoder*>(userdata);
         const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
         LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+        if (codec == transcoder->mEncoder.get()) {
+            transcoder->mCodecMessageQueue.push(
+                    [transcoder, format] { transcoder->updateTrackFormat(format); });
+        }
     }
 
     static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t error,
@@ -135,6 +139,10 @@
     }
     AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
 
+    // Always encode without rotation. The rotation degree will be transferred directly to
+    // MediaSampleWriter track format, and MediaSampleWriter will call AMediaMuxer_setOrientationHint.
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_ROTATION, 0);
+
     mDestinationFormat = std::shared_ptr<AMediaFormat>(encoderFormat, &AMediaFormat_delete);
 
     // Create and configure the encoder.
@@ -311,21 +319,93 @@
     }
 }
 
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat) {
+    if (mActualOutputFormat != nullptr) {
+        LOG(WARNING) << "Ignoring duplicate format change.";
+        return;
+    }
+
+    AMediaFormat* formatCopy = AMediaFormat_new();
+    if (!formatCopy || AMediaFormat_copy(formatCopy, outputFormat) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy outputFormat";
+        AMediaFormat_delete(formatCopy);
+        mStatus = AMEDIA_ERROR_INVALID_PARAMETER;
+        return;
+    }
+
+    // Generate the actual track format for the muxer based on the encoder output format,
+    // since much vital information comes in the encoder format (e.g. CSD).
+    // Transfer necessary fields from the user-configured track format (derived from
+    // source track format and user transcoding request) where needed.
+
+    // Transfer SAR settings:
+    // If mSourceFormat has SAR set, it means the original source has SAR specified
+    // at the container level. This is supposed to override any SAR settings in the bitstream,
+    // and thus should always be transferred to the container of the transcoded file.
+    int32_t sarWidth, sarHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_WIDTH, &sarWidth) &&
+        (sarWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_HEIGHT, &sarHeight) &&
+        (sarHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_WIDTH, sarWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_HEIGHT, sarHeight);
+    }
+    // Transfer DAR settings.
+    int32_t displayWidth, displayHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_WIDTH, &displayWidth) &&
+        (displayWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                              &displayHeight) &&
+        (displayHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_WIDTH, displayWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, displayHeight);
+    }
+
+    // Transfer rotation settings.
+    // Note that muxer itself doesn't take rotation from the track format. It requires
+    // AMediaMuxer_setOrientationHint to set the rotation. Here we pass the rotation to
+    // MediaSampleWriter using the track format. MediaSampleWriter will then call
+    // AMediaMuxer_setOrientationHint as needed.
+    int32_t rotation;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+        (rotation != 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_ROTATION, rotation);
+    }
+
+    // Transfer track duration.
+    // Preserve the source track duration by sending it to MediaSampleWriter.
+    int64_t durationUs;
+    if (AMediaFormat_getInt64(mSourceFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs) &&
+        durationUs > 0) {
+        AMediaFormat_setInt64(formatCopy, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
+
+    // TODO: transfer other fields as required.
+
+    mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+
+    notifyTrackFormatAvailable();
+}
+
 media_status_t VideoTrackTranscoder::runTranscodeLoop() {
-    media_status_t status = AMEDIA_OK;
+    // Push "start decoder" and "start encoder" as two messages, so that they are subject to
+    // the stop request as well. If the job is cancelled (or paused) immediately after start,
+    // we don't waste time starting and then stopping the codecs.
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mDecoder);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video decoder: " << status;
+            mStatus = status;
+        }
+    });
 
-    status = AMediaCodec_start(mDecoder);
-    if (status != AMEDIA_OK) {
-        LOG(ERROR) << "Unable to start video decoder: " << status;
-        return status;
-    }
-
-    status = AMediaCodec_start(mEncoder.get());
-    if (status != AMEDIA_OK) {
-        LOG(ERROR) << "Unable to start video encoder: " << status;
-        AMediaCodec_stop(mDecoder);
-        return status;
-    }
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mEncoder.get());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video encoder: " << status;
+            mStatus = status;
+        }
+    });
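+    // Note: a failed AMediaCodec_start only records the error in mStatus; the processing
+    // loop below checks mStatus on each iteration, so a start failure ends transcoding
+    // without running the codec loop.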
 
     // Process codec events until EOS is reached, transcoding is stopped or an error occurs.
     while (!mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
@@ -350,7 +430,7 @@
 }
 
 std::shared_ptr<AMediaFormat> VideoTrackTranscoder::getOutputFormat() const {
-    return mDestinationFormat;
+    return mActualOutputFormat;
 }
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
index 4d0264b..d971f3e 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -41,7 +41,7 @@
      * @param trackFormat Format of the new track.
      * @return A non-negative track index on success, or a negative number on failure.
      */
-    virtual ssize_t addTrack(const AMediaFormat* trackFormat) = 0;
+    virtual ssize_t addTrack(AMediaFormat* trackFormat) = 0;
 
     /** Starts the muxer. */
     virtual media_status_t start() = 0;
@@ -144,11 +144,20 @@
     media_status_t runWriterLoop();
 
     struct TrackRecord {
-        TrackRecord(const std::shared_ptr<MediaSampleQueue>& sampleQueue, size_t trackIndex)
-              : mSampleQueue(sampleQueue), mTrackIndex(trackIndex), mReachedEos(false) {}
+        TrackRecord(const std::shared_ptr<MediaSampleQueue>& sampleQueue, size_t trackIndex,
+                    int64_t durationUs)
+              : mSampleQueue(sampleQueue),
+                mTrackIndex(trackIndex),
+                mDurationUs(durationUs),
+                mFirstSampleTimeUs(0),
+                mFirstSampleTimeSet(false),
+                mReachedEos(false) {}
 
         std::shared_ptr<MediaSampleQueue> mSampleQueue;
         const size_t mTrackIndex;
+        int64_t mDurationUs;
+        int64_t mFirstSampleTimeUs;
+        bool mFirstSampleTimeSet;
         bool mReachedEos;
     };
 
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
index a71db67..60a9139 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
@@ -30,28 +30,7 @@
 
 namespace android {
 
-class MediaTrackTranscoder;
-
-/** Callback interface for MediaTrackTranscoder. */
-class MediaTrackTranscoderCallback {
-public:
-    /**
-     * Called when the MediaTrackTranscoder instance have finished transcoding all media samples
-     * successfully.
-     * @param transcoder The MediaTrackTranscoder that finished the transcoding.
-     */
-    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
-
-    /**
-     * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
-     * @param transcoder The MediaTrackTranscoder that encountered the error.
-     * @param status The non-zero error code describing the encountered error.
-     */
-    virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
-
-protected:
-    virtual ~MediaTrackTranscoderCallback() = default;
-};
+class MediaTrackTranscoderCallback;
 
 /**
  * Base class for all track transcoders. MediaTrackTranscoder operates asynchronously on an internal
@@ -115,6 +94,9 @@
           : mOutputQueue(std::make_shared<MediaSampleQueue>()),
             mTranscoderCallback(transcoderCallback){};
 
+    // Called by subclasses when the actual track format becomes available.
+    void notifyTrackFormatAvailable();
+
     // configureDestinationFormat needs to be implemented by subclasses, and gets called on an
     // external thread before start.
     virtual media_status_t configureDestinationFormat(
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
new file mode 100644
index 0000000..654171e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+
+#include <media/NdkMediaError.h>
+
+namespace android {
+
+class MediaTrackTranscoder;
+
+/** Callback interface for MediaTrackTranscoder. */
+class MediaTrackTranscoderCallback {
+public:
+    /**
+     * Called when the MediaTrackTranscoder's actual track format becomes available.
+     * @param transcoder The MediaTrackTranscoder whose track format becomes available.
+     */
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder);
+    /**
+     * Called when the MediaTrackTranscoder instance has finished transcoding all media samples
+     * successfully.
+     * @param transcoder The MediaTrackTranscoder that finished the transcoding.
+     */
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
+
+    /**
+     * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
+     * @param transcoder The MediaTrackTranscoder that encountered the error.
+     * @param status The non-zero error code describing the encountered error.
+     */
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
+
+protected:
+    virtual ~MediaTrackTranscoderCallback() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
index 2d18eea..7a36c8c 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -17,20 +17,22 @@
 #ifndef ANDROID_MEDIA_TRANSCODER_H
 #define ANDROID_MEDIA_TRANSCODER_H
 
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-#include <media/MediaSampleReader.h>
-#include <media/MediaSampleWriter.h>
-#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
 #include <media/NdkMediaError.h>
 #include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
 
 #include <atomic>
 #include <memory>
+#include <mutex>
 #include <unordered_set>
 
 namespace android {
 
+class MediaSampleReader;
+class MediaSampleWriter;
+class Parcel;
+
 class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
                         public MediaTrackTranscoderCallback {
 public:
@@ -54,7 +56,7 @@
          *      resume.
          */
         virtual void onCodecResourceLost(const MediaTranscoder* transcoder,
-                                         const std::shared_ptr<const Parcelable>& pausedState) = 0;
+                                         const std::shared_ptr<const Parcel>& pausedState) = 0;
 
         virtual ~CallbackInterface() = default;
     };
@@ -66,10 +68,10 @@
      */
     static std::shared_ptr<MediaTranscoder> create(
             const std::shared_ptr<CallbackInterface>& callbacks,
-            const std::shared_ptr<Parcel>& pausedState = nullptr);
+            const std::shared_ptr<const Parcel>& pausedState = nullptr);
 
-    /** Configures source from path. */
-    media_status_t configureSource(const char* path);
+    /** Configures source from a file descriptor (fd). */
+    media_status_t configureSource(int fd);
 
     /** Gets the media formats of all tracks in the file. */
     std::vector<std::shared_ptr<AMediaFormat>> getTrackFormats() const;
@@ -83,8 +85,8 @@
      */
     media_status_t configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat);
 
-    /** Configures destination from path. */
-    media_status_t configureDestination(const char* path);
+    /** Configures destination from a file descriptor (fd). */
+    media_status_t configureDestination(int fd);
 
     /** Starts transcoding. No configurations can be made once the transcoder has started. */
     media_status_t start();
@@ -99,27 +101,27 @@
      * release the transcoder instance, clear the paused state and delete the partial destination
      * file. The caller can optionally call cancel to let the transcoder clean up the partial
      * destination file.
+     *
+     * TODO: use NDK AParcel instead.
+     * libbinder shouldn't be used by mainline modules. When transcoding becomes a mainline
+     * module, this needs to be replaced by the stable AParcel API.
      */
-    media_status_t pause(std::shared_ptr<const Parcelable>* pausedState);
+    media_status_t pause(std::shared_ptr<const Parcel>* pausedState);
 
     /** Resumes a paused transcoding. */
     media_status_t resume();
 
-    /** Cancels the transcoding. Once canceled the transcoding can not be restarted. returns error
-     * if file could not be deleted. */
-    media_status_t cancel(bool deleteDestinationFile = true);
+    /** Cancels the transcoding. Once canceled, the transcoding cannot be restarted. The client
+     * is responsible for cleaning up the abandoned file. */
+    media_status_t cancel();
 
     virtual ~MediaTranscoder() = default;
 
 private:
-    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks)
-          : mCallbacks(callbacks),
-            mSampleReader(nullptr),
-            mSampleWriter(nullptr),
-            mSourceTrackFormats(),
-            mTrackTranscoders() {}
+    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks);
 
     // MediaTrackTranscoderCallback
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
     virtual void onTrackFinished(const MediaTrackTranscoder* transcoder) override;
     virtual void onTrackError(const MediaTrackTranscoder* transcoder,
                               media_status_t status) override;
@@ -132,8 +134,9 @@
     std::unique_ptr<MediaSampleWriter> mSampleWriter;
     std::vector<std::shared_ptr<AMediaFormat>> mSourceTrackFormats;
     std::vector<std::unique_ptr<MediaTrackTranscoder>> mTrackTranscoders;
+    std::mutex mTracksAddedMutex;
+    std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
 
-    std::string mDestinationPath;
     std::atomic_bool mCallbackSent = false;
     std::atomic_bool mCancelled = false;
 };
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
new file mode 100644
index 0000000..dcba812
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+#define ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+
+#include <media/NdkMediaFormat.h>
+
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char* AMEDIA_MIMETYPE_VIDEO_H263;
+
+// TODO(b/146420990)
+// TODO: make MediaTranscoder use the consts from this header.
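+// The values below are intended to mirror the Java MediaMuxer.OutputFormat constants
+// (MUXER_OUTPUT_MPEG_4 = 0 through MUXER_OUTPUT_OGG = 4).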
+typedef enum {
+    OUTPUT_FORMAT_START = 0,
+    OUTPUT_FORMAT_MPEG_4 = OUTPUT_FORMAT_START,
+    OUTPUT_FORMAT_WEBM = OUTPUT_FORMAT_START + 1,
+    OUTPUT_FORMAT_THREE_GPP = OUTPUT_FORMAT_START + 2,
+    OUTPUT_FORMAT_HEIF = OUTPUT_FORMAT_START + 3,
+    OUTPUT_FORMAT_OGG = OUTPUT_FORMAT_START + 4,
+    OUTPUT_FORMAT_LIST_END = OUTPUT_FORMAT_START + 4,
+} MuxerFormat;
+
+// Color formats supported by the encoder - should mirror supportedColorList
+// from MediaCodecConstants.h (are these going to be deprecated?)
+static constexpr int COLOR_FormatYUV420SemiPlanar = 21;
+static constexpr int COLOR_FormatYUV420Flexible = 0x7F420888;
+static constexpr int COLOR_FormatSurface = 0x7f000789;
+
+// constants not defined in NDK
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES;
+static constexpr int TBD_AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 0x1;
+
+static constexpr int kBitrateModeConstant = 2;
+
+#endif  // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
index c47e4b7..1ba205b 100644
--- a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -73,6 +73,9 @@
     // Dequeues an encoded buffer from the encoder and adds it to the output queue.
     void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
 
+    // Updates the video track's actual format based on encoder output format.
+    void updateTrackFormat(AMediaFormat* outputFormat);
+
     AMediaCodec* mDecoder = nullptr;
     // Sample release callback holds a reference to the encoder, hence the shared_ptr.
     std::shared_ptr<AMediaCodec> mEncoder;
@@ -84,6 +87,7 @@
     MediaSampleInfo mSampleInfo;
     BlockingQueue<std::function<void()>> mCodecMessageQueue;
     std::shared_ptr<AMediaFormat> mDestinationFormat;
+    std::shared_ptr<AMediaFormat> mActualOutputFormat;
 };
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index 23d1bab..a9a7e2e 100644
--- a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="false" />
         <option name="push-file"
             key="assets"
-            value="/data/local/tmp/TranscoderTestAssets" />
+            value="/data/local/tmp/TranscodingTestAssets" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
index 858fcb3..805095e 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
@@ -40,7 +40,7 @@
     void SetUp() override {
         LOG(DEBUG) << "MediaSampleReaderNDKTests set up";
         const char* sourcePath =
-                "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
 
         mExtractor = AMediaExtractor_new();
         ASSERT_NE(mExtractor, nullptr);
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
index 752f0c9..e3cb192 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -58,7 +58,7 @@
 class TestMuxer : public MediaSampleWriterMuxerInterface {
 public:
     // MuxerInterface
-    ssize_t addTrack(const AMediaFormat* trackFormat) override {
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
         mEventQueue.push_back(AddTrack(trackFormat));
         return mTrackCount++;
     }
@@ -131,7 +131,7 @@
 public:
     void init() {
         static const char* sourcePath =
-                "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
 
         mExtractor = AMediaExtractor_new();
         ASSERT_NE(mExtractor, nullptr);
@@ -374,6 +374,16 @@
         EXPECT_EQ(event.info.flags, sample->info.flags);
     }
 
+    // Verify EOS samples.
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIndex % mediaSource.mTrackCount];
+        int64_t duration = 0;
+        AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &duration);
+
+        const AMediaCodecBufferInfo info = {0, 0, duration, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM};
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::WriteSample(trackIndex, nullptr, &info));
+    }
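+    // (Each EOS sample is expected to carry the source track's duration as its presentation
+    // timestamp, so the muxed output preserves the original track duration.)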
+
     EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
     EXPECT_TRUE(writer.stop());
 }
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
index 4d9386a..71d3a4e 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -67,7 +67,7 @@
 
     void initSampleReader() {
         const char* sourcePath =
-                "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
 
         const int sourceFd = open(sourcePath, O_RDONLY);
         ASSERT_GT(sourceFd, 0);
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
index c4a67bb..e68eaac 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -20,11 +20,38 @@
 #define LOG_TAG "MediaTranscoderTests"
 
 #include <android-base/logging.h>
+#include <fcntl.h>
 #include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
 #include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
 
 namespace android {
 
+#define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName)                                  \
+    static bool equal##_typeName(const char* key, AMediaFormat* src, AMediaFormat* dst) { \
+        _type srcVal, dstVal;                                                             \
+        bool srcPresent = AMediaFormat_get##_typeName(src, key, &srcVal);                 \
+        bool dstPresent = AMediaFormat_get##_typeName(dst, key, &dstVal);                 \
+        return (srcPresent == dstPresent) && (!srcPresent || (srcVal == dstVal));         \
+    }
+
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int32_t, Int32);
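+// equalInt64()/equalInt32() (expanded from the macro above) treat a field as preserved when
+// it is present in both formats with the same value, or absent from both.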
+
+struct FormatVerifierEntry {
+    const char* key;
+    std::function<bool(const char*, AMediaFormat*, AMediaFormat*)> equal;
+};
+
+static const FormatVerifierEntry kFieldsToPreserve[] = {
+        {AMEDIAFORMAT_KEY_DURATION, equalInt64},       {AMEDIAFORMAT_KEY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_HEIGHT, equalInt32},         {AMEDIAFORMAT_KEY_FRAME_RATE, equalInt32},
+        {AMEDIAFORMAT_KEY_FRAME_COUNT, equalInt32},    {AMEDIAFORMAT_KEY_DISPLAY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_SAR_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32},     {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
+};
+
 class TestCallbacks : public MediaTranscoder::CallbackInterface {
 public:
     virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
@@ -48,7 +75,7 @@
                                   int32_t progress __unused) override {}
 
     virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
-                                     const std::shared_ptr<const Parcelable>& pausedState
+                                     const std::shared_ptr<const Parcel>& pausedState
                                              __unused) override {}
 
     void waitForTranscodingFinished() {
@@ -66,8 +93,10 @@
     bool mFinished = false;
 };
 
-static const char* SOURCE_PATH =
-        "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+// Write-only, create file if non-existent, don't overwrite existing file.
+static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
+// User R+W permission.
+static constexpr int kFileMode = S_IRUSR | S_IWUSR;
 
 class MediaTranscoderTests : public ::testing::Test {
 public:
@@ -87,12 +116,13 @@
     void deleteFile(const char* path) { unlink(path); }
 
     using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
-    media_status_t transcodeHelper(const char* destPath,
+    media_status_t transcodeHelper(const char* srcPath, const char* destPath,
                                    FormatConfigurationCallback formatCallback) {
         auto transcoder = MediaTranscoder::create(mCallbacks, nullptr);
         EXPECT_NE(transcoder, nullptr);
 
-        EXPECT_EQ(transcoder->configureSource(SOURCE_PATH), AMEDIA_OK);
+        const int srcFd = open(srcPath, O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
 
         std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
         EXPECT_GT(trackFormats.size(), 0);
@@ -100,53 +130,150 @@
         for (int i = 0; i < trackFormats.size(); ++i) {
             AMediaFormat* format = formatCallback(trackFormats[i].get());
             EXPECT_EQ(transcoder->configureTrackFormat(i, format), AMEDIA_OK);
+
+            // Save original video track format for verification.
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mSourceVideoFormat = trackFormats[i];
+            }
+
             if (format != nullptr) {
                 AMediaFormat_delete(format);
             }
         }
         deleteFile(destPath);
-        EXPECT_EQ(transcoder->configureDestination(destPath), AMEDIA_OK);
+        const int dstFd = open(destPath, kOpenFlags, kFileMode);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
 
         media_status_t startStatus = transcoder->start();
         EXPECT_EQ(startStatus, AMEDIA_OK);
         if (startStatus == AMEDIA_OK) {
             mCallbacks->waitForTranscodingFinished();
         }
+        close(srcFd);
+        close(dstFd);
 
         return mCallbacks->mStatus;
     }
 
+    void testTranscodeVideo(const char* srcPath, const char* destPath, const char* dstMime) {
+        const int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+        EXPECT_EQ(
+                transcodeHelper(
+                        srcPath, destPath,
+                        [dstMime](AMediaFormat* sourceFormat) {
+                            AMediaFormat* format = nullptr;
+                            const char* mime = nullptr;
+                            AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                            if (strncmp(mime, "video/", 6) == 0) {
+                                format = AMediaFormat_new();
+                                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+
+                                if (dstMime != nullptr) {
+                                    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, dstMime);
+                                }
+                            }
+                            return format;
+                        }),
+                AMEDIA_OK);
+
+        if (dstMime != nullptr) {
+            std::vector<FormatVerifierEntry> extraVerifiers = {
+                    {AMEDIAFORMAT_KEY_MIME,
+                     [dstMime](const char* key, AMediaFormat* src __unused, AMediaFormat* dst) {
+                         const char* mime = nullptr;
+                         AMediaFormat_getString(dst, key, &mime);
+                         return !strcmp(mime, dstMime);
+                     }},
+            };
+            verifyOutputFormat(destPath, &extraVerifiers);
+        } else {
+            verifyOutputFormat(destPath);
+        }
+    }
+
+    void verifyOutputFormat(const char* destPath,
+                            const std::vector<FormatVerifierEntry>* extraVerifiers = nullptr) {
+        int dstFd = open(destPath, O_RDONLY);
+        EXPECT_GT(dstFd, 0);
+        ssize_t fileSize = lseek(dstFd, 0, SEEK_END);
+        lseek(dstFd, 0, SEEK_SET);
+
+        std::shared_ptr<MediaSampleReader> sampleReader =
+                MediaSampleReaderNDK::createFromFd(dstFd, 0, fileSize);
+        ASSERT_NE(sampleReader, nullptr);
+
+        std::shared_ptr<AMediaFormat> videoFormat;
+        const size_t trackCount = sampleReader->getTrackCount();
+        for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(static_cast<int>(trackIndex));
+            if (trackFormat != nullptr) {
+                const char* mime = nullptr;
+                AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                if (strncmp(mime, "video/", 6) == 0) {
+                    LOG(INFO) << "Track # " << trackIndex << ": "
+                              << AMediaFormat_toString(trackFormat);
+                    videoFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                    break;
+                }
+            }
+        }
+
+        EXPECT_NE(videoFormat, nullptr);
+
+        LOG(INFO) << "source video format: " << AMediaFormat_toString(mSourceVideoFormat.get());
+        LOG(INFO) << "transcoded video format: " << AMediaFormat_toString(videoFormat.get());
+
+        for (int i = 0; i < (sizeof(kFieldsToPreserve) / sizeof(kFieldsToPreserve[0])); ++i) {
+            EXPECT_TRUE(kFieldsToPreserve[i].equal(kFieldsToPreserve[i].key,
+                                                   mSourceVideoFormat.get(), videoFormat.get()))
+                    << "Failed at key " << kFieldsToPreserve[i].key;
+        }
+
+        if (extraVerifiers != nullptr) {
+            for (int i = 0; i < extraVerifiers->size(); ++i) {
+                const FormatVerifierEntry& entry = (*extraVerifiers)[i];
+                EXPECT_TRUE(entry.equal(entry.key, mSourceVideoFormat.get(), videoFormat.get()));
+            }
+        }
+
+        close(dstFd);
+    }
+
     std::shared_ptr<TestCallbacks> mCallbacks;
+    std::shared_ptr<AMediaFormat> mSourceVideoFormat;
 };
 
 TEST_F(MediaTranscoderTests, TestPassthrough) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
     const char* destPath = "/data/local/tmp/MediaTranscoder_Passthrough.MP4";
 
-    EXPECT_EQ(transcodeHelper(destPath, [](AMediaFormat*) { return nullptr; }), AMEDIA_OK);
+    EXPECT_EQ(transcodeHelper(srcPath, destPath, [](AMediaFormat*) { return nullptr; }), AMEDIA_OK);
 
-    // TODO: Validate output file
+    verifyOutputFormat(destPath);
 }
 
-TEST_F(MediaTranscoderTests, TestBasicVideoTranscode) {
-    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode.MP4";
+TEST_F(MediaTranscoderTests, TestVideoTranscode_AvcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_AvcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, nullptr /*dstMime*/);
+}
 
-    EXPECT_EQ(transcodeHelper(
-                      destPath,
-                      [](AMediaFormat* sourceFormat) {
-                          AMediaFormat* format = nullptr;
-                          const char* mime = nullptr;
-                          AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
 
-                          if (strncmp(mime, "video/", 6) == 0) {
-                              const int32_t kBitRate = 8 * 1000 * 1000;  // 8Mbs
-                              format = AMediaFormat_new();
-                              AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
-                          }
-                          return format;
-                      }),
-              AMEDIA_OK);
-
-    // TODO: Validate output file
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Rotation) {
+    const char* srcPath =
+            "/data/local/tmp/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Rotation.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
 }
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
index 316793a..b79f58c 100644
--- a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -41,7 +41,7 @@
 
     void initSourceAndExtractor() {
         const char* sourcePath =
-                "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
 
         mExtractor = AMediaExtractor_new();
         ASSERT_NE(mExtractor, nullptr);
diff --git a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
index 79c227b..54b42fe 100644
--- a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
+++ b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
@@ -15,6 +15,7 @@
  */
 
 #include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
 
 #include <condition_variable>
 #include <memory>
@@ -56,6 +57,8 @@
     ~TestCallback() = default;
 
     // MediaTrackTranscoderCallback
+    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {}
+
     void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
         std::unique_lock<std::mutex> lock(mMutex);
         mTranscodingFinished = true;
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 6b1f640..1eb9e5a 100644
--- a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -42,7 +42,7 @@
     void SetUp() override {
         LOG(DEBUG) << "VideoTrackTranscoderTests set up";
         const char* sourcePath =
-                "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
 
         const int sourceFd = open(sourcePath, O_RDONLY);
         ASSERT_GT(sourceFd, 0);
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
index 01beeee..241178d 100755
--- a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
@@ -18,7 +18,9 @@
 
   adb root && adb wait-for-device remount && adb sync
 fi
-adb push assets /data/local/tmp/TranscoderTestAssets
+
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
 
 echo "========================================"
 
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index fac6350..c180edf 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -70,9 +70,18 @@
     },
 
     header_libs: [
+        "libaudioclient_headers",
         "libstagefright_foundation_headers",
+        "media_ndk_headers",
     ],
-    shared_libs: ["libmediandk"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
     export_include_dirs: ["include"],
 }
 
@@ -144,8 +153,10 @@
 
     header_libs: [
         "libaudioclient_headers",
-        "libmedia_headers",
+        "libbase_headers",
+        "libmedia_datasource_headers",
         "media_ndk_headers",
+        "media_plugin_headers",
     ],
 
     cflags: [
@@ -162,6 +173,18 @@
             "signed-integer-overflow",
         ],
     },
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+        linux: {
+            cflags: [
+                "-DDISABLE_AUDIO_SYSTEM_OFFLOAD",
+            ],
+        }
+    },
 }
 
 cc_library_shared {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 39423c7..7c2a5ff 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -542,6 +542,7 @@
     mNumGrids = 0;
     mNextItemId = kItemIdBase;
     mHasRefs = false;
+    mResetStatus = OK;
     mPreAllocFirstTime = true;
     mPrevAllTracksTotalMetaDataSizeEstimate = 0;
 
@@ -566,7 +567,7 @@
         release();
     }
 
-    if (fallocate(mFd, FALLOC_FL_KEEP_SIZE, 0, 1) == 0) {
+    if (fallocate64(mFd, FALLOC_FL_KEEP_SIZE, 0, 1) == 0) {
         ALOGD("PreAllocation enabled");
         mPreAllocationEnabled = true;
     } else {
@@ -1027,6 +1028,11 @@
     return OK;
 }
 
+status_t MPEG4Writer::stop() {
+    // If a reset is already in progress, wait for it to complete.
+    return reset(true, true);
+}
+
 status_t MPEG4Writer::pause() {
     ALOGW("MPEG4Writer: pause is not supported");
     return ERROR_UNSUPPORTED;
@@ -1110,6 +1116,21 @@
     writeInt32(0x40000000);  // w
 }
 
+void MPEG4Writer::printWriteDurations() {
+    if (mWriteDurationPQ.empty()) {
+        return;
+    }
+    std::string writeDurationsString =
+            "Top " + std::to_string(mWriteDurationPQ.size()) + " write durations(microseconds):";
+    uint8_t i = 0;
+    while (!mWriteDurationPQ.empty()) {
+        writeDurationsString +=
+                " #" + std::to_string(++i) + ":" + std::to_string(mWriteDurationPQ.top().count());
+        mWriteDurationPQ.pop();
+    }
+    ALOGD("%s", writeDurationsString.c_str());
+}
+
 status_t MPEG4Writer::release() {
     ALOGD("release()");
     status_t err = OK;
@@ -1138,11 +1159,18 @@
     mStarted = false;
     free(mInMemoryCache);
     mInMemoryCache = NULL;
+
+    printWriteDurations();
+
     return err;
 }
 
-void MPEG4Writer::finishCurrentSession() {
-    reset(false /* stopSource */);
+status_t MPEG4Writer::finishCurrentSession() {
+    ALOGV("finishCurrentSession");
+    /* Don't wait if a reset is already in progress; that avoids a deadlock,
+     * as finishCurrentSession() is called from the control looper thread.
+     */
+    return reset(false, false);
 }
 
 status_t MPEG4Writer::switchFd() {
@@ -1164,11 +1192,32 @@
     return err;
 }
 
-status_t MPEG4Writer::reset(bool stopSource) {
+status_t MPEG4Writer::reset(bool stopSource, bool waitForAnyPreviousCallToComplete) {
     ALOGD("reset()");
-    std::lock_guard<std::mutex> l(mResetMutex);
+    std::unique_lock<std::mutex> lk(mResetMutex, std::defer_lock);
+    if (waitForAnyPreviousCallToComplete) {
+        /* A stop()=>reset() from the client needs the return value of the reset call,
+         * hence wait here if a reset is already in progress.
+         */
+        lk.lock();
+    } else if (!lk.try_lock()) {
+        /* An internal reset from the control looper thread shouldn't wait for a reset
+         * that is already in progress.
+         */
+        return INVALID_OPERATION;
+    }
+
+    if (mResetStatus != OK) {
+        /* No need to proceed if a previous reset finished with an error.
+         * If there was no error before, proceeding with the reset would be harmless,
+         * as the call would return from the mInitCheck condition below.
+         */
+        return mResetStatus;
+    }
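+    /* Every return path below records its result in mResetStatus, so a later stop()/reset()
+     * call can return the earlier failure (via the check above) instead of resetting again.
+     */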
+
     if (mInitCheck != OK) {
-        return OK;
+        mResetStatus = OK;
+        return mResetStatus;
     } else {
         if (!mWriterThreadStarted ||
             !mStarted) {
@@ -1180,7 +1229,8 @@
             if (writerErr != OK) {
                 retErr = writerErr;
             }
-            return retErr;
+            mResetStatus = retErr;
+            return mResetStatus;
         }
     }
 
@@ -1227,7 +1277,8 @@
     if (err != OK && err != ERROR_MALFORMED) {
         // Ignoring release() return value as there was an "err" already.
         release();
-        return err;
+        mResetStatus = err;
+        return mResetStatus;
     }
 
     // Fix up the size of the 'mdat' chunk.
@@ -1285,7 +1336,8 @@
     if (err == OK) {
         err = errRelease;
     }
-    return err;
+    mResetStatus = err;
+    return mResetStatus;
 }
 
 /*
@@ -1594,7 +1646,17 @@
 void MPEG4Writer::writeOrPostError(int fd, const void* buf, size_t count) {
     if (mWriteSeekErr == true)
         return;
+
+    auto beforeTP = std::chrono::high_resolution_clock::now();
     ssize_t bytesWritten = ::write(fd, buf, count);
+    auto afterTP = std::chrono::high_resolution_clock::now();
+    auto writeDuration =
+            std::chrono::duration_cast<std::chrono::microseconds>(afterTP - beforeTP).count();
+    mWriteDurationPQ.emplace(writeDuration);
+    if (mWriteDurationPQ.size() > kWriteDurationsCount) {
+        mWriteDurationPQ.pop();
+    }
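+    // Assuming mWriteDurationPQ is a min-heap (shortest duration on top), trimming it to
+    // kWriteDurationsCount entries keeps only the longest write durations, which
+    // printWriteDurations() logs during release().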
+
     /* Write as much as possible during stop() execution when there was an error
      * (mWriteSeekErr == true) in the previous call to write() or lseek64().
      */
@@ -1862,7 +1924,7 @@
     ALOGV("preAllocateSize :%" PRIu64 " lastFileEndOffset:%" PRIu64, preAllocateSize,
           lastFileEndOffset);
 
-    int res = fallocate(mFd, FALLOC_FL_KEEP_SIZE, lastFileEndOffset, preAllocateSize);
+    int res = fallocate64(mFd, FALLOC_FL_KEEP_SIZE, lastFileEndOffset, preAllocateSize);
     if (res == -1) {
         ALOGE("fallocate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
         sp<AMessage> msg = new AMessage(kWhatFallocateError, mReflector);
@@ -1889,7 +1951,7 @@
     ALOGD("ftruncate mPreAllocateFileEndOffset:%" PRId64 " mOffset:%" PRIu64
           " mMdatEndOffset:%" PRIu64 " diff:%" PRId64, mPreAllocateFileEndOffset, mOffset,
           mMdatEndOffset, mPreAllocateFileEndOffset - endOffset);
-    if(ftruncate(mFd, endOffset) == -1) {
+    if (ftruncate64(mFd, endOffset) == -1) {
         ALOGE("ftruncate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
         status = false;
         /* No need to post and handle(stop & notify client) error like it's done in preAllocate(),
@@ -2426,31 +2488,27 @@
             int fd = mNextFd;
             mNextFd = -1;
             mLock.unlock();
-            finishCurrentSession();
-            initInternal(fd, false /*isFirstSession*/);
-            start(mStartMeta.get());
-            mSwitchPending = false;
-            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+            if (finishCurrentSession() == OK) {
+                initInternal(fd, false /*isFirstSession*/);
+                status_t status = start(mStartMeta.get());
+                mSwitchPending = false;
+                if (status == OK)  {
+                    notify(MEDIA_RECORDER_EVENT_INFO,
+                           MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+                }
+            }
             break;
         }
-        // ::write() or lseek64() wasn't a success, file could be malformed
+        /* ::write() or lseek64() failed, so the file could be malformed, or fallocate()
+         * failed. reset() and notify the client in both cases.
+         */
+        case kWhatFallocateError: // fallthrough
         case kWhatIOError: {
-            ALOGE("kWhatIOError");
             int32_t err;
             CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
-            break;
-        }
-        // fallocate() failed, hence stop() and notify app.
-        case kWhatFallocateError: {
-            ALOGE("kWhatFallocateError");
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            //TODO: introduce a suitable MEDIA_RECORDER_ERROR_* instead MEDIA_RECORDER_ERROR_UNKNOWN?
+            // If a reset is already in progress, don't wait for it to complete, to avoid deadlock.
+            reset(true, false);
+            // TODO: introduce a new MEDIA_RECORDER_ERROR_* instead of MEDIA_RECORDER_ERROR_UNKNOWN?
             notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
             break;
         }
@@ -2458,7 +2516,7 @@
          * Responding with other options could be added later if required.
          */
         case kWhatNoIOErrorSoFar: {
-            ALOGD("kWhatNoIOErrorSoFar");
+            ALOGV("kWhatNoIOErrorSoFar");
             sp<AMessage> response = new AMessage;
             response->setInt32("err", OK);
             sp<AReplyToken> replyID;
@@ -4687,10 +4745,18 @@
 
 // This is useful if the pixel is not square
 void MPEG4Writer::Track::writePaspBox() {
-    mOwner->beginBox("pasp");
-    mOwner->writeInt32(1 << 16);  // hspacing
-    mOwner->writeInt32(1 << 16);  // vspacing
-    mOwner->endBox();  // pasp
+    // Do not write the 'pasp' box unless the track format specifies it.
+    // According to ISO/IEC 14496-12 (ISO base media file format), the 'pasp' box
+    // is optional. If present, it overrides the SAR from the video CSD, so only
+    // write it if the track format specifically requests it.
+    int32_t hSpacing, vSpacing;
+    if (mMeta->findInt32(kKeySARWidth, &hSpacing) && (hSpacing > 0)
+            && mMeta->findInt32(kKeySARHeight, &vSpacing) && (vSpacing > 0)) {
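+        // The 'pasp' box stores hSpacing and vSpacing as a plain ratio, so the SAR values
+        // from the track metadata can be written directly.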
+        mOwner->beginBox("pasp");
+        mOwner->writeInt32(hSpacing);  // hspacing
+        mOwner->writeInt32(vSpacing);  // vspacing
+        mOwner->endBox();  // pasp
+    }
 }
 
 int64_t MPEG4Writer::Track::getStartTimeOffsetTimeUs() const {
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index f1b6e8c..5a2a910 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -114,6 +114,13 @@
         return -EINVAL;
     }
 
+    /* As mAdapterLock is unlocked while waiting for signalBufferReturned(),
+     * a new buffer for the same track could be pushed from another thread
+     * in the client process; mBufferGatingMutex holds that buffer back
+     * until the previous buffer has been processed.
+     */
+    std::unique_lock<std::mutex> lk(mBufferGatingMutex);
+
     Mutex::Autolock autoLock(mAdapterLock);
     if (!mStarted) {
         ALOGE("pushBuffer called before start");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6b264d5..e975ee6 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1483,9 +1483,9 @@
     return PostAndAwaitResponse(msg, &response);
 }
 
-status_t MediaCodec::releaseAsync() {
+status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
     sp<AMessage> msg = new AMessage(kWhatRelease, this);
-    msg->setInt32("async", 1);
+    msg->setMessage("async", notify);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
@@ -2696,6 +2696,11 @@
                     if (mReplyID != nullptr) {
                         (new AMessage)->postReply(mReplyID);
                     }
+                    if (mAsyncReleaseCompleteNotification != nullptr) {
+                        flushMediametrics();
+                        mAsyncReleaseCompleteNotification->post();
+                        mAsyncReleaseCompleteNotification.clear();
+                    }
                     break;
                 }
 
@@ -3082,8 +3087,8 @@
                 break;
             }
 
-            int32_t async = 0;
-            if (msg->findInt32("async", &async) && async) {
+            sp<AMessage> asyncNotify;
+            if (msg->findMessage("async", &asyncNotify) && asyncNotify != nullptr) {
                 if (mSurface != NULL) {
                     if (!mReleaseSurface) {
                         mReleaseSurface.reset(new ReleaseSurface);
@@ -3115,10 +3120,11 @@
                 pushBlankBuffersToNativeWindow(mSurface.get());
             }
 
-            if (async) {
+            if (asyncNotify != nullptr) {
                 mResourceManagerProxy->markClientForPendingRemoval();
                 (new AMessage)->postReply(mReplyID);
                 mReplyID = 0;
+                mAsyncReleaseCompleteNotification = asyncNotify;
             }
 
             break;
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index cab4ebd..8bbffd4 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -175,16 +175,23 @@
 
 status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                      int64_t timeUs, uint32_t flags) {
-    Mutex::Autolock autoLock(mMuxerLock);
-
     if (buffer.get() == NULL) {
         ALOGE("WriteSampleData() get an NULL buffer.");
         return -EINVAL;
     }
-
-    if (mState != STARTED) {
-        ALOGE("WriteSampleData() is called in invalid state %d", mState);
-        return INVALID_OPERATION;
+    {
+        /* As MediaMuxer's writeSampleData() handles input from multiple tracks,
+         * the scope of mMuxerLock is limited to this inner block so that the
+         * current track's buffer does not wait for the previous buffer of the
+         * same or another track to finish processing.
+         * It is the responsibility of each track's MediaAdapter object
+         * to gate its own buffers.
+         */
+        Mutex::Autolock autoLock(mMuxerLock);
+        if (mState != STARTED) {
+            ALOGE("WriteSampleData() is called in invalid state %d", mState);
+            return INVALID_OPERATION;
+        }
     }
 
     if (trackIndex >= mTrackList.size()) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index ef69827..d67874f 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -2178,7 +2178,11 @@
     }
     // Check if offload is possible for given format, stream type, sample rate,
     // bit rate, duration, video and streaming
+#ifdef DISABLE_AUDIO_SYSTEM_OFFLOAD
+    return false;
+#else
     return AudioSystem::isOffloadSupported(info);
+#endif
 }
 
 HLSTime::HLSTime(const sp<AMessage>& meta) :
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dd2eed3..6571162 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -295,12 +295,12 @@
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
-            <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
-            <Limit name="size" min="2x2" max="512x512" />
+            <!-- profiles and levels:  ProfileMain : MainTierLevel3 -->
+            <Limit name="size" min="2x2" max="960x544" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="8x8" />
-            <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
-            <Limit name="blocks-per-second" range="1-122880" />
+            <Limit name="block-count" range="1-8160" /> <!-- max 960x544 -->
+            <Limit name="blocks-per-second" range="1-244880" />
             <Limit name="frame-rate" range="1-120" />
             <Limit name="bitrate" range="1-10000000" />
             <Limit name="complexity" range="0-10"  default="0" />
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 32b2075..b63353c 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -2,6 +2,7 @@
     name: "libstagefright_flacdec",
     vendor_available: true,
     min_sdk_version: "29",
+    host_supported: true,
 
     srcs: [
         "FLACDecoder.cpp",
@@ -33,6 +34,13 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
     ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index cef0bc6..f5e9532 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -433,7 +433,7 @@
             if (mBuffer == nullptr) {
                 mBufferDataSize = 0;
                 mBufferLen = 0;
-                ALOGE("decodeOneFrame: failed to allocate memory for input buffer");
+                ALOGE("addDataToBuffer: failed to allocate memory for input buffer");
                 return NO_MEMORY;
             }
             mBufferLen = mBufferDataSize + inBufferLen;
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
new file mode 100644
index 0000000..d77f405
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "ColorUtilsTest",
+    gtest: true,
+
+    srcs: [
+        "ColorUtilsTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmediandk",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
new file mode 100644
index 0000000..0d802b4
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
@@ -0,0 +1,773 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtilsTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+const size_t kHDRBufferSize = 25;
+const uint16_t kHDRInfoTestValue1 = 420;
+const uint16_t kHDRInfoTestValue2 = 42069;
+
+using namespace android;
+
+typedef ColorAspects CA;
+
+class ColorRangeTest : public ::testing::TestWithParam</* ColorRange */ CA::Range> {
+  public:
+    ColorRangeTest() { mRange = GetParam(); };
+
+    CA::Range mRange;
+};
+
+class ColorTransferTest : public ::testing::TestWithParam</* ColorTransfer */ CA::Transfer> {
+  public:
+    ColorTransferTest() { mTransfer = GetParam(); };
+
+    CA::Transfer mTransfer;
+};
+
+class ColorStandardTest : public ::testing::TestWithParam<std::pair<
+                                  /* Primaries */ CA::Primaries,
+                                  /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    ColorStandardTest() {
+        mPrimaries = GetParam().first;
+        mMatrixCoeffs = GetParam().second;
+    };
+
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class IsoToPlatformAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                         /* Primaries */ CA::Primaries,
+                                         /* Transfer */ CA::Transfer,
+                                         /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                         /* Standard */ int32_t,
+                                         /* Transfer */ int32_t>> {
+  public:
+    IsoToPlatformAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mPlatformStandard = std::get<3>(GetParam());
+        mPlatformTransfer = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    int32_t mPlatformStandard;
+    int32_t mPlatformTransfer;
+};
+
+class ColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                 /* Primaries */ CA::Primaries,
+                                 /* ColorTransfer */ CA::Transfer,
+                                 /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                 /* ColorRange */ CA::Range,
+                                 /* ColorStandard */ CA::Standard>> {
+  public:
+    ColorAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mRange = std::get<3>(GetParam());
+        mStandard = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    CA::Range mRange;
+    CA::Standard mStandard;
+};
+
+class DefaultColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                        /* Width */ int32_t,
+                                        /* Height */ int32_t,
+                                        /* Primaries */ CA::Primaries,
+                                        /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    DefaultColorAspectsTest() {
+        mWidth = std::get<0>(GetParam());
+        mHeight = std::get<1>(GetParam());
+        mPrimaries = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+    };
+
+    int32_t mWidth;
+    int32_t mHeight;
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class DataSpaceTest : public ::testing::TestWithParam<std::tuple<
+                              /* ColorRange */ CA::Range,
+                              /* Primaries */ CA::Primaries,
+                              /* ColorTransfer */ CA::Transfer,
+                              /* MatrixCoeffs */ CA::MatrixCoeffs,
+                              /* v0_android_dataspace */ android_dataspace,
+                              /* android_dataspace */ android_dataspace>> {
+  public:
+    DataSpaceTest() {
+        mRange = std::get<0>(GetParam());
+        mPrimaries = std::get<1>(GetParam());
+        mTransfer = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+        mDataSpaceV0 = std::get<4>(GetParam());
+        mDataSpace = std::get<5>(GetParam());
+    };
+
+    CA::Range mRange;
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    android_dataspace mDataSpaceV0;
+    android_dataspace mDataSpace;
+};
+
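+// Wraps a ColorAspects::Range into its platform color-range value and checks that unwrapping
+// recovers the original enum.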
+TEST_P(ColorRangeTest, WrapColorRangeTest) {
+    int32_t range = ColorUtils::wrapColorAspectsIntoColorRange(mRange);
+    CA::Range unwrappedRange;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorRange(range, &unwrappedRange);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorRange failed";
+    EXPECT_EQ(unwrappedRange, mRange) << "Returned ColorRange doesn't match";
+    ALOGV("toString test: Range: %s", asString(mRange, "default"));
+}
+
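+// Round-trips ColorAspects::Transfer through the platform color-transfer value.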
+TEST_P(ColorTransferTest, WrapColorTransferTest) {
+    int32_t transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(mTransfer);
+    CA::Transfer unwrappedTransfer;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorTransfer(transfer, &unwrappedTransfer);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorTransfer failed";
+    EXPECT_EQ(unwrappedTransfer, mTransfer) << "Returned ColorTransfer doesn't match";
+    ALOGV("toString test: Transfer: %s", asString(mTransfer, "default"));
+}
+
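+// Wraps a (Primaries, MatrixCoeffs) pair into a platform color standard and unwraps it back.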
+TEST_P(ColorStandardTest, WrapColorStandardTest) {
+    int32_t standard = ColorUtils::wrapColorAspectsIntoColorStandard(mPrimaries, mMatrixCoeffs);
+    CA::Primaries unwrappedPrimaries;
+    CA::MatrixCoeffs unwrappedMatrixCoeffs;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorStandard(standard, &unwrappedPrimaries,
+                                                                      &unwrappedMatrixCoeffs);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorStandard failed";
+    EXPECT_EQ(unwrappedPrimaries, mPrimaries) << "Returned primaries doesn't match";
+    EXPECT_EQ(unwrappedMatrixCoeffs, mMatrixCoeffs) << "Returned matrixCoeffs doesn't match";
+}
+
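+// Converts codec ColorAspects to platform (range, standard, transfer) values and back,
+// expecting a lossless round trip for the parameterized combinations.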
+TEST_P(ColorAspectsTest, PlatformAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(aspects, &range,
+                                                                            &standard, &transfer);
+    ASSERT_EQ(status, OK) << "Conversion of ColorAspects to PlatformAspects failed";
+
+    CA returnedAspects;
+    status = ColorUtils::convertPlatformColorAspectsToCodecAspects(range, standard, transfer,
+                                                                   returnedAspects);
+    ASSERT_EQ(status, OK) << "Conversion of PlatformAspects to ColorAspects failed";
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between PlatformAspects";
+}
+
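+// Round-trips codec ColorAspects through ISO color description values (primaries, transfer,
+// matrix coefficients plus a full-range flag).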
+TEST_P(ColorAspectsTest, IsoAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t primaries = -1;
+    int32_t colorTransfer = -1;
+    int32_t matrixCoeffs = -1;
+    bool fullRange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &primaries, &colorTransfer,
+                                                     &matrixCoeffs, &fullRange);
+
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(primaries, colorTransfer, matrixCoeffs,
+                                                     fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between IsoAspects";
+}
+
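+// Converts codec aspects to ISO values, maps those ISO values to platform aspects and checks
+// the resulting standard/transfer (and the range, when the full-range flag is set).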
+TEST_P(IsoToPlatformAspectsTest, IsoAspectsToPlatformAspectsTest) {
+    CA aspects;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t isoPrimaries = -1;
+    int32_t isoTransfer = -1;
+    int32_t isoMatrixCoeffs = -1;
+    bool fullrange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &isoPrimaries, &isoTransfer,
+                                                     &isoMatrixCoeffs, &fullrange);
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    ColorUtils::convertIsoColorAspectsToPlatformAspects(isoPrimaries, isoTransfer, isoMatrixCoeffs,
+                                                        fullrange, &range, &standard, &transfer);
+    if (fullrange) {
+        EXPECT_EQ(range, ColorUtils::kColorRangeFull)
+                << "range incorrect converting to PlatformAspects";
+    }
+    EXPECT_EQ(standard, mPlatformStandard) << "standard incorrect converting to PlatformAspects";
+    EXPECT_EQ(transfer, mPlatformTransfer) << "transfer incorrect converting to PlatformAspects";
+}
+
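+// Packs ColorAspects into a single uint32_t and unpacks it, expecting identical fields.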
+TEST_P(ColorAspectsTest, PackColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    uint32_t packedColorAspects = ColorUtils::packToU32(aspects);
+
+    CA unpackedAspects = ColorUtils::unpackToColorAspects(packedColorAspects);
+    EXPECT_EQ(unpackedAspects.mRange, mRange) << "range mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mPrimaries, mPrimaries) << "primaries mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mTransfer, mTransfer) << "transfer mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mMatrixCoeffs, mMatrixCoeffs)
+            << "matrixCoeffs mismatch after unpacking";
+    ALOGV("toString test: Standard: %s", asString(mStandard, "default"));
+}
+
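+// Starts from fully unspecified aspects and lets ColorUtils pick defaults based on the video
+// resolution; range and transfer should fall back to limited range and SMPTE 170M.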
+TEST_P(DefaultColorAspectsTest, DefaultColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
+
+    ColorUtils::setDefaultCodecColorAspectsIfNeeded(aspects, mWidth, mHeight);
+    EXPECT_EQ(aspects.mRange, CA::RangeLimited) << "range not set to default";
+    EXPECT_EQ(aspects.mPrimaries, mPrimaries) << "primaries not set to default";
+    EXPECT_EQ(aspects.mMatrixCoeffs, mMatrixCoeffs) << "matrixCoeffs not set to default";
+    EXPECT_EQ(aspects.mTransfer, CA::TransferSMPTE170M) << "transfer not set to default";
+}
+
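+// Maps ColorAspects to an android_dataspace and verifies the legacy (V0) equivalent.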
+TEST_P(DataSpaceTest, DataSpaceTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, false);
+    EXPECT_EQ(dataSpace, mDataSpace) << "Returned incorrect dataspace";
+
+    bool status = ColorUtils::convertDataSpaceToV0(dataSpace);
+    ASSERT_TRUE(status) << "Returned v0 dataspace is not aspect-only";
+    EXPECT_EQ(dataSpace, mDataSpaceV0) << "Returned incorrect v0 dataspace";
+}
+
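+// checkIfAspectsChangedAndUnspecifyThem() should report changed fields and reset them (and,
+// when requested, the dependent fields) to Unspecified.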
+TEST(ColorUtilsUnitTest, AspectsChangedTest) {
+    CA origAspects;
+    origAspects.mRange = CA::Range::RangeFull;
+    origAspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    origAspects.mTransfer = CA::Transfer::TransferLinear;
+    origAspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    CA aspects;
+    aspects.mRange = CA::Range::RangeFull;
+    aspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    bool status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_FALSE(status) << "ColorAspects comparison check failed";
+
+    aspects.mRange = CA::Range::RangeLimited;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mRange, CA::Range::RangeUnspecified) << "range should have been unspecified";
+    aspects.mRange = CA::Range::RangeFull;
+
+    aspects.mTransfer = CA::Transfer::TransferSRGB;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mTransfer, CA::Transfer::TransferUnspecified)
+            << "transfer should have been unspecified";
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+
+    aspects.mPrimaries = CA::Primaries::PrimariesBT2020;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixSMPTE240M;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+}
+
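+// Reads the color config (range/standard/transfer) from an AMessage: zeros for an empty
+// format, the set values afterwards, and the same values again via copyColorConfig().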
+TEST(ColorUtilsUnitTest, ColorConfigFromFormatTest) {
+    int range = -1;
+    int standard = -1;
+    int transfer = -1;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range | standard | transfer, 0) << "color config didn't default to 0";
+
+    format->setInt32(KEY_COLOR_RANGE, CA::Range::RangeFull);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    sp<AMessage> copyFormat = new AMessage();
+    ASSERT_NE(copyFormat, nullptr) << "failed to create AMessage";
+    ColorUtils::copyColorConfig(format, copyFormat);
+    bool status = copyFormat->findInt32(KEY_COLOR_RANGE, &range);
+    ASSERT_TRUE(status) << "ColorConfig range entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_STANDARD, &standard);
+    ASSERT_TRUE(status) << "ColorConfig standard entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_TRANSFER, &transfer);
+    ASSERT_TRUE(status) << "ColorConfig transfer entry missing";
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    ColorUtils::getColorConfigFromFormat(copyFormat, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+}
+
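+// Writes ColorAspects into an AMessage and reads them back, first for default (unspecified)
+// aspects written unconditionally, then for the parameterized values.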
+TEST_P(ColorAspectsTest, FormatTest) {
+    CA aspects;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::setColorAspectsIntoFormat(aspects, format, true);
+
+    CA returnedAspects;
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    ColorUtils::setColorAspectsIntoFormat(aspects, format);
+
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+}
+
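+// Round-trips HDRStaticInfo through an AMessage format and through an AMediaFormat.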
+TEST(ColorUtilsUnitTest, HDRStaticInfoTest) {
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    HDRStaticInfo returnedInfoHDR;
+    bool status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_FALSE(status) << "HDR info should be absent in empty format";
+
+    HDRStaticInfo infoHDR;
+    infoHDR.sType1.mMaxDisplayLuminance = kHDRInfoTestValue2;
+    infoHDR.sType1.mMinDisplayLuminance = kHDRInfoTestValue1;
+    infoHDR.sType1.mMaxContentLightLevel = kHDRInfoTestValue2;
+    infoHDR.sType1.mMaxFrameAverageLightLevel = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mG.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mG.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mB.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mB.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mW.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mW.y = kHDRInfoTestValue2;
+    ColorUtils::setHDRStaticInfoIntoFormat(infoHDR, format);
+
+    status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_TRUE(status) << "Failed to get HDR info from format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << "HDRStaticInfo mismatch";
+
+    AMediaFormat *mediaFormat = AMediaFormat_new();
+    ASSERT_NE(mediaFormat, nullptr) << "Unable to create AMediaFormat";
+    ColorUtils::setHDRStaticInfoIntoAMediaFormat(infoHDR, mediaFormat);
+    memset(&returnedInfoHDR, 0, sizeof(returnedInfoHDR));
+    status = ColorUtils::getHDRStaticInfoFromFormat(mediaFormat->mFormat, &returnedInfoHDR);
+    AMediaFormat_delete(mediaFormat);
+    ASSERT_TRUE(status) << "Failed to get HDR info from media format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << "HDRStaticInfo mismatch";
+}
+
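+// Feeds unmapped and out-of-range enum values to the wrap/unwrap and conversion helpers,
+// expecting rejection or a fallback to Unspecified / extended-range values.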
+TEST(ColorUtilsUnitTest, SanityTest) {
+    CA::Primaries unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    CA::MatrixCoeffs unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixOther + 1);
+    int32_t colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, CA::MatrixUnspecified);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesOther, unmappedMatrixCoeffs);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard = ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesBT601_6_525,
+                                                                  CA::MatrixBT2020);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "Standard greater than extended start expected";
+    unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesBT2020 + 1);
+    unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixBT2020Constant + 1);
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, unmappedMatrixCoeffs);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "Standard greater than extended start expected";
+
+    CA aspects;
+    int32_t colorRange = -1;
+    colorStandard = -1;
+    int32_t colorTransfer = -1;
+    aspects.mPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(
+            aspects, &colorRange, &colorStandard, &colorTransfer);
+    EXPECT_NE(status, OK) << "invalid colorAspects value accepted";
+
+    int32_t colorPrimaries = -1;
+    colorTransfer = -1;
+    int32_t colorMatrixCoeffs = -1;
+    bool fullRange = false;
+    aspects.mPrimaries = CA::PrimariesOther;
+    aspects.mTransfer = CA::TransferOther;
+    aspects.mMatrixCoeffs = CA::MatrixOther;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &colorPrimaries, &colorTransfer,
+                                                     &colorMatrixCoeffs, &fullRange);
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    // Invalid ISO inputs: the codec-level 'Other' constants (0xFF) are not valid ISO codes,
+    // so conversion is expected to yield Unspecified aspects.
+    colorPrimaries = CA::PrimariesOther;
+    colorTransfer = CA::TransferOther;
+    colorMatrixCoeffs = CA::MatrixOther;
+    fullRange = false;
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    CA::Primaries primaries = CA::PrimariesUnspecified;
+    CA::MatrixCoeffs matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(ColorUtils::kColorStandardVendorStart,
+                                                             &primaries, &matrixCoeffs);
+    EXPECT_EQ(status, OK) << "unwrapping aspects from color standard failed";
+
+    primaries = CA::PrimariesUnspecified;
+    matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(
+            ColorUtils::kColorStandardVendorStart * 4, &primaries, &matrixCoeffs);
+    EXPECT_NE(status, OK) << "unwrapping aspects from color standard failed";
+
+    colorRange = ColorUtils::wrapColorAspectsIntoColorRange((CA::Range)(CA::RangeOther + 1));
+    EXPECT_EQ(colorRange, ColorUtils::kColorRangeUnspecified) << "expected unspecified color range";
+
+    CA::Range range;
+    status = ColorUtils::unwrapColorAspectsFromColorRange(
+            ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1, &range);
+    EXPECT_NE(status, OK) << "invalid range value accepted";
+    EXPECT_EQ(range, CA::RangeOther) << "expected RangeOther for out-of-range input";
+
+    colorTransfer =
+            ColorUtils::wrapColorAspectsIntoColorTransfer((CA::Transfer)(CA::TransferOther + 1));
+    EXPECT_EQ(colorTransfer, ColorUtils::kColorTransferUnspecified)
+            << "expected unspecified color transfer";
+
+    CA::Transfer transfer;
+    status = ColorUtils::unwrapColorAspectsFromColorTransfer(
+            ColorUtils::kColorTransferVendorStart + CA::TransferOther + 1, &transfer);
+    EXPECT_NE(status, OK) << "invalid transfer value accepted";
+    EXPECT_EQ(transfer, CA::TransferOther) << "expected other color transfer";
+}
+
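+// HDRStaticInfo parsing edge cases: missing buffer, wrongly sized buffer, unknown info type,
+// and a color-range key beyond the vendor range.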
+TEST(ColorUtilsUnitTest, HDRInfoSanityTest) {
+    HDRStaticInfo hdrInfo;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    bool boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "HDRStaticInfo should not be present";
+
+    sp<ABuffer> invalidSizeHDRInfoBuffer = new ABuffer(kHDRBufferSize - 1);
+    ASSERT_NE(invalidSizeHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidSizeHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    sp<ABuffer> invalidHDRInfoBuffer = new ABuffer(kHDRBufferSize);
+    ASSERT_NE(invalidHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+    uint8_t *data = invalidHDRInfoBuffer->data();
+    *data = HDRStaticInfo::kType1 + 1;
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    CA aspects;
+    format->setInt32(KEY_COLOR_RANGE, ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorAspectsFromFormat(format, aspects);
+    EXPECT_EQ(aspects.mRange, CA::RangeOther) << "unexpected range";
+}
+
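+// getDataSpaceForColorAspects(): fully unspecified aspects yield an unknown (0) dataspace,
+// while a BT.2020 constant-luminance matrix still maps to a non-zero one.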
+TEST(ColorUtilsUnitTest, DataSpaceSanityTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_EQ(dataSpace, 0) << "expected invalid dataspace";
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixBT2020Constant;
+    dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_NE(dataSpace, 0) << "unexpected value";
+}
+
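+// Value lists driving the parameterized fixtures above.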
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorRangeTest,
+                         ::testing::Values(
+                                 // ColorRange
+                                 CA::Range::RangeLimited, CA::Range::RangeFull,
+                                 CA::Range::RangeUnspecified, CA::Range::RangeOther));
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorTransferTest,
+                         ::testing::Values(
+                                 // ColorTransfer
+                                 CA::Transfer::TransferUnspecified, CA::Transfer::TransferLinear,
+                                 CA::Transfer::TransferSRGB, CA::Transfer::TransferSMPTE170M,
+                                 CA::Transfer::TransferGamma22, CA::Transfer::TransferGamma28,
+                                 CA::Transfer::TransferST2084, CA::Transfer::TransferHLG,
+                                 CA::Transfer::TransferSMPTE240M, CA::Transfer::TransferXvYCC,
+                                 CA::Transfer::TransferBT1361, CA::Transfer::TransferST428,
+                                 CA::Transfer::TransferOther));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorStandardTest,
+        ::testing::Values(
+                // Primaries, MatrixCoeffs
+                std::make_pair(CA::Primaries::PrimariesUnspecified,
+                               CA::MatrixCoeffs::MatrixUnspecified),
+                std::make_pair(CA::Primaries::PrimariesBT709_5,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixSMPTE240M),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020Constant),
+                std::make_pair(CA::Primaries::PrimariesBT470_6M,
+                               CA::MatrixCoeffs::MatrixBT470_6M),
+                std::make_pair(CA::Primaries::PrimariesGenericFilm,
+                               CA::MatrixCoeffs::MatrixBT2020)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorAspectsTest,
+        ::testing::Values(
+                // Primaries, ColorTransfer, MatrixCoeffs, ColorRange, ColorStandard
+                std::make_tuple(CA::Primaries::PrimariesUnspecified,
+                                CA::Transfer::TransferUnspecified,
+                                CA::MatrixCoeffs::MatrixUnspecified, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT709_5, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT709_5, CA::Range::RangeFull,
+                                CA::Standard::StandardBT709),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625, CA::Transfer::TransferSRGB,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                CA::Range::RangeFull, CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma22,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma28,
+                                CA::MatrixCoeffs::MatrixSMPTE240M, CA::Range::RangeFull,
+                                CA::Standard::StandardBT470M),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferST2084,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferHLG,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT470_6M, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT470_6M, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesGenericFilm, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_625)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, IsoToPlatformAspectsTest,
+        ::testing::Values(
+                // Primaries, Transfer, MatrixCoeffs, Standard, Transfer
+                std::make_tuple(CA::PrimariesUnspecified, CA::TransferUnspecified,
+                                CA::MatrixUnspecified, ColorUtils::kColorStandardUnspecified,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesBT709_5, CA::TransferLinear, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT709, ColorUtils::kColorTransferLinear),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSRGB, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_625,
+                                ColorUtils::kColorTransferSRGB),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSMPTE170M, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT601_625_Unadjusted,
+                                ColorUtils::kColorTransferSMPTE_170M),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma22, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_525,
+                                ColorUtils::kColorTransferGamma22),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma28, CA::MatrixSMPTE240M,
+                                ColorUtils::kColorStandardBT601_525_Unadjusted,
+                                ColorUtils::kColorTransferGamma28),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferST2084, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardBT2020, ColorUtils::kColorTransferST2084),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferHLG, CA::MatrixBT2020Constant,
+                                ColorUtils::kColorStandardBT2020Constant,
+                                ColorUtils::kColorTransferHLG),
+                std::make_tuple(CA::PrimariesBT470_6M, CA::TransferUnspecified, CA::MatrixBT470_6M,
+                                ColorUtils::kColorStandardBT470M,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesGenericFilm, CA::TransferLinear, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardFilm, ColorUtils::kColorTransferLinear)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DefaultColorAspectsTest,
+        ::testing::Values(
+                // Width, Height, Primaries, MatrixCoeffs
+                std::make_tuple(3840, 3840, CA::PrimariesBT2020, CA::MatrixBT2020),
+                std::make_tuple(720, 576, CA::PrimariesBT601_6_625, CA::MatrixBT601_6),
+                std::make_tuple(480, 360, CA::PrimariesBT601_6_525, CA::MatrixBT601_6),
+                std::make_tuple(480, 1920, CA::PrimariesBT709_5, CA::MatrixBT709_5)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DataSpaceTest,
+        ::testing::Values(
+                // ColorRange, Primaries, ColorTransfer, MatrixCoeffs, v0_android_dataspace,
+                // android_dataspace
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSRGB, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB, HAL_DATASPACE_SRGB),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferLinear, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB_LINEAR, HAL_DATASPACE_SRGB_LINEAR),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_JFIF, HAL_DATASPACE_JFIF),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixSMPTE240M,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT2020,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525)));
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index a1fe57c..2582ed0 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -27,6 +27,7 @@
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <mutex>
+#include <queue>
 
 namespace android {
 
@@ -45,7 +46,7 @@
 
     // Returns INVALID_OPERATION if there is no source or track.
     virtual status_t start(MetaData *param = NULL);
-    virtual status_t stop() { return reset(); }
+    virtual status_t stop();
     virtual status_t pause();
     virtual bool reachedEOS();
     virtual status_t dump(int fd, const Vector<String16>& args);
@@ -125,12 +126,19 @@
     bool mWriteSeekErr;
     bool mFallocateErr;
     bool mPreAllocationEnabled;
+    status_t mResetStatus;
+    // Min-heap (smallest duration at the top) used to retain the longest write durations.
+    std::priority_queue<std::chrono::microseconds, std::vector<std::chrono::microseconds>,
+                        std::greater<std::chrono::microseconds>> mWriteDurationPQ;
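+    // Number of longest write durations retained in mWriteDurationPQ.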
+    const uint8_t kWriteDurationsCount = 5;
 
     sp<ALooper> mLooper;
     sp<AHandlerReflector<MPEG4Writer> > mReflector;
 
     Mutex mLock;
+    // Serializes reset() calls coming from the MPEG4Writer client and from MP4WtrCtrlHlpLooper.
     std::mutex mResetMutex;
+    // Serializes preallocation calls made from different track threads.
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
@@ -149,6 +157,7 @@
     int64_t estimateMoovBoxSize(int32_t bitRate);
     int64_t estimateFileLevelMetaSize(MetaData *params);
     void writeCachedBoxToFile(const char *type);
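+    // Logs the longest write durations retained in mWriteDurationPQ.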
+    void printWriteDurations();
 
     struct Chunk {
         Track               *mTrack;        // Owner
@@ -298,7 +307,7 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
-    void finishCurrentSession();
+    status_t finishCurrentSession();
 
     void addDeviceMeta();
     void writeHdlr(const char *handlerType);
@@ -331,7 +340,7 @@
     void sendSessionSummary();
     status_t release();
     status_t switchFd();
-    status_t reset(bool stopSource = true);
+    status_t reset(bool stopSource = true, bool waitForAnyPreviousCallToComplete = true);
 
     static uint32_t getMpeg4Time();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 177a9e9..c7d7765 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -58,6 +58,7 @@
 
 private:
     Mutex mAdapterLock;
+    std::mutex mBufferGatingMutex;
     // Make sure the read() wait for the incoming buffer.
     Condition mBufferReadCond;
     // Make sure the pushBuffer() wait for the current buffer consumed.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 5f5d88a..1f8e780 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -146,7 +146,7 @@
     // object.
     status_t release();
 
-    status_t releaseAsync();
+    status_t releaseAsync(const sp<AMessage> &notify);
 
     status_t flush();
 
@@ -390,6 +390,7 @@
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
     sp<AMessage> mOnFrameRenderedNotification;
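+    // Notification to post once an asynchronous release() has completed.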
+    sp<AMessage> mAsyncReleaseCompleteNotification;
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index fbb2d0c..1b71a2b 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -1,12 +1,11 @@
-cc_library_static {
-    name: "libstagefright_mpeg2support",
+cc_defaults {
+    name: "libstagefright_mpeg2support_defaults",
 
     srcs: [
         "AnotherPacketSource.cpp",
         "ATSParser.cpp",
         "CasManager.cpp",
         "ESQueue.cpp",
-        "HlsSampleDecryptor.cpp",
     ],
 
     include_dirs: [
@@ -28,7 +27,6 @@
     },
 
     shared_libs: [
-        "libcrypto",
         "libhidlmemory",
         "android.hardware.cas.native@1.0",
         "android.hidl.memory@1.0",
@@ -36,9 +34,10 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
         "libaudioclient_headers",
         "media_ndk_headers",
+        "libstagefright_foundation_headers",
     ],
 
     export_include_dirs: ["."],
@@ -49,3 +48,30 @@
 
     min_sdk_version: "29",
 }
+
+cc_library_static {
+    name: "libstagefright_mpeg2support",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    cflags: [
+        "-DENABLE_CRYPTO",
+    ],
+    shared_libs: [
+        "libcrypto",
+    ],
+    srcs: [
+        "HlsSampleDecryptor.cpp",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_mpeg2support_nocrypto",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    apex_available: [
+        "com.android.media",
+    ],
+}
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index e751a3e..801dba1 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -36,7 +36,7 @@
 #include <inttypes.h>
 #include <netinet/in.h>
 
-#ifndef __ANDROID_APEX__
+#ifdef ENABLE_CRYPTO
 #include "HlsSampleDecryptor.h"
 #endif
 
@@ -55,10 +55,10 @@
     // Create the decryptor anyway since we don't know the use-case unless key is provided
     // Won't decrypt if key info not available (e.g., scanner/extractor just parsing ts files)
     mSampleDecryptor = isSampleEncrypted() ?
-#ifdef __ANDROID_APEX__
-        new SampleDecryptor
-#else
+#ifdef ENABLE_CRYPTO
         new HlsSampleDecryptor
+#else
+        new SampleDecryptor
 #endif
         : NULL;
 }
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index f1c599f..4c0add4 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -49,8 +49,32 @@
     int32_t channelCount;
 };
 
+enum inputId {
+    // audio streams
+    AAC_1,
+    AAC_ADTS_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    AV1_1,
+    AVC_1,
+    H263_1,
+    HEVC_1,
+    MPEG4_1,
+    VP8_1,
+    VP9_1,
+    // heif stream
+    HEIC_1,
+    UNUSED_ID,
+    UNKNOWN_ID,
+};
+
 // LookUpTable of clips and metadata for component testing
 static const struct InputData {
+    inputId inpId;
     const char *mime;
     string inputFile;
     string info;
@@ -58,61 +82,67 @@
     int32_t secondParam;
     bool isAudio;
 } kInputData[] = {
-        {MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
-         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC, "bbb_aac_stereo_128kbps_48000hz.aac",
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "bbb_aac_stereo_128kbps_48000hz.aac",
          "bbb_aac_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
+        {AAC_ADTS_1, MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
          "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
          "sine_amrnb_1ch_12kbps_8000hz.info", 8000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
          "bbb_amrwb_1ch_14kbps_16000hz.info", 16000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
-         "bbb_vorbis_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
          "bbb_flac_stereo_680kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
-         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
-         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_176x144_300kbps_60fps.h264",
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
+         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+         "bbb_vorbis_stereo_128kbps_48000hz.info", 48000, 2, true},
+
+        {AV1_1, MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144,
+         false},
+        {AVC_1, MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_176x144_300kbps_60fps.h264",
          "bbb_avc_176x144_300kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_176x144_176kbps_60fps.hevc",
-         "bbb_hevc_176x144_176kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
+        {H263_1, MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
          "bbb_h263_352x288_300kbps_12fps.info", 352, 288, false},
-        {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_176x144_176kbps_60fps.hevc",
+         "bbb_hevc_176x144_176kbps_60fps.info", 176, 144, false},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
          "bbb_mpeg4_352x288_512kbps_30fps.info", 352, 288, false},
+        {VP8_1, MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
+         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
+         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
+
+        {HEIC_1, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "bbb_hevc_176x144_176kbps_60fps.hevc",
+         "bbb_heic_176x144_176kbps_60fps.info", 176, 144, false},
 };
 
 class WriterTest {
   public:
-    WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+    WriterTest() : mWriter(nullptr), mFileMeta(nullptr) {}
 
     ~WriterTest() {
         if (mFileMeta) {
             mFileMeta.clear();
             mFileMeta = nullptr;
         }
-        if (mCurrentTrack) {
-            mCurrentTrack->stop();
-            mCurrentTrack.clear();
-            mCurrentTrack = nullptr;
-        }
         if (mWriter) {
             mWriter.clear();
             mWriter = nullptr;
         }
-        mBufferInfo.clear();
-        if (mInputStream.is_open()) mInputStream.close();
         if (gEnv->cleanUp()) remove(OUTPUT_FILE_NAME);
+
+        for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+            mBufferInfo[idx].clear();
+            if (mCurrentTrack[idx]) {
+                mCurrentTrack[idx]->stop();
+                mCurrentTrack[idx].clear();
+                mCurrentTrack[idx] = nullptr;
+            }
+            if (mInputStream[idx].is_open()) mInputStream[idx].close();
+        }
     }
 
     void setupWriterType(string writerFormat) {
-        mNumCsds = 0;
-        mInputFrameId = 0;
         mWriterName = unknown_comp;
         mDisableTest = false;
         static const std::map<std::string, standardWriters> mapWriter = {
@@ -128,11 +158,11 @@
         }
     }
 
-    void getInputBufferInfo(string inputFileName, string inputInfo);
+    void getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx = 0);
 
     int32_t createWriter(int32_t fd);
 
-    int32_t addWriterSource(bool isAudio, configFormat params);
+    int32_t addWriterSource(bool isAudio, configFormat params, int32_t idx = 0);
 
     enum standardWriters {
         OGG,
@@ -149,38 +179,42 @@
     standardWriters mWriterName;
     sp<MediaWriter> mWriter;
     sp<MetaData> mFileMeta;
-    sp<MediaAdapter> mCurrentTrack;
+    sp<MediaAdapter> mCurrentTrack[kMaxTrackCount]{};
 
     bool mDisableTest;
-    int32_t mNumCsds;
-    int32_t mInputFrameId;
-    ifstream mInputStream;
-    vector<BufferInfo> mBufferInfo;
+    int32_t mNumCsds[kMaxTrackCount]{};
+    int32_t mInputFrameId[kMaxTrackCount]{};
+    ifstream mInputStream[kMaxTrackCount]{};
+    vector<BufferInfo> mBufferInfo[kMaxTrackCount];
 };
 
-class WriteFunctionalityTest : public WriterTest,
-                               public ::testing::TestWithParam<pair<string, int32_t>> {
+class WriteFunctionalityTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<string /* writerFormat*/, inputId /* inputId0*/,
+                                            inputId /* inputId1*/, float /* BufferInterval*/>> {
   public:
-    virtual void SetUp() override { setupWriterType(GetParam().first); }
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
 };
 
-void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
+void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx) {
     std::ifstream eleInfo;
     eleInfo.open(inputInfo.c_str());
     ASSERT_EQ(eleInfo.is_open(), true);
     int32_t bytesCount = 0;
     uint32_t flags = 0;
     int64_t timestamp = 0;
+    int32_t numCsds = 0;
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
         eleInfo >> timestamp;
-        mBufferInfo.push_back({bytesCount, flags, timestamp});
-        if (flags == CODEC_CONFIG_FLAG) mNumCsds++;
+        mBufferInfo[idx].push_back({bytesCount, flags, timestamp});
+        if (flags == CODEC_CONFIG_FLAG) numCsds++;
     }
     eleInfo.close();
-    mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
-    ASSERT_EQ(mInputStream.is_open(), true);
+    mNumCsds[idx] = numCsds;
+    mInputStream[idx].open(inputFileName.c_str(), std::ifstream::binary);
+    ASSERT_EQ(mInputStream[idx].is_open(), true);
 }
 
 int32_t WriterTest::createWriter(int32_t fd) {
@@ -226,10 +260,10 @@
     return 0;
 }
 
-int32_t WriterTest::addWriterSource(bool isAudio, configFormat params) {
-    if (mInputFrameId) return -1;
+int32_t WriterTest::addWriterSource(bool isAudio, configFormat params, int32_t idx) {
+    if (mInputFrameId[idx]) return -1;
     sp<AMessage> format = new AMessage;
-    if (mInputStream.is_open()) {
+    if (mInputStream[idx].is_open()) {
         format->setString("mime", params.mime);
         if (isAudio) {
             format->setInt32("channel-count", params.channelCount);
@@ -238,25 +272,34 @@
             format->setInt32("width", params.width);
             format->setInt32("height", params.height);
         }
-
-        int32_t status =
-                writeHeaderBuffers(mInputStream, mBufferInfo, mInputFrameId, format, mNumCsds);
-        if (status != 0) return -1;
+        if (mNumCsds[idx]) {
+            int32_t status = writeHeaderBuffers(mInputStream[idx], mBufferInfo[idx],
+                                                mInputFrameId[idx], format, mNumCsds[idx]);
+            if (status != 0) return -1;
+        }
     }
+
     sp<MetaData> trackMeta = new MetaData;
     convertMessageToMetaData(format, trackMeta);
-    mCurrentTrack = new MediaAdapter(trackMeta);
-    if (mCurrentTrack == nullptr) {
+    mCurrentTrack[idx] = new MediaAdapter(trackMeta);
+    if (mCurrentTrack[idx] == nullptr) {
         ALOGE("MediaAdapter returned nullptr");
         return -1;
     }
-    status_t result = mWriter->addSource(mCurrentTrack);
+    status_t result = mWriter->addSource(mCurrentTrack[idx]);
     return result;
 }
 
 void getFileDetails(string &inputFilePath, string &info, configFormat &params, bool &isAudio,
-                    int32_t streamIndex = 0) {
-    if (streamIndex >= sizeof(kInputData) / sizeof(kInputData[0])) {
+                    inputId inpId) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t streamIndex = 0;
+    for (; streamIndex < inputDataSize; streamIndex++) {
+        if (inpId == kInputData[streamIndex].inpId) {
+            break;
+        }
+    }
+    if (streamIndex == inputDataSize) {
         return;
     }
     inputFilePath += kInputData[streamIndex].inputFile;
@@ -284,14 +327,14 @@
 
     // Creating writer within a test scope. Destructor should be called when the test ends
     ASSERT_EQ((status_t)OK, createWriter(fd))
-            << "Failed to create writer for output format:" << GetParam().first;
+            << "Failed to create writer for output format:" << get<0>(GetParam());
 }
 
 TEST_P(WriteFunctionalityTest, WriterTest) {
     if (mDisableTest) return;
     ALOGV("Checks if for a given input, a valid muxed file has been created or not");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -300,25 +343,49 @@
     int32_t status = createWriter(fd);
     ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
 
-    string inputFile = gEnv->getRes();
-    string inputInfo = gEnv->getRes();
-    configFormat param;
-    bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
 
-    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
-    status = addWriterSource(isAudio, param);
-    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        configFormat param;
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param, isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param, idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    }
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size());
-    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    float interval = get<3>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
 
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
@@ -328,7 +395,7 @@
     if (mDisableTest) return;
     ALOGV("Validates the pause() api of writers");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -341,8 +408,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -351,8 +420,8 @@
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     bool isPaused = false;
@@ -362,19 +431,19 @@
         isPaused = true;
     }
     // In the pause state, writers shouldn't write anything. Verify that the writer honors this
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, numFramesPaused, isPaused);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, isPaused);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     if (isPaused) {
         status = mWriter->start(mFileMeta.get());
         ASSERT_EQ((status_t)OK, status);
     }
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
@@ -392,7 +461,7 @@
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
     ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     int32_t status = createWriter(fd);
     ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
 
@@ -400,8 +469,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -418,8 +489,8 @@
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                              mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -428,20 +499,20 @@
     }
 
     mWriter->pause();
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, numFramesPaused, true);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, true);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     // first stop should succeed.
     status = mWriter->stop();
@@ -454,15 +525,13 @@
     close(fd);
 }
 
-class ListenerTest : public WriterTest,
-                     public ::testing::TestWithParam<
-                             tuple<string /* writerFormat*/, int32_t /* inputFileIdx*/,
-                                   float /* FileSizeLimit*/, float /* FileDurationLimit*/>> {
+class ListenerTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<
+              string /* writerFormat*/, inputId /* inputId0*/, inputId /* inputId1*/,
+              float /* FileSizeLimit*/, float /* FileDurationLimit*/, float /* BufferInterval*/>> {
   public:
-    virtual void SetUp() override {
-        tuple<string, int32_t, float, float> params = GetParam();
-        setupWriterType(get<0>(params));
-    }
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
 };
 
 TEST_P(ListenerTest, SetMaxFileLimitsTest) {
@@ -470,8 +539,7 @@
     if (mDisableTest || mWriterName != MPEG4) return;
     ALOGV("Validates writer when max file limits are set");
 
-    tuple<string, int32_t, float, float> params = GetParam();
-    string writerFormat = get<0>(params);
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -480,30 +548,42 @@
     int32_t status = createWriter(fd);
     ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
 
-    string inputFile = gEnv->getRes();
-    string inputInfo = gEnv->getRes();
-    configFormat param;
-    bool isAudio;
-    int32_t inputFileIdx = get<1>(params);
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
 
-    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
-    status = addWriterSource(isAudio, param);
-    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    size_t inputFileSize = 0;
+    int64_t lastFrameTimeStampUs = INT_MAX;
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        configFormat param;
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param, isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
-    // Read file properties
-    struct stat buf;
-    status = stat(inputFile.c_str(), &buf);
-    ASSERT_EQ(0, status);
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param, idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
 
-    float fileSizeLimit = get<2>(params);
-    float fileDurationLimit = get<3>(params);
+        // Read file properties
+        struct stat buf;
+        status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(0, status);
+
+        inputFileSize += buf.st_size;
+        if (lastFrameTimeStampUs > mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs) {
+            lastFrameTimeStampUs = mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs;
+        }
+    }
+
+    float fileSizeLimit = get<3>(GetParam());
+    float fileDurationLimit = get<4>(GetParam());
     int64_t maxFileSize = 0;
     int64_t maxFileDuration = 0;
-
-    size_t inputFileSize = buf.st_size;
-    int64_t lastFrameTimeStampUs = mBufferInfo[mBufferInfo.size() - 1].timeUs;
     if (fileSizeLimit > 0) {
         maxFileSize = (int64_t)(fileSizeLimit * inputFileSize);
         mWriter->setMaxFileSize(maxFileSize);
@@ -518,14 +598,33 @@
 
     mWriter->setListener(listener);
     status = mWriter->start(mFileMeta.get());
-
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size(), false, listener);
-    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    float interval = get<5>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
+
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range, false, listener);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+
     ASSERT_TRUE(mWriter->reachedEOS()) << "EOS not signalled.";
 
-    mCurrentTrack->stop();
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
+
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
@@ -552,23 +651,57 @@
 
 // TODO: (b/150923387)
 // Add WEBM input
-INSTANTIATE_TEST_SUITE_P(
-        ListenerTestAll, ListenerTest,
-        ::testing::Values(make_tuple("ogg", 0, 0.7, 0.3), make_tuple("aac", 1, 0.6, 0.7),
-                          make_tuple("mpeg4", 1, 0.4, 0.3), make_tuple("amrnb", 3, 0.2, 0.6),
-                          make_tuple("amrwb", 4, 0.5, 0.5), make_tuple("mpeg2Ts", 1, 0.2, 1)));
+INSTANTIATE_TEST_SUITE_P(ListenerTestAll, ListenerTest,
+                         ::testing::Values(make_tuple("aac", AAC_1, UNUSED_ID, 0.6, 0.7, 1),
+                                           make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 0.2, 0.6, 1),
+                                           make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 0.5, 0.5, 1),
+                                           make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 0.2, 1, 1),
+                                           make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.4, 0.3, 0.25),
+                                           make_tuple("ogg", OPUS_1, UNUSED_ID, 0.7, 0.3, 1)));
 
 // TODO: (b/144476164)
 // Add AAC_ADTS, FLAC, AV1 input
-INSTANTIATE_TEST_SUITE_P(WriterTestAll, WriteFunctionalityTest,
-                         ::testing::Values(make_pair("ogg", 0), make_pair("webm", 0),
-                                           make_pair("aac", 1), make_pair("mpeg4", 1),
-                                           make_pair("amrnb", 3), make_pair("amrwb", 4),
-                                           make_pair("webm", 5), make_pair("webm", 7),
-                                           make_pair("webm", 8), make_pair("mpeg4", 9),
-                                           make_pair("mpeg4", 10), make_pair("mpeg4", 12),
-                                           make_pair("mpeg4", 13), make_pair("mpeg2Ts", 1),
-                                           make_pair("mpeg2Ts", 9)));
+INSTANTIATE_TEST_SUITE_P(
+        WriterTestAll, WriteFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", AAC_1, UNUSED_ID, 1),
+
+                make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 1),
+
+                // TODO(b/144902018): Enable test for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 1),
+                // make_tuple("mpeg2Ts", AVC_1, UNUSED_ID, 1),
+                // TODO(b/156355857): Add multitrack for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, AVC_1, 0.50),
+                // make_tuple("mpeg2Ts", AVC_1, AAC_1, 0.25),
+
+                make_tuple("mpeg4", AAC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_WB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", H263_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEIC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", MPEG4_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AAC_1, AVC_1, 0.25),
+                make_tuple("mpeg4", AVC_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_WB_1, AAC_1, 0.75),
+                make_tuple("mpeg4", HEVC_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_NB_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, HEVC_1, 0.75),
+
+                make_tuple("ogg", OPUS_1, UNUSED_ID, 1),
+
+                make_tuple("webm", OPUS_1, UNUSED_ID, 1),
+                make_tuple("webm", VORBIS_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, UNUSED_ID, 1),
+                make_tuple("webm", VP9_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, OPUS_1, 0.50),
+                make_tuple("webm", VORBIS_1, VP8_1, 0.25)));
 
 int main(int argc, char **argv) {
     gEnv = new WriterTestEnvironment();
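
The WriterTest loop above writes each track in slices: on every pass it sends interval * mBufferInfo[idx].size() frames per track and advances that track's offset, stopping after ceil(1.0 / interval) passes. A minimal standalone sketch of that interleaving arithmetic, using hypothetical per-track frame counts (plain C++, no Android dependencies):

    #include <cmath>
    #include <cstdio>
    #include <vector>

    int main() {
        const float interval = 0.25f;                      // fraction of each track sent per pass
        const std::vector<size_t> frameCount = {100, 80};  // hypothetical per-track buffer counts
        std::vector<size_t> offset(frameCount.size(), 0);  // mirrors offset[kMaxTrackCount] above
        for (int pass = 0; pass < std::ceil(1.0 / interval); ++pass) {
            for (size_t idx = 0; idx < frameCount.size(); ++idx) {
                // Same slice arithmetic as the (offset, range) arguments to sendBuffersToWriter.
                const size_t range = frameCount[idx] * interval;
                std::printf("pass %d, track %zu: frames [%zu, %zu)\n",
                            pass, idx, offset[idx], offset[idx] + range);
                offset[idx] += range;
            }
        }
        return 0;
    }
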
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index 5e19973..5e79298 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -31,6 +31,7 @@
 
 #define CODEC_CONFIG_FLAG 32
 
+constexpr uint32_t kMaxTrackCount = 2;
 constexpr uint32_t kMaxCSDStrlen = 16;
 constexpr uint32_t kMaxCount = 20;
 
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index a968890..afca7c4 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -15,13 +15,14 @@
     srcs: ["main_mediaserver.cpp"],
 
     shared_libs: [
-        "libresourcemanagerservice",
+        "android.hardware.media.omx@1.0",
+        "libandroidicu",
+        "libbinder",
+        "libhidlbase",
         "liblog",
         "libmediaplayerservice",
+        "libresourcemanagerservice",
         "libutils",
-        "libbinder",
-        "libandroidicu",
-        "android.hardware.media.omx@1.0",
     ],
 
     static_libs: [
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 7b22b05..316732b 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -22,6 +22,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
+#include <hidl/HidlTransportSupport.h>
 #include <utils/Log.h>
 #include "RegisterExtensions.h"
 
@@ -42,6 +43,8 @@
     MediaPlayerService::instantiate();
     ResourceManagerService::instantiate();
     registerExtensions();
+    ::android::hardware::configureRpcThreadpool(16, false);
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
+    ::android::hardware::joinRpcThreadpool();
 }
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index 14319c4..a1cd9e3 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_MEDIA_EXTRACTOR_H
 #define _NDK_MEDIA_EXTRACTOR_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index a094cfc..6371de4 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_MEDIA_FORMAT_H
 #define _NDK_MEDIA_FORMAT_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
new file mode 100644
index 0000000..ca1123c
--- /dev/null
+++ b/media/utils/fuzzers/Android.bp
@@ -0,0 +1,51 @@
+cc_defaults {
+    name: "libmediautils_fuzzer_defaults",
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wno-c++2a-extensions",
+    ],
+
+    header_libs: [
+        "bionic_libc_platform_headers",
+        "libmedia_headers",
+    ],
+
+    include_dirs: [
+        // For DEBUGGER_SIGNAL
+        "system/core/debuggerd/include",
+    ],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_battery_notifier",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["BatteryNotifierFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_scheduling_policy_service",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["SchedulingPolicyServiceFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_service_utilities",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["ServiceUtilitiesFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_time_check",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["TimeCheckFuzz.cpp"],
+}
diff --git a/media/utils/fuzzers/BatteryNotifierFuzz.cpp b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
new file mode 100644
index 0000000..00b3cce
--- /dev/null
+++ b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include <utils/String8.h>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/BatteryNotifier.h"
+
+static constexpr int kMaxOperations = 30;
+static constexpr int kMaxStringLength = 500;
+using android::BatteryNotifier;
+
+std::vector<std::function<void(std::string /*flashlight_name*/, std::string /*camera_name*/,
+                               uid_t /*video_id*/, uid_t /*audio_id*/, uid_t /*light_id*/,
+                               uid_t /*camera_id*/)>>
+    operations = {
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetVideo();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetAudio();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetFlashlight();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetCamera();
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartVideo(video_id);
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopVideo(video_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartAudio(audio_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopAudio(audio_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOn(name, light_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOff(name, light_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStartCamera(name, camera_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStopCamera(name, camera_id);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    std::string camera_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    std::string flashlight_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    uid_t video_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t audio_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t light_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t camera_id = data_provider.ConsumeIntegral<uid_t>();
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](flashlight_name, camera_name, video_id, audio_id, light_id, camera_id);
+    }
+    return 0;
+}
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
new file mode 100644
index 0000000..4521853
--- /dev/null
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "SchedulingPolicyServiceFuzzer"
+#include <binder/IBatteryStats.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <android/log.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include "fuzzer/FuzzedDataProvider.h"
+using android::IBatteryStats;
+using android::IBinder;
+using android::IInterface;
+using android::IServiceManager;
+using android::sp;
+using android::String16;
+using android::defaultServiceManager;
+using android::requestCpusetBoost;
+using android::requestPriority;
+sp<IBatteryStats> getBatteryService() {
+    sp<IBatteryStats> batteryStatService;
+    const sp<IServiceManager> sm(defaultServiceManager());
+    if (sm != nullptr) {
+        const String16 name("batterystats");
+        batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
+        if (batteryStatService == nullptr) {
+            ALOGW("batterystats service unavailable!");
+            return nullptr;
+        }
+    }
+    return batteryStatService;
+}
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    sp<IBatteryStats> batteryStatService = getBatteryService();
+    // There is some state here, but it's mostly focused around thread-safety, so
+    // we won't worry about order.
+    int32_t priority = data_provider.ConsumeIntegral<int32_t>();
+    bool is_for_app = data_provider.ConsumeBool();
+    bool async = data_provider.ConsumeBool();
+    requestPriority(getpid(), gettid(), priority, is_for_app, async);
+    // TODO: Verify and re-enable in AOSP (R).
+    // bool enable = data_provider.ConsumeBool();
+    // We are just using batterystats to avoid the need
+    // to register a new service.
+    // requestCpusetBoost(enable, IInterface::asBinder(batteryStatService));
+    return 0;
+}
+
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
new file mode 100644
index 0000000..3d141b5
--- /dev/null
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+
+#include <functional>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/ServiceUtilities.h"
+
+static constexpr int kMaxOperations = 50;
+static constexpr int kMaxStringLen = 256;
+
+const std::vector<std::function<void(FuzzedDataProvider*, android::MediaPackageManager)>>
+    operations = {
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            uid_t uid = data_provider->ConsumeIntegral<uid_t>();
+            pm.allowPlaybackCapture(uid);
+        },
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            int spaces = data_provider->ConsumeIntegral<int>();
+
+            // Dump everything into /dev/null
+            int fd = open("/dev/null", O_WRONLY);
+            pm.dump(fd, spaces);
+            close(fd);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    uid_t uid = data_provider.ConsumeIntegral<uid_t>();
+    pid_t pid = data_provider.ConsumeIntegral<pid_t>();
+
+    // There is no state here, and order is not significant,
+    // so we can simply call all of the target functions.
+    android::isServiceUid(uid);
+    android::isAudioServerUid(uid);
+    android::isAudioServerOrSystemServerUid(uid);
+    android::isAudioServerOrMediaServerUid(uid);
+    std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    android::String16 opPackageName(packageNameStr.c_str());
+    android::recordingAllowed(opPackageName, pid, uid);
+    android::startRecording(opPackageName, pid, uid);
+    android::finishRecording(opPackageName, uid);
+    android::captureAudioOutputAllowed(pid, uid);
+    android::captureMediaOutputAllowed(pid, uid);
+    android::captureHotwordAllowed(opPackageName, pid, uid);
+    android::modifyPhoneStateAllowed(uid, pid);
+    android::bypassInterruptionPolicyAllowed(uid, pid);
+    android::settingsAllowed();
+    android::modifyAudioRoutingAllowed();
+    android::modifyDefaultAudioEffectsAllowed();
+    android::dumpAllowed();
+
+    // MediaPackageManager does have state, so we need the fuzzer to decide order
+    android::MediaPackageManager packageManager;
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](&data_provider, packageManager);
+    }
+
+    return 0;
+}
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
new file mode 100644
index 0000000..eeb6ba6
--- /dev/null
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <chrono>
+#include <thread>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/TimeCheck.h"
+
+static constexpr int kMaxStringLen = 256;
+
+// While it might be interesting to test long-running
+// jobs, it seems unlikely it'd lead to the types of crashes
+// we're looking for, and would mean a significant increase in fuzzer time.
+// Therefore, we are setting a low cap.
+static constexpr uint32_t kMaxTimeoutMs = 1000;
+static constexpr uint32_t kMinTimeoutMs = 200;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+
+    // There are essentially four operations that we can exercise in this class:
+    // 1. The time it takes to run this operation. As mentioned above,
+    //    long-running tasks are not good for fuzzing, but there will be
+    //    some change in the run time.
+    uint32_t timeoutMs =
+        data_provider.ConsumeIntegralInRange<uint32_t>(kMinTimeoutMs, kMaxTimeoutMs);
+    uint8_t pid_size = data_provider.ConsumeIntegral<uint8_t>();
+    std::vector<pid_t> pids(pid_size);
+    for (auto& pid : pids) {
+        pid = data_provider.ConsumeIntegral<pid_t>();
+    }
+
+    // 2. We also have setAudioHalPids, which is populated with the pids set
+    // above.
+    android::TimeCheck::setAudioHalPids(pids);
+    std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+
+    // 3. The constructor, which is fuzzed here:
+    android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+    // We will leave some buffer to avoid sleeping too long
+    uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
+
+    // We want to make sure we can cover the timeout functionality.
+    if (sleep_amount_ms) {
+        auto ms = std::chrono::milliseconds(sleep_amount_ms);
+        std::this_thread::sleep_for(ms);
+    }
+
+    // 4. Finally, the TimeCheck destructor, which runs when this scope exits. These seem to be
+    // the only factors in play.
+    return 0;
+}
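
The fuzzer above drives TimeCheck through its constructor, an optional sleep, and its destructor. For context, a minimal sketch of the scoped-watchdog pattern the class implements: the constructor arms a named timeout and the destructor disarms it, so a guarded call that hangs past the budget gets flagged. The slow call below is a hypothetical stand-in, and the sketch assumes it is built against libmediautils:

    #include <chrono>
    #include <thread>

    #include "mediautils/TimeCheck.h"

    // Hypothetical stand-in for a HAL or binder call that might hang.
    static void possiblyHangingCall() {
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }

    int main() {
        // Arms a watchdog with a 3000 ms budget; the destructor cancels it when the
        // scope exits in time, as in the fuzzer above.
        android::TimeCheck watchdog("possiblyHangingCall", 3000 /* timeoutMs */);
        possiblyHangingCall();
        return 0;
    }
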
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index d13cb8f..7e06096 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -164,6 +164,10 @@
         stream = outStream;
         mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
         status = stream->getFrameSize(&mHalFrameSize);
+        LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
+        LOG_ALWAYS_FATAL_IF(mHalFrameSize <= 0, "Error frame size was %zu but must be greater than"
+                " zero", mHalFrameSize);
+
     }
 
     return status;
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 3eacc8c..bf2e953 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -40,6 +40,7 @@
 #include <audio_utils/channels.h>
 #include <audio_utils/format.h>
 #include <audio_utils/mono_blend.h>
+#include <cutils/bitops.h>
 #include <media/AudioMixer.h>
 #include "FastMixer.h"
 #include "TypedLogger.h"
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 7e63851..c252d77 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,6 +29,7 @@
 #include <linux/futex.h>
 #include <sys/stat.h>
 #include <sys/syscall.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -2091,12 +2092,6 @@
         outputFlags = (audio_output_flags_t)(outputFlags | AUDIO_OUTPUT_FLAG_FAST);
     }
 
-    // Set DIRECT flag if current thread is DirectOutputThread. This can happen when the playback is
-    // rerouted to direct output thread by dynamic audio policy.
-    if (mType == DIRECT) {
-        *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT);
-    }
-
     // Check if requested flags are compatible with output stream flags
     if ((*flags & outputFlags) != *flags) {
         ALOGW("createTrack_l(): mismatch between requested flags (%08x) and output flags (%08x)",
@@ -6061,10 +6056,6 @@
     bool trackPaused = false;
     bool trackStopped = false;
 
-    if ((mType == DIRECT) && audio_is_linear_pcm(mFormat) && !usesHwAvSync()) {
-        return !mStandby;
-    }
-
     // do not put the HAL in standby when paused. AwesomePlayer clears the offloaded AudioTrack
     // after a timeout and we will enter standby then.
     if (mTracks.size() > 0) {
@@ -8435,13 +8426,14 @@
     }
     result = mInput->stream->getFrameSize(&mFrameSize);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+    LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
+            mFrameSize);
     result = mInput->stream->getBufferSize(&mBufferSize);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving buffer size from HAL: %d", result);
     mFrameCount = mBufferSize / mFrameSize;
-    ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%lld, "
-            "mBufferSize=%lld, mFrameCount=%lld",
-            this, mChannelCount, mFormat, (long long)mFrameSize, (long long)mBufferSize,
-            (long long)mFrameCount);
+    ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
+            "mBufferSize=%zu, mFrameCount=%zu",
+            this, mChannelCount, mFormat, mFrameSize, mBufferSize, mFrameCount);
     // This is the formula for calculating the temporary buffer size.
     // With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
     // 1 full output buffer, regardless of the alignment of the available input.
@@ -9017,6 +9009,8 @@
     LOG_ALWAYS_FATAL_IF(!audio_is_linear_pcm(mFormat), "HAL format %#x is not linear pcm", mFormat);
     result = mHalStream->getFrameSize(&mFrameSize);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+    LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
+            mFrameSize);
     result = mHalStream->getBufferSize(&mBufferSize);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving buffer size from HAL: %d", result);
     mFrameCount = mBufferSize / mFrameSize;
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 12bd341..af16448 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -68,6 +68,7 @@
     }
 
     void logConstructor(pid_t creatorPid, uid_t creatorUid,
+            const std::string& traits = {},
             audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT) const {
         // Once this item is logged by the server, the client can add properties.
         // no lock required, all local or const variables.
@@ -76,7 +77,8 @@
             .setUid(creatorUid)
             .set(AMEDIAMETRICS_PROP_ALLOWUID, (int32_t)creatorUid)
             .set(AMEDIAMETRICS_PROP_EVENT,
-                    AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR);
+                    AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+            .set(AMEDIAMETRICS_PROP_TRAITS, traits);
         // log streamType from the service, since client doesn't know chosen streamType.
         if (streamType != AUDIO_STREAM_DEFAULT) {
             item.set(AMEDIAMETRICS_PROP_STREAMTYPE, toString(streamType).c_str());
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 0021e17..d366bb7 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -602,7 +602,8 @@
     }
 
     // Once this item is logged by the server, the client can add properties.
-    mTrackMetrics.logConstructor(creatorPid, uid, streamType);
+    const char * const traits = sharedBuffer == 0 ? "" : "static";
+    mTrackMetrics.logConstructor(creatorPid, uid, traits, streamType);
 }
 
 AudioFlinger::PlaybackThread::Track::~Track()
@@ -804,7 +805,7 @@
     status_t status = mServerProxy->obtainBuffer(&buf);
     buffer->frameCount = buf.mFrameCount;
     buffer->raw = buf.mRaw;
-    if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused()) {
+    if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused() && !isOffloaded()) {
         ALOGV("%s(%d): underrun,  framesReady(%zu) < framesDesired(%zd), state: %d",
                 __func__, mId, buf.mFrameCount, desiredFrames, mState);
         mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0c5d1d0..923310c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -35,6 +35,7 @@
 
 namespace android {
 
+class AudioPolicyMix;
 class DeviceDescriptor;
 class HwAudioOutputDescriptor;
 class SwAudioOutputDescriptor;
@@ -90,11 +91,12 @@
                           product_strategy_t strategy, VolumeSource volumeSource,
                           audio_output_flags_t flags,
                           bool isPreferredDeviceForExclusiveUse,
-                          std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs) :
+                          std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs,
+                          wp<AudioPolicyMix> primaryMix) :
         ClientDescriptor(portId, uid, sessionId, attributes, config, preferredDeviceId,
                          isPreferredDeviceForExclusiveUse),
         mStream(stream), mStrategy(strategy), mVolumeSource(volumeSource), mFlags(flags),
-        mSecondaryOutputs(std::move(secondaryOutputs)) {}
+        mSecondaryOutputs(std::move(secondaryOutputs)), mPrimaryMix(primaryMix) {}
     ~TrackClientDescriptor() override = default;
 
     using ClientDescriptor::dump;
@@ -108,6 +110,9 @@
         return mSecondaryOutputs;
     };
     VolumeSource volumeSource() const { return mVolumeSource; }
+    const sp<AudioPolicyMix> getPrimaryMix() const {
+        return mPrimaryMix.promote();
+    };
 
     void setActive(bool active) override
     {
@@ -136,7 +141,7 @@
     const VolumeSource mVolumeSource;
     const audio_output_flags_t mFlags;
     const std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
-
+    const wp<AudioPolicyMix> mPrimaryMix;
     /**
      * required for duplicating thread, prevent from removing active client from an output
      * involved in a duplication.
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d5272bc..d6d472b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -690,7 +690,9 @@
         const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
         if (outputDesc->isActive(volumeSource, inPastMs, sysTime)
                 && (!(outputDesc->devices()
-                        .containsDeviceAmongTypes(getAllOutRemoteDevices())))) {
+                        .containsDeviceAmongTypes(getAllOutRemoteDevices())
+                        || outputDesc->devices()
+                            .onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)))) {
             return true;
         }
     }
@@ -722,7 +724,11 @@
         const sp<SwAudioOutputDescriptor> otherDesc = valueAt(i);
         if (desc->sharesHwModuleWith(otherDesc) &&
                 otherDesc->isStrategyActive(ps, inPastMs, sysTime)) {
-            return true;
+            if (desc == otherDesc
+                    || !otherDesc->devices()
+                            .onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
+                return true;
+            }
         }
     }
     return false;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index fc1a59f..b6de4be 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -28,7 +28,7 @@
 
 void AudioPolicyMix::dump(String8 *dst, int spaces, int index) const
 {
-    dst->appendFormat("%*sAudio Policy Mix %d:\n", spaces, "", index + 1);
+    dst->appendFormat("%*sAudio Policy Mix %d (%p):\n", spaces, "", index + 1, this);
     std::string mixTypeLiteral;
     if (!MixTypeConverter::toString(mMixType, mixTypeLiteral)) {
         ALOGE("%s: failed to convert mix type %d", __FUNCTION__, mMixType);
@@ -44,6 +44,9 @@
 
     dst->appendFormat("%*s- device address: %s\n", spaces, "", mDeviceAddress.string());
 
+    dst->appendFormat("%*s- output: %d\n", spaces, "",
+            mOutput == nullptr ? 0 : mOutput->mIoHandle);
+
     int indexCriterion = 0;
     for (const auto &criterion : mCriteria) {
         dst->appendFormat("%*s- Criterion %d: ", spaces + 2, "", indexCriterion++);
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 95822b9..afc4d01 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -23,6 +23,7 @@
 #include <TypeConverter.h>
 #include "AudioOutputDescriptor.h"
 #include "AudioPatch.h"
+#include "AudioPolicyMix.h"
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "HwModule.h"
@@ -55,6 +56,12 @@
     ClientDescriptor::dump(dst, spaces, index);
     dst->appendFormat("%*s- Stream: %d flags: %08x\n", spaces, "", mStream, mFlags);
     dst->appendFormat("%*s- Refcount: %d\n", spaces, "", mActivityCount);
+    dst->appendFormat("%*s- DAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
+    dst->appendFormat("%*s- DAP Secondary Outputs:\n", spaces, "");
+    for (auto desc : mSecondaryOutputs) {
+        dst->appendFormat("%*s  - %d\n", spaces, "",
+                desc.promote() == nullptr ? 0 : desc.promote()->mIoHandle);
+    }
 }
 
 std::string TrackClientDescriptor::toShortString() const
@@ -88,7 +95,7 @@
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
-        {} /* Sources do not support secondary outputs*/), mSrcDevice(srcDevice)
+        {} /* Sources do not support secondary outputs*/, nullptr), mSrcDevice(srcDevice)
 {
 }
 
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 559d406..7492cd8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -42,6 +42,7 @@
 #include <set>
 #include <unordered_set>
 #include <vector>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <utils/Log.h>
 #include <media/AudioParameter.h>
@@ -1103,14 +1104,15 @@
     };
     *portId = PolicyAudioPort::getNextUniqueId();
 
+    sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
     sp<TrackClientDescriptor> clientDesc =
         new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
                                   sanitizedRequestedPortId, *stream,
                                   mEngine->getProductStrategyForAttributes(resultAttr),
                                   toVolumeSource(resultAttr),
                                   *flags, isRequestedDeviceForExclusiveUse,
-                                  std::move(weakSecondaryOutputDescs));
-    sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
+                                  std::move(weakSecondaryOutputDescs),
+                                  outputDesc->mPolicyMix);
     outputDesc->addClient(clientDesc);
 
     ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
@@ -2891,7 +2893,7 @@
 {
     ALOGV("registerPolicyMixes() %zu mix(es)", mixes.size());
     status_t res = NO_ERROR;
-
+    bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
@@ -3010,11 +3012,16 @@
                         i, type, address.string());
                 res = INVALID_OPERATION;
                 break;
+            } else {
+                checkOutputs = true;
             }
         }
     }
     if (res != NO_ERROR) {
         unregisterPolicyMixes(mixes);
+    } else if (checkOutputs) {
+        checkForDeviceAndOutputChanges();
+        updateCallAndOutputRouting();
     }
     return res;
 }
@@ -3023,6 +3030,7 @@
 {
     ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
     status_t res = NO_ERROR;
+    bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
     // examine each mix's route type
     for (const auto& mix : mixes) {
@@ -3063,9 +3071,15 @@
             if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
                 res = INVALID_OPERATION;
                 continue;
+            } else {
+                checkOutputs = true;
             }
         }
     }
+    if (res == NO_ERROR && checkOutputs) {
+        checkForDeviceAndOutputChanges();
+        updateCallAndOutputRouting();
+    }
     return res;
 }
 
@@ -5240,32 +5254,38 @@
     SortedVector<audio_io_handle_t> srcOutputs = getOutputsForDevices(oldDevices, mPreviousOutputs);
     SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
 
-    // also take into account external policy-related changes: add all outputs which are
-    // associated with policies in the "before" and "after" output vectors
-    ALOGVV("%s(): policy related outputs", __func__);
-    bool hasDynamicPolicy = false;
-    for (size_t i = 0 ; i < mPreviousOutputs.size() ; i++) {
-        const sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueAt(i);
-        if (desc != 0 && desc->mPolicyMix != NULL) {
-            srcOutputs.add(desc->mIoHandle);
-            hasDynamicPolicy = true;
-            ALOGVV(" previous outputs: adding %d", desc->mIoHandle);
+    uint32_t maxLatency = 0;
+    bool invalidate = false;
+    // take into account changes related to dynamic audio policies: if a client is now associated
+    // with a different policy mix than at creation time, invalidate the corresponding stream
+    for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+        const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
+        if (desc->isDuplicated()) {
+            continue;
         }
-    }
-    for (size_t i = 0 ; i < mOutputs.size() ; i++) {
-        const sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-        if (desc != 0 && desc->mPolicyMix != NULL) {
-            dstOutputs.add(desc->mIoHandle);
-            hasDynamicPolicy = true;
-            ALOGVV(" new outputs: adding %d", desc->mIoHandle);
+        for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
+            if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
+                continue;
+            }
+            sp<AudioPolicyMix> primaryMix;
+            status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
+                    client->flags(), primaryMix, nullptr);
+            if (status != OK) {
+                continue;
+            }
+            if (client->getPrimaryMix() != primaryMix) {
+                invalidate = true;
+                if (desc->isStrategyActive(psId)) {
+                    maxLatency = desc->latency();
+                }
+                break;
+            }
         }
     }
 
-    if (srcOutputs != dstOutputs) {
+    if (srcOutputs != dstOutputs || invalidate) {
         // get maximum latency of all source outputs to determine the minimum mute time guaranteeing
         // audio from invalidated tracks will be rendered when unmuting
-        uint32_t maxLatency = 0;
-        bool invalidate = hasDynamicPolicy;
         for (audio_io_handle_t srcOut : srcOutputs) {
             sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
             if (desc == nullptr) continue;
@@ -5442,6 +5462,12 @@
         }
     }
 
+    // Do not retrieve engine device for outputs through MSD
+    // TODO: support explicit routing requests by resetting MSD patch to engine device.
+    if (outputDesc->devices() == getMsdAudioOutDevices()) {
+        return outputDesc->devices();
+    }
+
     // Honor explicit routing requests only if no client using default routing is active on this
     // input: a specific app can not force routing for other apps by setting a preferred device.
     bool active; // unused
@@ -5766,15 +5792,6 @@
     DeviceVector filteredDevices = outputDesc->filterSupportedDevices(devices);
     DeviceVector prevDevices = outputDesc->devices();
 
-    // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
-    // output profile or if new device is not supported AND previous device(s) is(are) still
-    // available (otherwise reset device must be done on the output)
-    if (!devices.isEmpty() && filteredDevices.isEmpty() &&
-            !mAvailableOutputDevices.filter(prevDevices).empty()) {
-        ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
-        return 0;
-    }
-
     ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
 
     if (!filteredDevices.isEmpty()) {
@@ -5789,6 +5806,17 @@
         muteWaitMs = 0;
     }
 
+    // no need to proceed if the new device is not AUDIO_DEVICE_NONE, is not supported by the
+    // current output profile, and the previous device(s) is(are) still available (otherwise
+    // the device reset must still be applied to the output)
+    if (!devices.isEmpty() && filteredDevices.isEmpty() &&
+            !mAvailableOutputDevices.filter(prevDevices).empty()) {
+        ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
+        // restore previous device after evaluating strategy mute state
+        outputDesc->setDevices(prevDevices);
+        return muteWaitMs;
+    }
+
     // Do not change the routing if:
     //      the requested device is AUDIO_DEVICE_NONE
     //      OR the requested device is the same as current device
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 9577160..34d07b6 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -593,7 +593,7 @@
     }
 
     // including successes gets very verbose
-    // but once we cut over to westworld, log them all.
+    // but once we cut over to statsd, log them all.
     if (status != NO_ERROR) {
 
         static constexpr char kAudioPolicy[] = "audiopolicy";
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 9b61e74..e847f9f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -566,11 +566,13 @@
 
         auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
             bool canCaptureCall = recordClient->canCaptureOutput;
-            bool canCaptureCommunication = recordClient->canCaptureOutput
-                || recordClient->uid == mPhoneStateOwnerUid
-                || isServiceUid(mPhoneStateOwnerUid);
-            return !(isInCall && !canCaptureCall)
-                && !(isInCommunication && !canCaptureCommunication);
+            return !(isInCall && !canCaptureCall);
+//TODO(b/160260850): restore restriction to mode owner once fix for misbehaving apps is merged
+//            bool canCaptureCommunication = recordClient->canCaptureOutput
+//                || recordClient->uid == mPhoneStateOwnerUid
+//                || isServiceUid(mPhoneStateOwnerUid);
+//            return !(isInCall && !canCaptureCall)
+//                && !(isInCommunication && !canCaptureCommunication);
         };
 
         // By default allow capture if:
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index e1721ea..bdddf06 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -75,6 +75,10 @@
     status_t createAudioPatch(const struct audio_patch *patch,
                               audio_patch_handle_t *handle,
                               int /*delayMs*/) override {
+        auto iter = mActivePatches.find(*handle);
+        if (iter != mActivePatches.end()) {
+            mActivePatches.erase(*handle);
+        }
         *handle = mNextPatchHandle++;
         mActivePatches.insert(std::make_pair(*handle, *patch));
         return NO_ERROR;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index daa1edb..b5de1b7 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -496,9 +496,6 @@
         clientToDisconnect->notifyError(
                 hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
                 CaptureResultExtras{});
-        // Ensure not in binder RPC so client disconnect PID checks work correctly
-        LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
-                "onDeviceStatusChanged must be called from the camera service process!");
         clientToDisconnect->disconnect();
     }
 }
@@ -1377,7 +1374,12 @@
             Mutex::Autolock l(mLogLock);
             mEventLog.add(msg);
 
-            return -EBUSY;
+            auto current = mActiveClientManager.get(cameraId);
+            if (current != nullptr) {
+                return -EBUSY; // CAMERA_IN_USE
+            } else {
+                return -EUSERS; // MAX_CAMERAS_IN_USE
+            }
         }
 
         for (auto& i : evicted) {
@@ -1638,7 +1640,7 @@
                     cameraId.string(), clientName8.string(), clientPid);
         }
 
-        // Enforce client permissions and do basic sanity checks
+        // Enforce client permissions and do basic validity checks
         if(!(ret = validateConnectLocked(cameraId, clientName8,
                 /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
             return ret;
@@ -1669,6 +1671,10 @@
                     return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
                             "Higher-priority client using camera, ID \"%s\" currently unavailable",
                             cameraId.string());
+                case -EUSERS:
+                    return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
+                            "Too many cameras already open, cannot open camera \"%s\"",
+                            cameraId.string());
                 default:
                     return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                             "Unexpected error %s (%d) opening camera \"%s\"",
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index 6fdac68..ca6cc58 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -1,7 +1,12 @@
 {
   "presubmit": [
     {
-       "name": "cameraservice_test"
+      "name": "cameraservice_test"
+    }
+  ],
+  "imports": [
+    {
+      "path": "frameworks/av/camera"
     }
   ]
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index e01e86d..09e2c3f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1777,6 +1777,14 @@
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
             ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
                     __FUNCTION__, errorCode, resultExtras.requestId);
+
+            if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
+                    (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
+                Mutex::Autolock al(mLatestRequestMutex);
+
+                mLatestFailedRequestId = resultExtras.requestId;
+                mLatestRequestSignal.signal();
+            }
             mCaptureSequencer->notifyError(errorCode, resultExtras);
             return;
         default:
@@ -2303,7 +2311,7 @@
 
 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
-    while (mLatestRequestId != requestId) {
+    while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
         nsecs_t startTime = systemTime();
 
         auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -2312,7 +2320,7 @@
         timeout -= (systemTime() - startTime);
     }
 
-    return OK;
+    return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
 }
 
 void Camera2Client::notifyRequestId(int32_t requestId) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index c5f0428..f8da0b6 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -237,6 +237,7 @@
     mutable Mutex mLatestRequestMutex;
     Condition mLatestRequestSignal;
     int32_t mLatestRequestId = -1;
+    int32_t mLatestFailedRequestId = -1;
     status_t waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout);
     status_t waitUntilCurrentRequestIdLocked();
 };
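
Note on the change above: waitUntilRequestIdApplied() now also wakes up when the request is recorded as failed (via mLatestFailedRequestId) and reports DEAD_OBJECT instead of blocking until the timeout. A minimal, self-contained sketch of that wait pattern, with hypothetical names standing in for the Camera2Client members:

    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    int main() {
        std::mutex m;
        std::condition_variable cv;
        int latestAppliedId = -1;   // stands in for mLatestRequestId
        int latestFailedId  = -1;   // stands in for mLatestFailedRequestId
        const int requestId = 7;

        // Error path: the request fails before it is ever applied.
        std::thread notifier([&] {
            std::lock_guard<std::mutex> lk(m);
            latestFailedId = requestId;
            cv.notify_all();
        });

        std::unique_lock<std::mutex> lk(m);
        // Wake on either outcome instead of only on success.
        cv.wait(lk, [&] { return latestAppliedId == requestId || latestFailedId == requestId; });
        std::printf("%s\n", latestAppliedId == requestId ? "OK" : "DEAD_OBJECT");

        notifier.join();
        return 0;
    }
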
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 20333d1..dbc863b 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -230,7 +230,7 @@
     previewFpsRange[1] = fastInfo.bestStillCaptureFpsRange[1];
 
     // PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
-    // still have to do something sane for them
+    // still have to do something reasonable for them
 
     // NOTE: Not scaled like FPS range values are.
     int previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index e7e26da..5cd16ee 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -205,7 +205,7 @@
     virtual void notifyRepeatingRequestError(long lastFrameNumber);
 
     // utility function to convert AIDL SessionConfiguration to HIDL
-    // streamConfiguration. Also checks for sanity of SessionConfiguration and
+    // streamConfiguration. Also checks for validity of SessionConfiguration and
     // returns a non-ok binder::Status if the passed in session configuration
     // isn't valid.
     static binder::Status
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 1a0881f..a63f402 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -620,7 +620,8 @@
         if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
             ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
                     mAppSegmentFrameNumbers.front());
-            mInputYuvBuffers.erase(it);
+            mInputAppSegmentBuffers.erase(it);
+            mAppSegmentFrameNumbers.pop();
             continue;
         }
 
@@ -664,6 +665,7 @@
             ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
                     mMainImageFrameNumbers.front());
             mInputYuvBuffers.erase(it);
+            mMainImageFrameNumbers.pop();
             continue;
         }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 01e70d8..4a509aa 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -60,6 +60,7 @@
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
+#include "utils/TraceHFR.h"
 
 #include <algorithm>
 #include <tuple>
@@ -756,10 +757,9 @@
         for (size_t i = 0; i < mInFlightMap.size(); i++) {
             InFlightRequest r = mInFlightMap.valueAt(i);
             lines.appendFormat("      Frame %d |  Timestamp: %" PRId64 ", metadata"
-                    " arrived: %s, buffers left: %d, buffers returned with STATUS_ERROR: %d, "
-                    " buffers notified with error: %d\n", mInFlightMap.keyAt(i),
+                    " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
                     r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
-                    r.numBuffersLeft, r.numErrorBuffersReturned, r.numErrorBuffersNotified);
+                    r.numBuffersLeft);
         }
     }
     write(fd, lines.string(), lines.size());
@@ -2835,7 +2835,7 @@
 }
 
 void Camera3Device::removeInFlightMapEntryLocked(int idx) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
     nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
     mInFlightMap.removeItemsAt(idx, 1);
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index e1d35e8..01ca006 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -21,6 +21,7 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include "Camera3OutputStream.h"
+#include "utils/TraceHFR.h"
 
 #ifndef container_of
 #define container_of(ptr, type, member) \
@@ -160,7 +161,7 @@
 
 status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
         const std::vector<size_t>&) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
 
     ANativeWindowBuffer* anb;
     int fenceFd = -1;
@@ -190,7 +191,7 @@
 status_t Camera3OutputStream::returnBufferLocked(
         const camera3_stream_buffer &buffer,
         nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
 
     status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);
 
@@ -516,7 +517,7 @@
 }
 
 status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
     status_t res;
 
     if ((res = getBufferPreconditionCheckLocked()) != OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 4994393..eea5ef1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -395,7 +395,7 @@
 
 void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
     InFlightRequestMap& inflightMap = states.inflightMap;
-    InFlightRequest &request = inflightMap.editValueAt(idx);
+    const InFlightRequest &request = inflightMap.valueAt(idx);
     const uint32_t frameNumber = inflightMap.keyAt(idx);
 
     nsecs_t sensorTimestamp = request.sensorTimestamp;
@@ -406,7 +406,7 @@
     //      all input and output buffers, all result metadata, shutter callback
     //      arrived.
     // In the case of an unsuccessful request:
-    //      all input and output buffers, as well as error notifications, arrived.
+    //      all input and output buffers, as well as request/result error notifications, arrived.
     if (request.numBuffersLeft == 0 &&
             (request.skipResultMetadata ||
             (request.haveResultMetadata && shutterTimestamp != 0))) {
@@ -430,26 +430,24 @@
         assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);
 
-        size_t bufferErrorCnt = returnOutputBuffers(
+        returnOutputBuffers(
             states.useHalBufManager, states.listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
-            request.outputSurfaces, request.resultExtras);
+            request.outputSurfaces, request.resultExtras,
+            request.errorBufStrategy);
 
-        request.numErrorBuffersReturned += bufferErrorCnt;
-        if (request.numErrorBuffersReturned == request.numErrorBuffersNotified) {
-            removeInFlightMapEntryLocked(states, idx);
-            ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
-
-            // Note down the just completed frame number
-            if (request.hasInputBuffer) {
-                states.lastCompletedReprocessFrameNumber = frameNumber;
-            } else if (request.zslCapture) {
-                states.lastCompletedZslFrameNumber = frameNumber;
-            } else {
-                states.lastCompletedRegularFrameNumber = frameNumber;
-            }
+        // Note down the just completed frame number
+        if (request.hasInputBuffer) {
+            states.lastCompletedReprocessFrameNumber = frameNumber;
+        } else if (request.zslCapture) {
+            states.lastCompletedZslFrameNumber = frameNumber;
+        } else {
+            states.lastCompletedRegularFrameNumber = frameNumber;
         }
+
+        removeInFlightMapEntryLocked(states, idx);
+        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
     }
 
     states.inflightIntf.checkInflightMapLengthLocked();
@@ -499,10 +497,13 @@
         InFlightRequest &request = states.inflightMap.editValueAt(idx);
         ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                 ", frameNumber = %" PRId64 ", burstId = %" PRId32
-                ", partialResultCount = %d, hasCallback = %d, num_output_buffers %d",
+                ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
+                ", usePartialResult = %d",
                 __FUNCTION__, request.resultExtras.requestId,
                 request.resultExtras.frameNumber, request.resultExtras.burstId,
-                result->partial_result, request.hasCallback, result->num_output_buffers);
+                result->partial_result, states.numPartialResults,
+                request.hasCallback, result->num_output_buffers,
+                states.usePartialResult);
         // Always update the partial count to the latest one if it's not 0
         // (buffers only). When framework aggregates adjacent partial results
         // into one, the latest partial count will be used.
@@ -567,6 +568,7 @@
                     request.collectedPartialResult);
             }
             request.haveResultMetadata = true;
+            request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
         }
 
         uint32_t numBuffersReturned = result->num_output_buffers;
@@ -593,19 +595,14 @@
             request.sensorTimestamp = entry.data.i64[0];
         }
 
-        // If shutter event isn't received yet, append the output buffers to
-        // the in-flight request. Otherwise, return the output buffers to
-        // streams.
-        if (shutterTimestamp == 0) {
-            request.pendingOutputBuffers.appendArray(result->output_buffers,
+        // If shutter event isn't received yet, do not return the pending output
+        // buffers.
+        request.pendingOutputBuffers.appendArray(result->output_buffers,
                 result->num_output_buffers);
-        } else {
-            bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
-            auto numErrorBuffers = returnOutputBuffers(states.useHalBufManager, states.listener,
-                result->output_buffers, result->num_output_buffers,
-                shutterTimestamp, timestampIncreasing,
-                request.outputSurfaces, request.resultExtras);
-            request.numErrorBuffersReturned += numErrorBuffers;
+        if (shutterTimestamp != 0) {
+            returnAndRemovePendingOutputBuffers(
+                states.useHalBufManager, states.listener,
+                request);
         }
 
         if (result->result != NULL && !isPartialResult) {
@@ -643,7 +640,6 @@
                       "  its stream:%s (%d)",  __FUNCTION__,
                       frameNumber, strerror(-res), res);
             }
-
         } else {
             ALOGW("%s: Input buffer should be NULL if there is no input"
                     " buffer sent in the request, skipping input buffer return.",
@@ -799,45 +795,59 @@
     processCaptureResult(states, &r);
 }
 
-size_t returnOutputBuffers(
+void returnOutputBuffers(
         bool useHalBufManager,
         sp<NotificationListener> listener,
         const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
         nsecs_t timestamp, bool timestampIncreasing,
         const SurfaceMap& outputSurfaces,
-        const CaptureResultExtras &inResultExtras) {
+        const CaptureResultExtras &inResultExtras,
+        ERROR_BUF_STRATEGY errorBufStrategy) {
 
-    size_t numErrorBuffers = 0;
     for (size_t i = 0; i < numBuffers; i++)
     {
+        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
+        int streamId = stream->getId();
+
+        // Call notify(ERROR_BUFFER) if necessary.
+        if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR &&
+                errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
+            if (listener != nullptr) {
+                CaptureResultExtras extras = inResultExtras;
+                extras.errorStreamId = streamId;
+                listener->notifyError(
+                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
+                        extras);
+            }
+        }
+
         if (outputBuffers[i].buffer == nullptr) {
             if (!useHalBufManager) {
                // With the HAL buffer management API, the HAL sometimes has to return buffers
                // that do not have an output buffer handle filled in yet. This is, however,
                // illegal if the HAL buffer management API is not being used.
                 ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
-            } else {
-                if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR) {
-                    numErrorBuffers++;
-                }
             }
             continue;
         }
 
-        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
-        int streamId = stream->getId();
         const auto& it = outputSurfaces.find(streamId);
         status_t res = OK;
-        if (it != outputSurfaces.end()) {
-            res = stream->returnBuffer(
-                    outputBuffers[i], timestamp, timestampIncreasing, it->second,
-                    inResultExtras.frameNumber);
-        } else {
-            res = stream->returnBuffer(
-                    outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber);
-        }
 
+        // Do not return the buffer if the buffer status is error, and the error
+        // buffer strategy is CACHE.
+        if (outputBuffers[i].status != CAMERA3_BUFFER_STATUS_ERROR ||
+                errorBufStrategy != ERROR_BUF_CACHE) {
+            if (it != outputSurfaces.end()) {
+                res = stream->returnBuffer(
+                        outputBuffers[i], timestamp, timestampIncreasing, it->second,
+                        inResultExtras.frameNumber);
+            } else {
+                res = stream->returnBuffer(
+                        outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
+                        inResultExtras.frameNumber);
+            }
+        }
         // Note: stream may be deallocated at this point, if this buffer was
         // the last reference to it.
         if (res == NO_INIT || res == DEAD_OBJECT) {
@@ -863,13 +873,30 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                         extras);
             }
-        } else {
-            if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR) {
-               numErrorBuffers++;
-            }
         }
     }
-    return numErrorBuffers;
+}
+
+void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+        sp<NotificationListener> listener, InFlightRequest& request) {
+    bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+    returnOutputBuffers(useHalBufManager, listener,
+            request.pendingOutputBuffers.array(),
+            request.pendingOutputBuffers.size(),
+            request.shutterTimestamp, timestampIncreasing,
+            request.outputSurfaces, request.resultExtras,
+            request.errorBufStrategy);
+
+    // Remove error buffers that are not cached.
+    for (auto iter = request.pendingOutputBuffers.begin();
+            iter != request.pendingOutputBuffers.end(); ) {
+        if (request.errorBufStrategy != ERROR_BUF_CACHE ||
+                iter->status != CAMERA3_BUFFER_STATUS_ERROR) {
+            iter = request.pendingOutputBuffers.erase(iter);
+        } else {
+            iter++;
+        }
+    }
 }
 
 void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
@@ -938,14 +965,8 @@
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
                     r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
             }
-            bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
-            size_t bufferErrorCnt = returnOutputBuffers(
-                    states.useHalBufManager, states.listener,
-                    r.pendingOutputBuffers.array(),
-                    r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
-                    r.outputSurfaces, r.resultExtras);
-            r.numErrorBuffersReturned += bufferErrorCnt;
-            r.pendingOutputBuffers.clear();
+            returnAndRemovePendingOutputBuffers(
+                    states.useHalBufManager, states.listener, r);
 
             removeInFlightRequestIfReadyLocked(states, idx);
         }
@@ -999,7 +1020,6 @@
             break;
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
-        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
             {
                 std::lock_guard<std::mutex> l(states.inflightLock);
                 ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -1026,30 +1046,13 @@
                     }
 
                     if (!physicalDeviceResultError) {
+                        r.skipResultMetadata = true;
                         if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
                                 == errorCode) {
-                            r.skipResultMetadata = true;
-                        } else if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
-                                == errorCode) {
-                            r.numErrorBuffersNotified ++;
+                            r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
                         } else {
                             // errorCode is ERROR_CAMERA_REQUEST
-                            if (!r.skipResultMetadata) {
-                                // In case HAL calls multiples ERROR_REQUEST
-                                // callback, only count the pending buffer
-                                // notify error counter once. And also handle
-                                // the case where ERROR_BUFFERs are sent before
-                                // ERROR_REQUEST, even though it's not allowed
-                                // by the HAL API.
-                                if (r.numErrorBuffersNotified != 0) {
-                                    ALOGW("Camera %s: %s: HAL should not notify ERROR_REQUEST"
-                                            " and ERROR_BUFFER for the same request",
-                                            states.cameraId.string(), __FUNCTION__);
-                                }
-                                r.numErrorBuffersNotified =
-                                        r.numOutputBuffers - r.numErrorBuffersNotified;
-                                r.skipResultMetadata = true;
-                            }
+                            r.errorBufStrategy = ERROR_BUF_RETURN;
                         }
 
                         // Check whether the buffers returned. If they returned,
@@ -1071,6 +1074,10 @@
                         states.cameraId.string(), __FUNCTION__);
             }
             break;
+        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+            // Do not depend on the HAL's ERROR_CAMERA_BUFFER notification to send the buffer
+            // error callback to the app. Rather, rely on the STATUS_ERROR of returned image buffers.
+            break;
         default:
             // SET_ERR calls notifyError
             SET_ERR("Unknown error message from HAL: %d", msg.error_code);
@@ -1379,21 +1386,18 @@
     { // First return buffers cached in mInFlightMap
         std::lock_guard<std::mutex> l(states.inflightLock);
         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
-            InFlightRequest &request = states.inflightMap.editValueAt(idx);
-            size_t bufferErrorCnt = returnOutputBuffers(
+            const InFlightRequest &request = states.inflightMap.valueAt(idx);
+            returnOutputBuffers(
                 states.useHalBufManager, states.listener,
                 request.pendingOutputBuffers.array(),
                 request.pendingOutputBuffers.size(), 0,
                 /*timestampIncreasing*/true, request.outputSurfaces,
-                request.resultExtras);
-            request.numErrorBuffersReturned += bufferErrorCnt;
+                request.resultExtras, request.errorBufStrategy);
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
-                    " arrived: %s, buffers left: %d, buffers returned with STATUS_ERROR: %d, "
-                    " buffers notified with error: %d\n", __FUNCTION__,
+                    " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
                     request.haveResultMetadata ? "true" : "false",
-                    request.numBuffersLeft, request.numErrorBuffersReturned,
-                    request.numErrorBuffersNotified);
+                    request.numBuffersLeft);
         }
 
         states.inflightMap.clear();
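
With the per-request error counters removed, the buffer handling in returnOutputBuffers() is driven entirely by the request's error-buffer strategy: buffers with STATUS_ERROR are held back while the strategy is CACHE, and notify(ERROR_BUFFER) is only emitted under RETURN_NOTIFY. A minimal sketch of that decision logic (simplified types, not the service code):

    // Mirrors ERROR_BUF_STRATEGY from InFlightRequest.h (see the hunk further below).
    enum class ErrorBufStrategy { CACHE, RETURN, RETURN_NOTIFY };

    struct BufferStatus { bool isError; };   // stands in for CAMERA3_BUFFER_STATUS_ERROR

    // Should this buffer be handed back to its stream now?
    bool shouldReturnNow(BufferStatus b, ErrorBufStrategy s) {
        return !b.isError || s != ErrorBufStrategy::CACHE;
    }

    // Should notify(ERROR_BUFFER) be sent to the app for this buffer?
    bool shouldNotifyError(BufferStatus b, ErrorBufStrategy s) {
        return b.isError && s == ErrorBufStrategy::RETURN_NOTIFY;
    }
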
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 53f78f2..9946312 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -44,9 +44,10 @@
     /**
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
-    // helper function to return the output buffers to output streams.
-    // returns the number of STATUS_ERROR buffers
-    size_t returnOutputBuffers(
+
+    // helper function to return the output buffers to output streams. The
+    // function also optionally calls notify(ERROR_BUFFER).
+    void returnOutputBuffers(
             bool useHalBufManager,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
             const camera3_stream_buffer_t *outputBuffers,
@@ -54,7 +55,16 @@
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
             // Used to send buffer error callback when failing to return buffer
-            const CaptureResultExtras &resultExtras = CaptureResultExtras{});
+            const CaptureResultExtras &resultExtras = CaptureResultExtras{},
+            ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN);
+
+    // helper function to return the output buffers to output streams, and
+    // remove the returned buffers from the inflight request's pending buffers
+    // vector.
+    void returnAndRemovePendingOutputBuffers(
+            bool useHalBufManager,
+            sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
+            InFlightRequest& request);
 
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index e54a99b..20f6168 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -22,6 +22,7 @@
 #include <utils/Trace.h>
 #include "device3/Camera3Stream.h"
 #include "device3/StatusTracker.h"
+#include "utils/TraceHFR.h"
 
 #include <cutils/properties.h>
 
@@ -601,7 +602,7 @@
 status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer,
         nsecs_t waitBufferTimeout,
         const std::vector<size_t>& surface_ids) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
     Mutex::Autolock l(mLock);
     status_t res = OK;
 
@@ -682,7 +683,7 @@
 status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
         nsecs_t timestamp, bool timestampIncreasing,
          const std::vector<size_t>& surface_ids, uint64_t frameNumber) {
-    ATRACE_CALL();
+    ATRACE_HFR_CALL();
     Mutex::Autolock l(mLock);
 
     // Check if this buffer is outstanding.
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 3cb8324..da4f228 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -32,7 +32,18 @@
 
 namespace camera3 {
 
+typedef enum {
+    // Cache the buffers with STATUS_ERROR within InFlightRequest
+    ERROR_BUF_CACHE,
+    // Return the buffers with STATUS_ERROR to the buffer queue
+    ERROR_BUF_RETURN,
+    // Return the buffers with STATUS_ERROR to the buffer queue, and call
+    // notify(ERROR_BUFFER) as well
+    ERROR_BUF_RETURN_NOTIFY
+} ERROR_BUF_STRATEGY;
+
 struct InFlightRequest {
+
     // Set by notify() SHUTTER call.
     nsecs_t shutterTimestamp;
     // Set by process_capture_result().
@@ -43,18 +54,8 @@
     // Decremented by calls to process_capture_result with valid output
     // and input buffers
     int     numBuffersLeft;
-    // Total number of output buffers for this request
-    int     numOutputBuffers;
 
     // The inflight request is considered complete if all buffers are returned
-    // and numErrorBuffersReturned == numErrorBuffersNotified.
-
-    // The number of buffers returned with STATUS_ERROR;
-    int numErrorBuffersReturned;
-    // The number of buffers that are notified as error:
-    //   +1 for each notifyError(ERROR_BUFFER), and
-    //   +numOutputBuffers for notifyError(ERROR_REQUEST)
-    int numErrorBuffersNotified;
 
     CaptureResultExtras resultExtras;
     // If this request has any input buffer
@@ -92,6 +93,10 @@
     // REQUEST/RESULT error.
     bool skipResultMetadata;
 
+    // Whether the buffers with STATUS_ERROR should be cached as pending buffers,
+    // returned to the buffer queue, or returned to the buffer queue with a notify(ERROR_BUFFER) call.
+    ERROR_BUF_STRATEGY errorBufStrategy;
+
     // The physical camera ids being requested.
     std::set<String8> physicalCameraIds;
 
@@ -123,13 +128,11 @@
             requestStatus(OK),
             haveResultMetadata(false),
             numBuffersLeft(0),
-            numOutputBuffers(0),
-            numErrorBuffersReturned(0),
-            numErrorBuffersNotified(0),
             hasInputBuffer(false),
             hasCallback(true),
             maxExpectedDuration(kDefaultExpectedDuration),
             skipResultMetadata(false),
+            errorBufStrategy(ERROR_BUF_CACHE),
             stillCapture(false),
             zslCapture(false),
             rotateAndCropAuto(false) {
@@ -145,14 +148,12 @@
             requestStatus(OK),
             haveResultMetadata(false),
             numBuffersLeft(numBuffers),
-            numOutputBuffers(hasInput ? numBuffers-1 : numBuffers),
-            numErrorBuffersReturned(0),
-            numErrorBuffersNotified(0),
             resultExtras(extras),
             hasInputBuffer(hasInput),
             hasCallback(hasAppCallback),
             maxExpectedDuration(maxDuration),
             skipResultMetadata(false),
+            errorBufStrategy(ERROR_BUF_CACHE),
             physicalCameraIds(physicalCameraIdSet),
             stillCapture(isStillCapture),
             zslCapture(isZslCapture),
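
The strategy starts as ERROR_BUF_CACHE when the request is created and is promoted by the events handled in Camera3OutputUtils.cpp above: once all result metadata has arrived, or an ERROR_RESULT notification is received, error buffers are returned with notify(ERROR_BUFFER); on ERROR_REQUEST they are returned without an additional buffer notification. A small self-contained sketch of those transitions (the event enum is a simplification, not part of this patch):

    #include <cstdio>

    enum ErrorBufStrategy { ERROR_BUF_CACHE, ERROR_BUF_RETURN, ERROR_BUF_RETURN_NOTIFY };

    // Simplified events taken from the notify()/processCaptureResult() paths.
    enum Event { ALL_RESULT_METADATA_RECEIVED, ERROR_RESULT_NOTIFIED, ERROR_REQUEST_NOTIFIED };

    ErrorBufStrategy nextStrategy(ErrorBufStrategy current, Event e) {
        switch (e) {
            case ALL_RESULT_METADATA_RECEIVED:
            case ERROR_RESULT_NOTIFIED:
                return ERROR_BUF_RETURN_NOTIFY;  // return error buffers and notify(ERROR_BUFFER)
            case ERROR_REQUEST_NOTIFIED:
                return ERROR_BUF_RETURN;         // app already received ERROR_REQUEST
        }
        return current;
    }

    int main() {
        ErrorBufStrategy s = ERROR_BUF_CACHE;    // default at request creation
        s = nextStrategy(s, ALL_RESULT_METADATA_RECEIVED);
        std::printf("strategy=%d\n", s);
        return 0;
    }
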
diff --git a/services/camera/libcameraservice/tests/ClientManagerTest.cpp b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
new file mode 100644
index 0000000..6a38427
--- /dev/null
+++ b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "ClientManagerTest"
+
+#include "../utils/ClientManager.h"
+#include <gtest/gtest.h>
+
+using namespace android::resource_policy;
+
+struct TestClient {
+    TestClient(int id, int32_t cost, const std::set<int>& conflictingKeys, int32_t ownerId,
+            int32_t score, int32_t state, bool isVendorClient) :
+            mId(id), mCost(cost), mConflictingKeys(conflictingKeys),
+            mOwnerId(ownerId), mScore(score), mState(state), mIsVendorClient(isVendorClient) {};
+    int mId;
+    int32_t mCost;    // Int 0..100
+    std::set<int> mConflictingKeys;
+    int32_t mOwnerId; // PID
+    int32_t mScore;   // Priority
+    int32_t mState;   // Foreground/background etc
+    bool mIsVendorClient;
+};
+
+using TestClientDescriptor = ClientDescriptor<int, TestClient>;
+using TestDescriptorPtr = std::shared_ptr<TestClientDescriptor>;
+
+TestDescriptorPtr makeDescFromTestClient(const TestClient& tc) {
+    return std::make_shared<TestClientDescriptor>(/*ID*/tc.mId, tc, tc.mCost, tc.mConflictingKeys,
+            tc.mScore, tc.mOwnerId, tc.mState, tc.mIsVendorClient);
+}
+
+class TestClientManager : public ClientManager<int, TestClient> {
+public:
+    TestClientManager() {}
+    virtual ~TestClientManager() {}
+};
+
+
+// Test ClientManager behavior when there is only a single owner.
+// The expected behavior is that if one owner (application or vendor) tries
+// to open a second camera, it may or may not succeed, but the first opened
+// camera should never be evicted.
+TEST(ClientManagerTest, SingleOwnerMultipleCamera) {
+
+    TestClientManager cm;
+    TestClient cam0Client(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+    auto cam0Desc = makeDescFromTestClient(cam0Client);
+    auto evicted = cm.addAndEvict(cam0Desc);
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+    TestClient cam1Client(/*ID*/1, /*cost*/100, /*conflicts*/{0},
+            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+    auto cam1Desc = makeDescFromTestClient(cam1Client);
+
+    // 1. Check that with conflicting devices, the new client would be evicted
+    auto wouldBeEvicted = cm.wouldEvict(cam1Desc);
+    ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+    ASSERT_EQ(wouldBeEvicted[0]->getKey(), cam1Desc->getKey()) << "cam1 must be evicted";
+
+    cm.removeAll();
+
+    TestClient cam2Client(/*ID*/2, /*cost*/100, /*conflicts*/{},
+            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+    auto cam2Desc = makeDescFromTestClient(cam2Client);
+    evicted = cm.addAndEvict(cam2Desc);
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+    TestClient cam3Client(/*ID*/3, /*cost*/100, /*conflicts*/{},
+            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+    auto cam3Desc = makeDescFromTestClient(cam3Client);
+
+    // 2. Check that without conflicting devices, the pre-existing client won't be evicted.
+    // In this case, the new client would be granted, but could later be rejected by the HAL due
+    // to resource cost.
+    wouldBeEvicted = cm.wouldEvict(cam3Desc);
+    ASSERT_EQ(wouldBeEvicted.size(), 0u) << "Evicted list must be empty";
+
+    cm.removeAll();
+
+    evicted = cm.addAndEvict(cam0Desc);
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+    TestClient cam0ClientNew(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+    auto cam0DescNew = makeDescFromTestClient(cam0ClientNew);
+    wouldBeEvicted = cm.wouldEvict(cam0DescNew);
+
+    // 3. Check that opening the same camera twice will evict the older client
+    ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+    ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "cam0 (old) must be evicted";
+}
+
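
For reference, this is similar to how the camera service is expected to drive ClientManager when handling a new connection: query wouldEvict() first to decide whether the incoming client itself would be the one evicted, and only then commit with addAndEvict(). A hypothetical flow sketch reusing the test helpers defined above:

    TestClientManager cm;
    TestClient newClient(/*ID*/0, /*cost*/100, /*conflicts*/{}, /*ownerId*/1000,
            /*score*/50, /*state*/1, /*isVendorClient*/false);
    auto desc = makeDescFromTestClient(newClient);

    auto wouldBeEvicted = cm.wouldEvict(desc);
    // Reject the connection if the newcomer is the client that would be evicted.
    bool rejected = !wouldBeEvicted.empty() && wouldBeEvicted[0]->getKey() == desc->getKey();
    if (!rejected) {
        cm.addAndEvict(desc);   // commit, evicting any lower-priority clients
    }
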
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 35d25bf..64be6c5 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -496,6 +496,20 @@
                 evictList.clear();
                 evictList.push_back(client);
                 return evictList;
+            } else if (conflicting && owner == curOwner) {
+                // A pre-existing conflicting client with the same owner exists.
+                // Opening the same device twice -> the most recent open wins.
+                // Otherwise let the existing client win, to avoid behavior differences
+                // caused by how the HAL advertises conflicting devices (which is hidden
+                // from the application)
+                if (curKey == key) {
+                    evictList.push_back(i);
+                    totalCost -= curCost;
+                } else {
+                    evictList.clear();
+                    evictList.push_back(client);
+                    return evictList;
+                }
             } else if (conflicting || ((totalCost > mMaxCost && curCost > 0) &&
                     (curPriority >= priority) &&
                     !(highestPriorityOwner == owner && owner == curOwner))) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index fb519d9..cfb9f17 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -32,7 +32,7 @@
 class SessionConfigurationUtils {
 public:
     // utility function to convert AIDL SessionConfiguration to HIDL
-    // streamConfiguration. Also checks for sanity of SessionConfiguration and
+    // streamConfiguration. Also checks for validity of SessionConfiguration and
     // returns a non-ok binder::Status if the passed in session configuration
     // isn't valid.
     static binder::Status
diff --git a/services/camera/libcameraservice/utils/TraceHFR.h b/services/camera/libcameraservice/utils/TraceHFR.h
new file mode 100644
index 0000000..3a1900f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/TraceHFR.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_ENABLE_HFR_TRACES_H_
+#define ANDROID_SERVERS_ENABLE_HFR_TRACES_H_
+
+#include <utils/Trace.h>
+
+#ifdef HFR_ENABLE_TRACING
+#define ATRACE_HFR_CALL() ATRACE_CALL()
+#else
+#define ATRACE_HFR_CALL()
+#endif
+
+#endif
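
The ATRACE_HFR_CALL() macro used in the camera hot paths above expands to ATRACE_CALL() only when HFR_ENABLE_TRACING is defined at build time, and to nothing otherwise, so the per-frame tracing cost disappears from default builds. A minimal usage sketch (the function is illustrative, not from this patch; the define has to come from the build configuration):

    #include "utils/TraceHFR.h"

    // Hypothetical per-frame helper: traced only when built with -DHFR_ENABLE_TRACING,
    // otherwise ATRACE_HFR_CALL() compiles away entirely.
    void processFrameHotPath() {
        ATRACE_HFR_CALL();
        // ... work executed for every frame at high frame rates ...
    }
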
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index f4c1924..05bbbc7 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -15,26 +15,12 @@
         "libmedia_codecserviceregistrant",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
     header_libs: [
         "libmedia_headers",
     ],
 
     init_rc: ["mediaswcodec.rc"],
 
-    required: ["mediaswcodec.policy"],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 05b7d22..03e1e41 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -35,17 +35,6 @@
         "liblog",
         "libavservices_minijail",
     ],
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
     init_rc: ["mediaextractor.rc"],
 
     cflags: [
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/AnalyticsState.h
index b648947..09c0b4c 100644
--- a/services/mediametrics/AnalyticsState.h
+++ b/services/mediametrics/AnalyticsState.h
@@ -93,7 +93,7 @@
         int32_t ll = lines;
 
         if (ll > 0) {
-            ss << "TransactionLog:\n";
+            ss << "TransactionLog: gc(" << mTransactionLog.getGarbageCollectionCount() << ")\n";
             --ll;
         }
         if (ll > 0) {
@@ -102,7 +102,7 @@
             ll -= l;
         }
         if (ll > 0) {
-            ss << "TimeMachine:\n";
+            ss << "TimeMachine: gc(" << mTimeMachine.getGarbageCollectionCount() << ")\n";
             --ll;
         }
         if (ll > 0) {
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index f819f1b..f033d5c 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -102,6 +102,7 @@
         "libutils",
     ],
     header_libs: [
+        "libaudioutils_headers",
         "libmediametrics_headers",
     ],
 
@@ -120,6 +121,7 @@
         "AudioAnalytics.cpp",
         "AudioPowerUsage.cpp",
         "AudioTypes.cpp",
+        "cleaner.cpp",
         "iface_statsd.cpp",
         "MediaMetricsService.cpp",
         "statsd_audiopolicy.cpp",
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 800f099..29801a4 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -31,30 +31,133 @@
 
 #define PROP_AUDIO_ANALYTICS_CLOUD_ENABLED "persist.audio.analytics.cloud.enabled"
 
-// Enable for testing of delivery to statsd
-//#define STATSD
+namespace android::mediametrics {
 
-// Transmit to statsd in integer or strings
-//#define USE_INT
+// Enable for testing of delivery to statsd. Caution: if this is enabled, all protos MUST exist.
+#define STATSD_ENABLE
 
-#ifdef USE_INT
-using short_enum_type_t = int32_t;
-using long_enum_type_t = int64_t;
-#define ENUM_EXTRACT(x) (x)
+#ifdef STATSD_ENABLE
+#define CONDITION(INT_VALUE) (INT_VALUE)  // allow value
 #else
-using short_enum_type_t = std::string;
-using long_enum_type_t = std::string;
-#define ENUM_EXTRACT(x) (x).c_str()
+#define CONDITION(INT_VALUE) (int(0))     // mask value since the proto may not be defined yet.
 #endif
 
-using android::base::DEBUG;
+// Maximum length of a device name.
+static constexpr size_t STATSD_DEVICE_NAME_MAX_LENGTH = 32;
 
-namespace android::mediametrics {
+// Transmit enums to statsd as integers or strings (this must match the atoms.proto)
+static constexpr bool STATSD_USE_INT_FOR_ENUM = false;
+
+// Derive the enum transmission types based on whether integers or strings are used.
+using short_enum_type_t = std::conditional_t<STATSD_USE_INT_FOR_ENUM, int32_t, std::string>;
+using long_enum_type_t = std::conditional_t<STATSD_USE_INT_FOR_ENUM, int64_t, std::string>;
+
+// Convert std::string to const char *; pass other types through unchanged.
+template <typename T>
+auto ENUM_EXTRACT(const T& x) {
+    if constexpr (std::is_same_v<std::decay_t<T>, std::string>) {
+        return x.c_str();
+    } else {
+        return x;
+    }
+}
+
+static constexpr const auto LOG_LEVEL = android::base::VERBOSE;
+
+static constexpr int PREVIOUS_STATE_EXPIRE_SEC = 60 * 60; // 1 hour.
+
+/*
+ * For logging purposes, we list all of the MediaMetrics atom fields,
+ * which can then be associated with consecutive arguments to the statsd write.
+ */
+
+static constexpr const char * const AudioRecordDeviceUsageFields[] = {
+    "mediametrics_audiorecorddeviceusage_reported", // proto number
+    "devices",
+    "device_names",
+    "device_time_nanos",
+    "encoding",
+    "frame_count",
+    "interval_count",
+    "sample_rate",
+    "flags",
+    "package_name",
+    "selected_device_id",
+    "caller",
+    "source",
+};
+
+static constexpr const char * const AudioThreadDeviceUsageFields[] = {
+    "mediametrics_audiothreaddeviceusage_reported",
+    "devices",
+    "device_names",
+    "device_time_nanos",
+    "encoding",
+    "frame_count",
+    "interval_count",
+    "sample_rate",
+    "flags",
+    "xruns",
+    "type",
+};
+
+static constexpr const char * const AudioTrackDeviceUsageFields[] = {
+    "mediametrics_audiotrackdeviceusage_reported",
+    "devices",
+    "device_names",
+    "device_time_nanos",
+    "encoding",
+    "frame_count",
+    "interval_count",
+    "sample_rate",
+    "flags",
+    "xruns",
+    "package_name",
+    "device_latency_millis",
+    "device_startup_millis",
+    "device_volume",
+    "selected_device_id",
+    "stream_type",
+    "usage",
+    "content_type",
+    "caller",
+    "traits",
+};
+
+static constexpr const char * const AudioDeviceConnectionFields[] = {
+    "mediametrics_audiodeviceconnection_reported",
+    "input_devices",
+    "output_devices",
+    "device_names",
+    "result",
+    "time_to_connect_millis",
+    "connection_count",
+};
+
+/**
+ * sendToStatsd is a helper method that sends the arguments to statsd
+ * and returns a pair { result, summary_string }.
+ */
+template <size_t N, typename ...Types>
+std::pair<int, std::string> sendToStatsd(const char * const (& fields)[N], Types ... args)
+{
+    int result = 0;
+    std::stringstream ss;
+
+#ifdef STATSD_ENABLE
+    result = android::util::stats_write(args...);
+    ss << "result:" << result;
+#endif
+    ss << " { ";
+    stringutils::fieldPrint(ss, fields, args...);
+    ss << "}";
+    return { result, ss.str() };
+}
 
 AudioAnalytics::AudioAnalytics()
     : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
 {
-    SetMinimumLogSeverity(DEBUG); // for LOG().
+    SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
     ALOGD("%s", __func__);
 
     // Add action to save AnalyticsState if audioserver is restarted.
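
The ENUM_EXTRACT helper and the std::conditional_t aliases above let the same statsd call sites compile whether enums are transmitted as integers or as strings. A small self-contained sketch of that mechanism (kUseIntForEnum and enumExtract are illustrative stand-ins, not the mediametrics code):

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <type_traits>

    constexpr bool kUseIntForEnum = false;   // mirrors STATSD_USE_INT_FOR_ENUM
    using short_enum_type_t = std::conditional_t<kUseIntForEnum, int32_t, std::string>;

    template <typename T>
    auto enumExtract(const T& x) {           // same idea as ENUM_EXTRACT above
        if constexpr (std::is_same_v<std::decay_t<T>, std::string>) {
            return x.c_str();                // strings go out as const char *
        } else {
            return x;                        // integers pass through unchanged
        }
    }

    int main() {
        short_enum_type_t encoding = "AUDIO_FORMAT_PCM_16_BIT";
        std::cout << enumExtract(encoding) << "\n";
        return 0;
    }
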
@@ -72,6 +175,19 @@
                 // to end of full expression.
                 mAnalyticsState->clear();  // TODO: filter the analytics state.
                 // Perhaps report this.
+
+                // Set up a timer to expire the previous audio state to save space.
+                // Use the transaction log size as a cookie to see if it is the
+                // same as before.  A benign race is possible where a state is cleared early.
+                const size_t size = mPreviousAnalyticsState->transactionLog().size();
+                mTimedAction.postIn(
+                        std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
+                    if (mPreviousAnalyticsState->transactionLog().size() == size) {
+                        ALOGD("expiring previous audio state after %d seconds.",
+                                PREVIOUS_STATE_EXPIRE_SEC);
+                        mPreviousAnalyticsState->clear();  // removes data from the state.
+                    }
+                });
             }));
 
     // Handle device use record statistics
@@ -220,11 +336,26 @@
         ll -= l;
     }
 
+    if (ll > 0) {
+        // Print the statsd atoms we sent out.
+        const std::string statsd = mStatsdLog.dumpToString("  " /* prefix */, ll - 1);
+        const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
+        if ((size_t)ll >= n) {
+            if (n == 1) {
+                ss << "Statsd atoms: empty or truncated\n";
+            } else {
+                ss << "Statsd atoms:\n" << statsd;
+            }
+            ll -= n;
+        }
+    }
+
     if (ll > 0 && prefix == nullptr) {
         auto [s, l] = mAudioPowerUsage.dump(ll);
         ss << s;
         ll -= l;
     }
+
     return { ss.str(), lines - ll };
 }
 
@@ -312,20 +443,25 @@
 
     // Get connected device name if from bluetooth.
     bool isBluetooth = false;
-    std::string deviceNames; // we only have one device name at this time.
+
+    std::string inputDeviceNames;  // not filled currently.
+    std::string outputDeviceNames;
     if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
         isBluetooth = true;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
-            "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &deviceNames);
+            "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &outputDeviceNames);
         // Remove | if present
-        stringutils::replace(deviceNames, "|", '?');
+        stringutils::replace(outputDeviceNames, "|", '?');
+        if (outputDeviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
+            outputDeviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
+        }
     }
 
     switch (itemType) {
     case RECORD: {
         std::string callerName;
-        mAudioAnalytics.mAnalyticsState->timeMachine().get(
-                key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName);
+        const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
 
         std::string packageName;
         int64_t versionCode = 0;
@@ -350,10 +486,10 @@
         const auto flagsForStats = types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags);
         const auto sourceForStats = types::lookup<types::SOURCE_TYPE, short_enum_type_t>(source);
 
-        LOG(DEBUG) << "key:" << key
+        LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
               << " inputDevices:" << inputDevices << "(" << inputDeviceBits
-              << ") deviceNames:" << deviceNames
+              << ") inputDeviceNames:" << inputDeviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
               << ") frameCount:" << frameCount
@@ -364,12 +500,12 @@
               << " selectedDeviceId:" << selectedDeviceId
               << " callerName:" << callerName << "(" << callerNameForStats
               << ") source:" << source << "(" << sourceForStats << ")";
-#ifdef STATSD
-        if (mAudioAnalytics.mDeliverStatistics) {
-            (void)android::util::stats_write(
-                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED
+        if (clientCalled  // only log if client app called AudioRecord.
+                && mAudioAnalytics.mDeliverStatistics) {
+            const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
+                    CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
-                    , deviceNames.c_str()
+                    , inputDeviceNames.c_str()
                     , deviceTimeNs
                     , ENUM_EXTRACT(encodingForStats)
                     , frameCount
@@ -382,8 +518,9 @@
                     , ENUM_EXTRACT(callerNameForStats)
                     , ENUM_EXTRACT(sourceForStats)
                     );
+            ALOGV("%s: statsd %s", __func__, str.c_str());
+            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
         }
-#endif
     } break;
     case THREAD: {
         std::string type;
@@ -400,11 +537,12 @@
                         : types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags));
         const auto typeForStats = types::lookup<types::THREAD_TYPE, short_enum_type_t>(type);
 
-        LOG(DEBUG) << "key:" << key
+        LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
               << " inputDevices:" << inputDevices << "(" << inputDeviceBits
               << ") outputDevices:" << outputDevices << "(" << outputDeviceBits
-              << ") deviceNames:" << deviceNames
+              << ") inputDeviceNames:" << inputDeviceNames
+              << " outputDeviceNames:" << outputDeviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
               << ") frameCount:" << frameCount
@@ -414,13 +552,11 @@
               << " flags:" << flags << "(" << flagsForStats
               << ") type:" << type << "(" << typeForStats
               << ")";
-#ifdef STATSD
         if (mAudioAnalytics.mDeliverStatistics) {
-            (void)android::util::stats_write(
-                android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED
-                , ENUM_EXTRACT(inputDeviceBits)
-                , ENUM_EXTRACT(outputDeviceBits)
-                , deviceNames.c_str()
+            const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
+                CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+                , isInput ? ENUM_EXTRACT(inputDeviceBits) : ENUM_EXTRACT(outputDeviceBits)
+                , isInput ? inputDeviceNames.c_str() : outputDeviceNames.c_str()
                 , deviceTimeNs
                 , ENUM_EXTRACT(encodingForStats)
                 , frameCount
@@ -430,13 +566,15 @@
                 , underrun
                 , ENUM_EXTRACT(typeForStats)
             );
+            ALOGV("%s: statsd %s", __func__, str.c_str());
+            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
         }
-#endif
     } break;
     case TRACK: {
         std::string callerName;
-        mAudioAnalytics.mAnalyticsState->timeMachine().get(
-                key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName);
+        const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
+
         std::string contentType;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_CONTENTTYPE, &contentType);
@@ -470,6 +608,9 @@
         std::string streamType;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_STREAMTYPE, &streamType);
+        std::string traits;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_TRAITS, &traits);
         int32_t underrun = 0;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_UNDERRUN, &underrun);
@@ -485,12 +626,14 @@
         const auto flagsForStats = types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags);
         const auto streamTypeForStats =
                 types::lookup<types::STREAM_TYPE, short_enum_type_t>(streamType);
+        const auto traitsForStats =
+                 types::lookup<types::TRACK_TRAITS, short_enum_type_t>(traits);
         const auto usageForStats = types::lookup<types::USAGE, short_enum_type_t>(usage);
 
-        LOG(DEBUG) << "key:" << key
+        LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
               << " outputDevices:" << outputDevices << "(" << outputDeviceBits
-              << ") deviceNames:" << deviceNames
+              << ") outputDeviceNames:" << outputDeviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
               << ") frameCount:" << frameCount
@@ -508,14 +651,15 @@
               << " playbackSpeed:" << playbackSpeed
               << " selectedDeviceId:" << selectedDeviceId
               << " streamType:" << streamType << "(" << streamTypeForStats
+              << ") traits:" << traits << "(" << traitsForStats
               << ") usage:" << usage << "(" << usageForStats
               << ")";
-#ifdef STATSD
-        if (mAudioAnalytics.mDeliverStatistics) {
-            (void)android::util::stats_write(
-                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED
+        if (clientCalled // only log if client app called AudioTrack.
+                && mAudioAnalytics.mDeliverStatistics) {
+            const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
+                    CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
                     , ENUM_EXTRACT(outputDeviceBits)
-                    , deviceNames.c_str()
+                    , outputDeviceNames.c_str()
                     , deviceTimeNs
                     , ENUM_EXTRACT(encodingForStats)
                     , frameCount
@@ -523,7 +667,6 @@
                     , sampleRate
                     , ENUM_EXTRACT(flagsForStats)
                     , underrun
-
                     , packageName.c_str()
                     , (float)deviceLatencyMs
                     , (float)deviceStartupMs
@@ -533,9 +676,11 @@
                     , ENUM_EXTRACT(usageForStats)
                     , ENUM_EXTRACT(contentTypeForStats)
                     , ENUM_EXTRACT(callerNameForStats)
+                    , ENUM_EXTRACT(traitsForStats)
                     );
+            ALOGV("%s: statsd %s", __func__, str.c_str());
+            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
         }
-#endif
         } break;
     }
 
@@ -592,23 +737,26 @@
         const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
                 "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
 
-        LOG(DEBUG) << "key:" << key
+        LOG(LOG_LEVEL) << "key:" << key
                 << " A2DP SUCCESS"
                 << " outputDevices:" << outputDeviceBits
+                << " deviceName:" << mA2dpDeviceName
                 << " connectionTimeMs:" <<  connectionTimeMs;
-#ifdef STATSD
         if (mAudioAnalytics.mDeliverStatistics) {
             const long_enum_type_t inputDeviceBits{};
-            (void)android::util::stats_write(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
+
+            const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
+                    , mA2dpDeviceName.c_str()
                     , types::DEVICE_CONNECTION_RESULT_SUCCESS
                     , connectionTimeMs
                     , /* connection_count */ 1
                     );
+            ALOGV("%s: statsd %s", __func__, str.c_str());
+            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
         }
-#endif
     }
 }
 
@@ -620,13 +768,17 @@
     std::string state;
     item->get(AMEDIAMETRICS_PROP_STATE, &state);
     if (state != "connected") return;
+
+    std::string name;
+    item->get(AMEDIAMETRICS_PROP_NAME, &name);
     {
         std::lock_guard l(mLock);
         mA2dpConnectionRequestNs = atNs;
         ++mA2dpConnectionRequests;
+        mA2dpDeviceName = name;
     }
-    ALOGD("(key=%s) a2dp connection request atNs:%lld",
-            key.c_str(), (long long)atNs);
+    ALOGD("(key=%s) a2dp connection name:%s request atNs:%lld",
+            key.c_str(), name.c_str(), (long long)atNs);
     // TODO: attempt to cancel a timed event, rather than let it expire.
     mAudioAnalytics.mTimedAction.postIn(std::chrono::seconds(5), [this](){ expire(); });
 }
@@ -635,29 +787,29 @@
     std::lock_guard l(mLock);
     if (mA2dpConnectionRequestNs == 0) return; // ignore (this was an internal connection).
 
-#ifdef STATSD
     const long_enum_type_t inputDeviceBits{};
-#endif
     const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
             "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
 
     if (mA2dpConnectionServiceNs == 0) {
         ++mA2dpConnectionJavaServiceCancels;  // service did not connect to A2DP
 
-        LOG(DEBUG) << "A2DP CANCEL"
-                << " outputDevices:" << outputDeviceBits;
-#ifdef STATSD
+        LOG(LOG_LEVEL) << "A2DP CANCEL"
+                << " outputDevices:" << outputDeviceBits
+                << " deviceName:" << mA2dpDeviceName;
         if (mAudioAnalytics.mDeliverStatistics) {
-            (void)android::util::stats_write(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
+            const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
+                    , mA2dpDeviceName.c_str()
                     , types::DEVICE_CONNECTION_RESULT_JAVA_SERVICE_CANCEL
                     , /* connection_time_ms */ 0.f
                     , /* connection_count */ 1
                     );
+            ALOGV("%s: statsd %s", __func__, str.c_str());
+            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
         }
-#endif
         return;
     }
 
@@ -668,20 +820,22 @@
     mA2dpConnectionServiceNs = 0;
     ++mA2dpConnectionUnknowns;  // connection result unknown
 
-    LOG(DEBUG) << "A2DP UNKNOWN"
-            << " outputDevices:" << outputDeviceBits;
-#ifdef STATSD
+    LOG(LOG_LEVEL) << "A2DP UNKNOWN"
+            << " outputDevices:" << outputDeviceBits
+            << " deviceName:" << mA2dpDeviceName;
     if (mAudioAnalytics.mDeliverStatistics) {
-        (void)android::util::stats_write(
-                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
+        const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+                CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                 , ENUM_EXTRACT(inputDeviceBits)
                 , ENUM_EXTRACT(outputDeviceBits)
+                , mA2dpDeviceName.c_str()
                 , types::DEVICE_CONNECTION_RESULT_UNKNOWN
                 , /* connection_time_ms */ 0.f
                 , /* connection_count */ 1
                 );
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
     }
-#endif
 }
 
 } // namespace android::mediametrics
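
The pattern repeated above replaces each raw stats_write() with a sendToStatsd() helper (defined earlier in this patch, outside this excerpt) that both writes the atom and returns a printable field dump, which is then echoed to ALOGV and kept in mStatsdLog for dumpsys. The helper's exact signature is not shown here; a hypothetical sketch of the idea, pairing the write with a fieldPrint-style string:

    #include <sstream>
    #include <string>
    #include <utility>

    // Hypothetical sketch only: the field names and the writer callable are
    // placeholders, not the real statsd API or the helper defined in this patch.
    template <size_t N, typename F, typename... Targs>
    std::pair<int, std::string> sendToStatsdSketch(
            const char* const (&fields)[N], F&& writer, Targs&&... args) {
        static_assert(N == sizeof...(Targs));         // one field name per value
        std::stringstream ss;
        const char* const* fptr = fields;             // walk the field-name array
        ((ss << *fptr++ << ":" << args << " "), ...); // fold: "name:value " per field
        const int result = writer(args...);           // forward to the real atom writer
        return { result, ss.str() };
    }

    // Usage sketch (hypothetical names):
    //   const char* const kFields[] = { "device", "time_ns" };
    //   auto [result, str] = sendToStatsdSketch(kFields,
    //           [](const char* d, long long t) { (void)d; (void)t; return 0; },
    //           "SPEAKER", 123LL);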
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index 138ddcc..df097b1 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
 #include "AnalyticsActions.h"
 #include "AnalyticsState.h"
 #include "AudioPowerUsage.h"
@@ -116,12 +117,14 @@
 
     // AnalyticsState is individually locked, and we use SharedPtrWrap
     // to allow safe access even if the shared pointer changes underneath.
-
+    // These wrapper pointers always point to a valid state object.
     SharedPtrWrap<AnalyticsState> mAnalyticsState;
     SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 
     TimedAction mTimedAction; // locked internally
 
+    SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+
     // DeviceUse is a nested class which handles audio device usage accounting.
     // We define this class at the end to ensure prior variables all properly constructed.
     // TODO: Track / Thread interaction
@@ -173,6 +176,7 @@
         AudioAnalytics &mAudioAnalytics;
 
         mutable std::mutex mLock;
+        std::string mA2dpDeviceName;
         int64_t mA2dpConnectionRequestNs GUARDED_BY(mLock) = 0;  // Time for BT service request.
         int64_t mA2dpConnectionServiceNs GUARDED_BY(mLock) = 0;  // Time audio service agrees.
 
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index c441110..cca6b41 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -34,7 +34,7 @@
 #define PROP_AUDIO_METRICS_DISABLED "persist.media.audio_metrics.power_usage_disabled"
 #define AUDIO_METRICS_DISABLED_DEFAULT (false)
 
-// property to set how long to send audio power use metrics data to westworld, default is 24hrs
+// property to set how long to send audio power use metrics data to statsd, default is 24hrs
 #define PROP_AUDIO_METRICS_INTERVAL_HR "persist.media.audio_metrics.interval_hr"
 #define INTERVAL_HR_DEFAULT (24)
 
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 2a2dbaf..aa44447 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -20,7 +20,30 @@
 
 namespace android::mediametrics::types {
 
-std::unordered_map<std::string, int64_t>& getAudioDeviceInMap() {
+const std::unordered_map<std::string, int32_t>& getAudioCallerNameMap() {
+    // DO NOT MODIFY VALUES (OK to add new ones).
+    // This may be found in frameworks/av/media/libmediametrics/include/MediaMetricsConstants.h
+    static std::unordered_map<std::string, int32_t> map{
+        {"unknown",       0},           // callerName not set
+        {"aaudio",        1},           // Native AAudio
+        {"java",          2},           // Java API layer
+        {"media",         3},           // libmedia (mediaplayer)
+        {"opensles",      4},           // Open SLES
+        {"rtp",           5},           // RTP communication
+        {"soundpool",     6},           // SoundPool
+        {"tonegenerator", 7},           // dial tones
+        // R values above.
+    };
+    return map;
+}
+
+// A map in case we need to return a flag for input devices.
+// This is 64 bits (and hence not the same as audio_device_t) because we need extra
+// bits to represent new devices.
+// NOT USED FOR R.  We do not use int64 flags.
+// This can be out of date for now, as it is unused even for string validation
+// (instead TypeConverter<InputDeviceTraits> is used).
+const std::unordered_map<std::string, int64_t>& getAudioDeviceInMap() {
     // DO NOT MODIFY VALUES (OK to add new ones).  This does NOT match audio_device_t.
     static std::unordered_map<std::string, int64_t> map{
         {"AUDIO_DEVICE_IN_COMMUNICATION",          1LL << 0},
@@ -57,7 +80,13 @@
     return map;
 }
 
-std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap() {
+// A map in case we need to return a flag for output devices.
+// This is 64 bits (and hence not the same as audio_device_t) because we need extra
+// bits to represent new devices.
+// NOT USED FOR R.  We do not use int64 flags.
+// This can be out of date for now, as it is unused even for string validation
+// (instead TypeConverter<OutputDeviceTraits> is used).
+const std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap() {
     // DO NOT MODIFY VALUES (OK to add new ones).  This does NOT match audio_device_t.
     static std::unordered_map<std::string, int64_t> map{
         {"AUDIO_DEVICE_OUT_EARPIECE",                  1LL << 0},
@@ -96,24 +125,7 @@
     return map;
 }
 
-std::unordered_map<std::string, int32_t>& getCallerNameMap() {
-    // DO NOT MODIFY VALUES (OK to add new ones).
-    // This may be found in frameworks/av/media/libmediametrics/include/MediaMetricsConstants.h
-    static std::unordered_map<std::string, int32_t> map{
-        {"aaudio",        0},           // Native AAudio
-        {"java",          1},           // Java API layer
-        {"media",         2},           // libmedia (mediaplayer)
-        {"opensles",      3},           // Open SLES
-        {"rtp",           4},           // RTP communication
-        {"soundpool",     5},           // SoundPool
-        {"tonegenerator", 6},           // dial tones
-        {"unknown",       7},           // callerName not set
-        // R values above.
-    };
-    return map;
-}
-
-std::unordered_map<std::string, int32_t>& getThreadTypeMap() {
+const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap() {
     // DO NOT MODIFY VALUES (OK to add new ones).
     // This may be found in frameworks/av/services/audioflinger/Threads.h
     static std::unordered_map<std::string, int32_t> map{
@@ -130,6 +142,15 @@
     return map;
 }
 
+const std::unordered_map<std::string, int32_t>& getAudioTrackTraitsMap() {
+    // DO NOT MODIFY VALUES (OK to add new ones).
+    static std::unordered_map<std::string, int32_t> map{
+        {"static",        (1 << 0)},  // A static track
+        // R values above.
+    };
+    return map;
+}
+
 // Helper: Create the corresponding int32 from string flags split with '|'.
 template <typename Traits>
 int32_t int32FromFlags(const std::string &flags)
@@ -163,6 +184,37 @@
     return sFlags;
 }
 
+template <typename M>
+std::string validateStringFromMap(const std::string &str, const M& map)
+{
+    if (str.empty()) return {};
+
+    const auto result = stringutils::split(str, "|");
+    std::stringstream ss;
+    for (const auto &s : result) {
+        if (map.count(s) > 0) {
+            if (ss.tellp() > 0) ss << "|";
+            ss << s;
+        }
+    }
+    return ss.str();
+}
+
+template <typename M>
+typename M::mapped_type flagsFromMap(const std::string &str, const M& map)
+{
+    if (str.empty()) return {};
+
+    const auto result = stringutils::split(str, "|");
+    typename M::mapped_type value{};
+    for (const auto &s : result) {
+        auto it = map.find(s);
+        if (it == map.end()) continue;
+        value |= it->second;
+    }
+    return value;
+}
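
A short usage note for the two helpers above, using the track-traits map defined earlier in this file: validateStringFromMap() keeps only the '|'-separated tokens present in the map, while flagsFromMap() ORs their values together.

    // Usage sketch, grounded in getAudioTrackTraitsMap() above ({"static", 1 << 0}).
    // Assumes the two templates and the map accessor defined earlier in this file.
    void trackTraitsExample() {
        const auto& m = getAudioTrackTraitsMap();
        const std::string valid = validateStringFromMap("static|bogus", m);  // "static"
        const int32_t traits = flagsFromMap("static|bogus", m);              // 1
        (void)valid; (void)traits;
    }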
+
 template <>
 int32_t lookup<CONTENT_TYPE>(const std::string &contentType)
 {
@@ -178,7 +230,7 @@
 {
     AudioContentTraits::Type value;
     if (!TypeConverter<AudioContentTraits>::fromString(contentType, value)) {
-        return "UNKNOWN";
+        return "";
     }
     return contentType.c_str() + sizeof("AUDIO_CONTENT_TYPE");
 }
@@ -198,7 +250,7 @@
 {
     FormatTraits::Type value;
     if (!TypeConverter<FormatTraits>::fromString(encoding, value)) {
-        return "INVALID";
+        return "";
     }
     return encoding.c_str() + sizeof("AUDIO_FORMAT");
 }
@@ -242,7 +294,7 @@
 {
     SourceTraits::Type value;
     if (!TypeConverter<SourceTraits>::fromString(sourceType, value)) {
-        return "DEFAULT";
+        return "";
     }
     return sourceType.c_str() + sizeof("AUDIO_SOURCE");
 }
@@ -262,7 +314,7 @@
 {
     StreamTraits::Type value;
     if (!TypeConverter<StreamTraits>::fromString(streamType, value)) {
-        return "DEFAULT";
+        return "";
     }
     return streamType.c_str() + sizeof("AUDIO_STREAM");
 }
@@ -282,7 +334,7 @@
 {
     UsageTraits::Type value;
     if (!TypeConverter<UsageTraits>::fromString(usage, value)) {
-        return "UNKNOWN";
+        return "";
     }
     return usage.c_str() + sizeof("AUDIO_USAGE");
 }
@@ -290,54 +342,40 @@
 template <>
 int64_t lookup<INPUT_DEVICE>(const std::string &inputDevice)
 {
-    auto& map = getAudioDeviceInMap();
-    auto it = map.find(inputDevice);
-    if (it == map.end()) {
-        return 0;
-    }
-    return it->second;
+    // NOT USED FOR R.
+    // Returns a set of bits, each one representing a device in inputDevice.
+    // This is a 64 bit integer, not the same as audio_device_t.
+    return flagsFromMap(inputDevice, getAudioDeviceInMap());
 }
 
 template <>
 std::string lookup<INPUT_DEVICE>(const std::string &inputDevice)
 {
-    auto& map = getAudioDeviceInMap();
-    auto it = map.find(inputDevice);
-    if (it == map.end()) {
-        return "NONE";
-    }
-    return inputDevice.c_str() + sizeof("AUDIO_DEVICE_IN");
+    return stringFromFlags<InputDeviceTraits>(inputDevice, sizeof("AUDIO_DEVICE_IN"));
 }
 
 template <>
 int64_t lookup<OUTPUT_DEVICE>(const std::string &outputDevice)
 {
-    auto& map = getAudioDeviceOutMap();
-    auto it = map.find(outputDevice);
-    if (it == map.end()) {
-        return 0; // nothing
-    }
-    return it->second;
+    // NOT USED FOR R.
+    // Returns a set of bits, each one representing a device in outputDevice.
+    // This is a 64 bit integer, not the same as audio_device_t.
+    return flagsFromMap(outputDevice, getAudioDeviceOutMap());
 }
 
 template <>
 std::string lookup<OUTPUT_DEVICE>(const std::string &outputDevice)
 {
-    auto& map = getAudioDeviceOutMap();
-    auto it = map.find(outputDevice);
-    if (it == map.end()) {
-        return "NONE";
-    }
-    return outputDevice.c_str() + sizeof("AUDIO_DEVICE_OUT");
+    return stringFromFlags<OutputDeviceTraits>(outputDevice, sizeof("AUDIO_DEVICE_OUT"));
 }
 
 template <>
 int32_t lookup<CALLER_NAME>(const std::string &callerName)
 {
-    auto& map = getCallerNameMap();
+    auto& map = getAudioCallerNameMap();
     auto it = map.find(callerName);
     if (it == map.end()) {
-        return 7;      // return unknown
+        return 0;      // return unknown
     }
     return it->second;
 }
@@ -345,10 +383,10 @@
 template <>
 std::string lookup<CALLER_NAME>(const std::string &callerName)
 {
-    auto& map = getCallerNameMap();
+    auto& map = getAudioCallerNameMap();
     auto it = map.find(callerName);
     if (it == map.end()) {
-        return "unknown";
+        return "";
     }
     return callerName;
 }
@@ -356,7 +394,7 @@
 template <>
 int32_t lookup<THREAD_TYPE>(const std::string &threadType)
 {
-    auto& map = getThreadTypeMap();
+    auto& map = getAudioThreadTypeMap();
     auto it = map.find(threadType);
     if (it == map.end()) {
         return -1; // note this as an illegal thread value as we don't have unknown here.
@@ -367,10 +405,10 @@
 template <>
 std::string lookup<THREAD_TYPE>(const std::string &threadType)
 {
-    auto& map = getThreadTypeMap();
+    auto& map = getAudioThreadTypeMap();
     auto it = map.find(threadType);
     if (it == map.end()) {
-        return "UNKNOWN";
+        return "";
     }
     return threadType;
 }
@@ -380,4 +418,16 @@
     return threadType == "RECORD" || threadType == "MMAP_CAPTURE";
 }
 
+template <>
+std::string lookup<TRACK_TRAITS>(const std::string &traits)
+{
+    return validateStringFromMap(traits, getAudioTrackTraitsMap());
+}
+
+template <>
+int32_t lookup<TRACK_TRAITS>(const std::string &traits)
+{
+    return flagsFromMap(traits, getAudioTrackTraitsMap());
+}
+
 } // namespace android::mediametrics::types
diff --git a/services/mediametrics/AudioTypes.h b/services/mediametrics/AudioTypes.h
index a094e6e..e1deeb1 100644
--- a/services/mediametrics/AudioTypes.h
+++ b/services/mediametrics/AudioTypes.h
@@ -21,17 +21,19 @@
 
 namespace android::mediametrics::types {
 
-// Helper methods that map mediametrics logged strings to
-// integer codes.
-std::unordered_map<std::string, int64_t>& getAudioDeviceInMap();
-std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap();
-std::unordered_map<std::string, int32_t>& getCallerNameMap();
-std::unordered_map<std::string, int32_t>& getThreadTypeMap();
+// Helper methods that map mediametrics logged strings to integer codes.
+// In R we do not use the integer codes, but rather we can use these maps
+// to validate correct strings.
+const std::unordered_map<std::string, int32_t>& getAudioCallerNameMap();
+const std::unordered_map<std::string, int64_t>& getAudioDeviceInMap();
+const std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap();
+const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap();
+const std::unordered_map<std::string, int32_t>& getAudioTrackTraitsMap();
 
 // Enumeration for the device connection results.
 enum DeviceConnectionResult : int32_t {
-    DEVICE_CONNECTION_RESULT_UNKNOWN = 0,              // Success is unknown.
-    DEVICE_CONNECTION_RESULT_SUCCESS = 1,              // Audio delivered
+    DEVICE_CONNECTION_RESULT_SUCCESS = 0,              // Audio delivered
+    DEVICE_CONNECTION_RESULT_UNKNOWN = 1,              // Success is unknown.
     DEVICE_CONNECTION_RESULT_JAVA_SERVICE_CANCEL = 2,  // Canceled in Java service
     // Do not modify the constants above after R.  Adding new constants is fine.
 };
@@ -48,6 +50,7 @@
     SOURCE_TYPE,
     STREAM_TYPE,
     THREAD_TYPE,
+    TRACK_TRAITS,
     USAGE,
 };
 
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index d682fed..48e766e 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -185,7 +185,7 @@
     }
 
     if (!isTrusted || item->getTimestamp() == 0) {
-        // Westworld logs two times for events: ElapsedRealTimeNs (BOOTTIME) and
+        // Statsd logs two times for events: ElapsedRealTimeNs (BOOTTIME) and
         // WallClockTimeNs (REALTIME), but currently logs REALTIME to cloud.
         //
         // For consistency and correlation with other logging mechanisms
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index d152264..792b7f0 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -65,7 +65,7 @@
     static nsecs_t roundTime(nsecs_t timeNs);
 
     /**
-     * Returns true if we should use uid for package name when uploading to WestWorld.
+     * Returns true if we should use uid for package name when uploading to statsd.
      */
     static bool useUidForPackage(const std::string& package, const std::string& installer);
 
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
index d878720..7a8bbee 100644
--- a/services/mediametrics/StringUtils.h
+++ b/services/mediametrics/StringUtils.h
@@ -22,6 +22,30 @@
 namespace android::mediametrics::stringutils {
 
 /**
+ * fieldPrint is a helper method that logs to a stringstream a sequence of
+ * field names (in a fixed size array) together with a variable number of arg parameters.
+ *
+ * stringstream << field[0] << ":" << arg0 << " ";
+ * stringstream << field[1] << ":" << arg1 << " ";
+ * ...
+ * stringstream << field[N-1] << ":" << arg{N-1} << " ";
+ *
+ * The number of fields must exactly match the (variable) arguments.
+ *
+ * Example:
+ *
+ * const char * const fields[] = { "integer" };
+ * std::stringstream ss;
+ * fieldPrint(ss, fields, int(10));
+ */
+template <size_t N, typename... Targs>
+void fieldPrint(std::stringstream& ss, const char * const (& fields)[N], Targs... args) {
+    static_assert(N == sizeof...(args));          // guarantee #fields == #args
+    auto fptr = fields;                           // get a pointer to the base of fields array
+    ((ss << *fptr++ << ":" << args << " "), ...); // (fold expression), send to stringstream.
+}
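
A slightly larger usage sketch than the single-field example in the comment above, showing the fold over several fields (the field names here are made up for illustration):

    #include <sstream>
    #include <string>

    std::string fieldPrintExample() {
        const char * const fields[] = { "device", "frames", "rate" };
        std::stringstream ss;
        android::mediametrics::stringutils::fieldPrint(ss, fields, "SPEAKER", 1024, 48000);
        return ss.str();  // "device:SPEAKER frames:1024 rate:48000 "
    }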
+
+/**
  * Return string tokens from iterator, separated by spaces and reserved chars.
  */
 std::string tokenizer(std::string::const_iterator& it,
diff --git a/services/mediametrics/TimeMachine.h b/services/mediametrics/TimeMachine.h
index 00a44a4..ce579b3 100644
--- a/services/mediametrics/TimeMachine.h
+++ b/services/mediametrics/TimeMachine.h
@@ -220,10 +220,10 @@
 
     using History = std::map<std::string /* key */, std::shared_ptr<KeyHistory>>;
 
-    static inline constexpr size_t kTimeSequenceMaxElements = 100;
-    static inline constexpr size_t kKeyMaxProperties = 100;
-    static inline constexpr size_t kKeyLowWaterMark = 500;
-    static inline constexpr size_t kKeyHighWaterMark = 1000;
+    static inline constexpr size_t kTimeSequenceMaxElements = 50;
+    static inline constexpr size_t kKeyMaxProperties = 50;
+    static inline constexpr size_t kKeyLowWaterMark = 400;
+    static inline constexpr size_t kKeyHighWaterMark = 500;
 
     // Estimated max data space usage is 3KB * kKeyHighWaterMark.
 
@@ -255,6 +255,7 @@
         {
             std::lock_guard lock2(other.mLock);
             mHistory = other.mHistory;
+            mGarbageCollectionCount = other.mGarbageCollectionCount.load();
         }
 
         // Now that we safely have our own shared pointers, let's dup them
@@ -420,6 +421,7 @@
     void clear() {
         std::lock_guard lock(mLock);
         mHistory.clear();
+        mGarbageCollectionCount = 0;
     }
 
     /**
@@ -453,6 +455,10 @@
         return { ss.str(), lines - ll };
     }
 
+    size_t getGarbageCollectionCount() const {
+        return mGarbageCollectionCount;
+    }
+
 private:
 
     // Obtains the lock for a KeyHistory.
@@ -496,8 +502,6 @@
         // TODO: something better than this for garbage collection.
         if (mHistory.size() < mKeyHighWaterMark) return false;
 
-        ALOGD("%s: garbage collection", __func__);
-
         // erase everything explicitly expired.
         std::multimap<int64_t, std::string> accessList;
         // use a stale vector with precise type to avoid type erasure overhead in garbage
@@ -534,12 +538,16 @@
         ALOGD("%s(%zu, %zu): key size:%zu",
                 __func__, mKeyLowWaterMark, mKeyHighWaterMark,
                 mHistory.size());
+
+        ++mGarbageCollectionCount;
         return true;
     }
 
     const size_t mKeyLowWaterMark = kKeyLowWaterMark;
     const size_t mKeyHighWaterMark = kKeyHighWaterMark;
 
+    std::atomic<size_t> mGarbageCollectionCount{};
+
     /**
      * Locking Strategy
      *
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 8a22826..0ca4639 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -43,9 +43,9 @@
     // Transaction Log between the Low Water Mark and the High Water Mark.
 
     // low water mark
-    static inline constexpr size_t kLogItemsLowWater = 5000;
+    static inline constexpr size_t kLogItemsLowWater = 1700;
     // high water mark
-    static inline constexpr size_t kLogItemsHighWater = 10000;
+    static inline constexpr size_t kLogItemsHighWater = 2000;
 
     // Estimated max data usage is 1KB * kLogItemsHighWater.
 
@@ -79,6 +79,7 @@
         std::lock_guard lock2(other.mLock);
         mLog = other.mLog;
         mItemMap = other.mItemMap;
+        mGarbageCollectionCount = other.mGarbageCollectionCount.load();
 
         return *this;
     }
@@ -181,6 +182,11 @@
         std::lock_guard lock(mLock);
         mLog.clear();
         mItemMap.clear();
+        mGarbageCollectionCount = 0;
+    }
+
+    size_t getGarbageCollectionCount() const {
+        return mGarbageCollectionCount;
     }
 
 private:
@@ -216,8 +222,6 @@
     bool gc(std::vector<std::any>& garbage) REQUIRES(mLock) {
         if (mLog.size() < mHighWaterMark) return false;
 
-        ALOGD("%s: garbage collection", __func__);
-
         auto eraseEnd = mLog.begin();
         size_t toRemove = mLog.size() - mLowWaterMark;
         // remove at least those elements.
@@ -265,6 +269,7 @@
         ALOGD("%s(%zu, %zu): log size:%zu item map size:%zu, item map items:%zu",
                 __func__, mLowWaterMark, mHighWaterMark,
                 mLog.size(), mItemMap.size(), itemMapCount);
+        ++mGarbageCollectionCount;
         return true;
     }
 
@@ -287,6 +292,8 @@
     const size_t mLowWaterMark = kLogItemsLowWater;
     const size_t mHighWaterMark = kLogItemsHighWater;
 
+    std::atomic<size_t> mGarbageCollectionCount{};
+
     mutable std::mutex mLock;
 
     MapTimeItem mLog GUARDED_BY(mLock);
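
TimeMachine and TransactionLog both copy the new mGarbageCollectionCount with .load() in their copy/assignment paths because std::atomic is not copy-assignable. A minimal sketch of that pattern, assuming a simplified container:

    #include <atomic>
    #include <cstddef>
    #include <mutex>

    // Sketch only: copying an atomic counter requires an explicit load().
    struct GcCounted {
        std::atomic<size_t> mGarbageCollectionCount{};
        mutable std::mutex mLock;

        GcCounted& operator=(const GcCounted& other) {
            if (this == &other) return *this;
            std::scoped_lock lock(mLock, other.mLock);
            // std::atomic has no copy assignment; read the value explicitly.
            mGarbageCollectionCount = other.mGarbageCollectionCount.load();
            return *this;
        }
    };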
diff --git a/services/mediametrics/cleaner.cpp b/services/mediametrics/cleaner.cpp
new file mode 100644
index 0000000..e746842
--- /dev/null
+++ b/services/mediametrics/cleaner.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MetricsCleaner"
+#include <utils/Log.h>
+
+#include "cleaner.h"
+
+namespace android::mediametrics {
+
+// Place time into buckets at 0, 1, 2, 4, 8, 16, 32 seconds and then at minute boundaries.
+// Time is rounded up to the next bucket boundary.
+//
+int64_t bucket_time_minutes(int64_t in_millis) {
+
+    const int64_t SEC_TO_MS = 1000;
+    const int64_t MIN_TO_MS = (60 * SEC_TO_MS);
+
+    if (in_millis <= 0) {
+        return 0;
+    }
+    if (in_millis <= 32 * SEC_TO_MS) {
+        for (int sec = 1; sec <= 32; sec *= 2) {
+            if (in_millis <= sec * SEC_TO_MS) {
+                return sec * SEC_TO_MS;
+            }
+        }
+    }
+    /* up to next 1 minute boundary */
+    int64_t minutes = (in_millis + MIN_TO_MS - 1) / MIN_TO_MS;
+    in_millis = minutes * MIN_TO_MS;
+    return in_millis;
+}
+
+} // namespace android::mediametrics
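
A few worked values for bucket_time_minutes(), derived from the code above (0, then power-of-two second buckets up to 32 s, then minute boundaries rounded up):

    #include <cassert>
    #include <cstdint>
    #include "cleaner.h"

    void bucketTimeExamples() {
        using android::mediametrics::bucket_time_minutes;
        assert(bucket_time_minutes(-5)    == 0);       // non-positive -> 0
        assert(bucket_time_minutes(900)   == 1000);    // rounds up to the 1 s bucket
        assert(bucket_time_minutes(1500)  == 2000);    // next power-of-two second
        assert(bucket_time_minutes(32000) == 32000);   // exactly on the 32 s bucket
        assert(bucket_time_minutes(45000) == 60000);   // rounds up to 1 minute
        assert(bucket_time_minutes(61000) == 120000);  // rounds up to 2 minutes
    }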
diff --git a/services/mediametrics/cleaner.h b/services/mediametrics/cleaner.h
new file mode 100644
index 0000000..72e24f9
--- /dev/null
+++ b/services/mediametrics/cleaner.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIAMETRICS_CLEANER_H
+#define MEDIAMETRICS_CLEANER_H
+
+namespace android::mediametrics {
+
+// break time into buckets at 1,2,4,8,16,32 seconds
+// and then at minute boundaries
+//
+extern int64_t bucket_time_minutes(int64_t incomingMs);
+
+} // namespace android::mediametrics
+
+#endif  // MEDIAMETRICS_CLEANER_H
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 26eda79..ec9354f 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -31,6 +31,7 @@
 
 #include <statslog.h>
 
+#include "cleaner.h"
 #include "MediaMetricsService.h"
 #include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
 #include "iface_statsd.h"
@@ -168,11 +169,6 @@
     }
     // android.media.mediacodec.latency.hist    NOT EMITTED
 
-#if 0
-    // TODO(b/139143194)
-    // can't send them to statsd until statsd proto updates merge
-    // but in the meantime, they can appear in local 'dumpsys media.metrics' output
-    //
     // android.media.mediacodec.bitrate_mode string
     std::string bitrate_mode;
     if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
@@ -186,9 +182,9 @@
     // android.media.mediacodec.lifetimeMs int64
     int64_t lifetimeMs = -1;
     if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
+        lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
         metrics_proto.set_lifetime_millis(lifetimeMs);
     }
-#endif
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index bdeda30..c2e0759 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -21,6 +21,10 @@
         "libutils",
     ],
 
+    header_libs: [
+        "libaudioutils_headers",
+    ],
+
     srcs: [
         "mediametrics_tests.cpp",
     ],
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 7da6306..478355b 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -18,12 +18,16 @@
 #include <utils/Log.h>
 
 #include "MediaMetricsService.h"
-#include "StringUtils.h"
 
 #include <stdio.h>
+#include <unordered_set>
 
 #include <gtest/gtest.h>
 #include <media/MediaMetricsItem.h>
+#include <system/audio.h>
+
+#include "AudioTypes.h"
+#include "StringUtils.h"
 
 using namespace android;
 
@@ -36,6 +40,15 @@
     return count;
 }
 
+template <typename M>
+ssize_t countDuplicates(const M& map) {
+    std::unordered_set<typename M::mapped_type> s;
+    for (const auto &m : map) {
+        s.emplace(m.second);
+    }
+    return map.size() - s.size();
+}
+
 TEST(mediametrics_tests, startsWith) {
   std::string s("test");
   ASSERT_EQ(true, android::mediametrics::startsWith(s, "te"));
@@ -804,7 +817,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -840,7 +853,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -926,6 +939,132 @@
     ASSERT_EQ((size_t)1, timedAction.size());
 }
 
+// Ensure we don't introduce unexpected duplicates into our maps.
+TEST(mediametrics_tests, audio_types_tables) {
+    using namespace android::mediametrics::types;
+
+    ASSERT_EQ(0, countDuplicates(getAudioCallerNameMap()));
+    ASSERT_EQ(2, countDuplicates(getAudioDeviceInMap()));  // has dups
+    ASSERT_EQ(1, countDuplicates(getAudioDeviceOutMap())); // has dups
+    ASSERT_EQ(0, countDuplicates(getAudioThreadTypeMap()));
+    ASSERT_EQ(0, countDuplicates(getAudioTrackTraitsMap()));
+}
+
+// Check our string validation (before logging to statsd).
+// This variant checks the logged, possibly shortened string.
+TEST(mediametrics_tests, audio_types_string) {
+    using namespace android::mediametrics::types;
+
+    ASSERT_EQ("java", (lookup<CALLER_NAME, std::string>)("java"));
+    ASSERT_EQ("", (lookup<CALLER_NAME, std::string>)("random"));
+
+    ASSERT_EQ("SPEECH", (lookup<CONTENT_TYPE, std::string>)("AUDIO_CONTENT_TYPE_SPEECH"));
+    ASSERT_EQ("", (lookup<CONTENT_TYPE, std::string>)("random"));
+
+    ASSERT_EQ("FLAC", (lookup<ENCODING, std::string>)("AUDIO_FORMAT_FLAC"));
+    ASSERT_EQ("", (lookup<ENCODING, std::string>)("random"));
+
+    ASSERT_EQ("USB_DEVICE", (lookup<INPUT_DEVICE, std::string>)("AUDIO_DEVICE_IN_USB_DEVICE"));
+    ASSERT_EQ("BUILTIN_MIC|WIRED_HEADSET", (lookup<INPUT_DEVICE, std::string>)(
+            "AUDIO_DEVICE_IN_BUILTIN_MIC|AUDIO_DEVICE_IN_WIRED_HEADSET"));
+    ASSERT_EQ("", (lookup<INPUT_DEVICE, std::string>)("random"));
+
+    ASSERT_EQ("RAW", (lookup<INPUT_FLAG, std::string>)("AUDIO_INPUT_FLAG_RAW"));
+    ASSERT_EQ("HW_AV_SYNC|VOIP_TX", (lookup<INPUT_FLAG, std::string>)(
+            "AUDIO_INPUT_FLAG_HW_AV_SYNC|AUDIO_INPUT_FLAG_VOIP_TX"));
+    ASSERT_EQ("", (lookup<INPUT_FLAG, std::string>)("random"));
+
+    ASSERT_EQ("BLUETOOTH_SCO_CARKIT",
+            (lookup<OUTPUT_DEVICE, std::string>)("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"));
+    ASSERT_EQ("SPEAKER|HDMI", (lookup<OUTPUT_DEVICE, std::string>)(
+            "AUDIO_DEVICE_OUT_SPEAKER|AUDIO_DEVICE_OUT_HDMI"));
+    ASSERT_EQ("", (lookup<OUTPUT_DEVICE, std::string>)("random"));
+
+    ASSERT_EQ("PRIMARY", (lookup<OUTPUT_FLAG, std::string>)("AUDIO_OUTPUT_FLAG_PRIMARY"));
+    ASSERT_EQ("DEEP_BUFFER|NON_BLOCKING", (lookup<OUTPUT_FLAG, std::string>)(
+            "AUDIO_OUTPUT_FLAG_DEEP_BUFFER|AUDIO_OUTPUT_FLAG_NON_BLOCKING"));
+    ASSERT_EQ("", (lookup<OUTPUT_FLAG, std::string>)("random"));
+
+    ASSERT_EQ("MIC", (lookup<SOURCE_TYPE, std::string>)("AUDIO_SOURCE_MIC"));
+    ASSERT_EQ("", (lookup<SOURCE_TYPE, std::string>)("random"));
+
+    ASSERT_EQ("TTS", (lookup<STREAM_TYPE, std::string>)("AUDIO_STREAM_TTS"));
+    ASSERT_EQ("", (lookup<STREAM_TYPE, std::string>)("random"));
+
+    ASSERT_EQ("DIRECT", (lookup<THREAD_TYPE, std::string>)("DIRECT"));
+    ASSERT_EQ("", (lookup<THREAD_TYPE, std::string>)("random"));
+
+    ASSERT_EQ("static", (lookup<TRACK_TRAITS, std::string>)("static"));
+    ASSERT_EQ("", (lookup<TRACK_TRAITS, std::string>)("random"));
+
+    ASSERT_EQ("VOICE_COMMUNICATION",
+            (lookup<USAGE, std::string>)("AUDIO_USAGE_VOICE_COMMUNICATION"));
+    ASSERT_EQ("", (lookup<USAGE, std::string>)("random"));
+}
+
+// Check our string validation (before logging to statsd).
+// This variant checks integral value logging.
+TEST(mediametrics_tests, audio_types_integer) {
+    using namespace android::mediametrics::types;
+
+    ASSERT_EQ(2, (lookup<CALLER_NAME, int32_t>)("java"));
+    ASSERT_EQ(0, (lookup<CALLER_NAME, int32_t>)("random")); // 0 == unknown
+
+    ASSERT_EQ((int32_t)AUDIO_CONTENT_TYPE_SPEECH,
+            (lookup<CONTENT_TYPE, int32_t>)("AUDIO_CONTENT_TYPE_SPEECH"));
+    ASSERT_EQ((int32_t)AUDIO_CONTENT_TYPE_UNKNOWN, (lookup<CONTENT_TYPE, int32_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_FORMAT_FLAC, (lookup<ENCODING, int32_t>)("AUDIO_FORMAT_FLAC"));
+    ASSERT_EQ((int32_t)AUDIO_FORMAT_INVALID, (lookup<ENCODING, int32_t>)("random"));
+
+    ASSERT_EQ(getAudioDeviceInMap().at("AUDIO_DEVICE_IN_USB_DEVICE"),
+            (lookup<INPUT_DEVICE, int64_t>)("AUDIO_DEVICE_IN_USB_DEVICE"));
+    ASSERT_EQ(getAudioDeviceInMap().at("AUDIO_DEVICE_IN_BUILTIN_MIC")
+            | getAudioDeviceInMap().at("AUDIO_DEVICE_IN_WIRED_HEADSET"),
+            (lookup<INPUT_DEVICE, int64_t>)(
+            "AUDIO_DEVICE_IN_BUILTIN_MIC|AUDIO_DEVICE_IN_WIRED_HEADSET"));
+    ASSERT_EQ(0, (lookup<INPUT_DEVICE, int64_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_INPUT_FLAG_RAW,
+            (lookup<INPUT_FLAG, int32_t>)("AUDIO_INPUT_FLAG_RAW"));
+    ASSERT_EQ((int32_t)AUDIO_INPUT_FLAG_HW_AV_SYNC
+            | (int32_t)AUDIO_INPUT_FLAG_VOIP_TX,
+            (lookup<INPUT_FLAG, int32_t>)(
+            "AUDIO_INPUT_FLAG_HW_AV_SYNC|AUDIO_INPUT_FLAG_VOIP_TX"));
+    ASSERT_EQ(0, (lookup<INPUT_FLAG, int32_t>)("random"));
+
+    ASSERT_EQ(getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"),
+            (lookup<OUTPUT_DEVICE, int64_t>)("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"));
+    ASSERT_EQ(getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_SPEAKER")
+            | getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_HDMI"),
+            (lookup<OUTPUT_DEVICE, int64_t>)(
+            "AUDIO_DEVICE_OUT_SPEAKER|AUDIO_DEVICE_OUT_HDMI"));
+    ASSERT_EQ(0, (lookup<OUTPUT_DEVICE, int64_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_OUTPUT_FLAG_PRIMARY,
+            (lookup<OUTPUT_FLAG, int32_t>)("AUDIO_OUTPUT_FLAG_PRIMARY"));
+    ASSERT_EQ((int32_t)AUDIO_OUTPUT_FLAG_DEEP_BUFFER | (int32_t)AUDIO_OUTPUT_FLAG_NON_BLOCKING,
+            (lookup<OUTPUT_FLAG, int32_t>)(
+            "AUDIO_OUTPUT_FLAG_DEEP_BUFFER|AUDIO_OUTPUT_FLAG_NON_BLOCKING"));
+    ASSERT_EQ(0, (lookup<OUTPUT_FLAG, int32_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_SOURCE_MIC, (lookup<SOURCE_TYPE, int32_t>)("AUDIO_SOURCE_MIC"));
+    ASSERT_EQ((int32_t)AUDIO_SOURCE_DEFAULT, (lookup<SOURCE_TYPE, int32_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_STREAM_TTS, (lookup<STREAM_TYPE, int32_t>)("AUDIO_STREAM_TTS"));
+    ASSERT_EQ((int32_t)AUDIO_STREAM_DEFAULT, (lookup<STREAM_TYPE, int32_t>)("random"));
+
+    ASSERT_EQ(1, (lookup<THREAD_TYPE, int32_t>)("DIRECT"));
+    ASSERT_EQ(-1, (lookup<THREAD_TYPE, int32_t>)("random"));
+
+    ASSERT_EQ(getAudioTrackTraitsMap().at("static"), (lookup<TRACK_TRAITS, int32_t>)("static"));
+    ASSERT_EQ(0, (lookup<TRACK_TRAITS, int32_t>)("random"));
+
+    ASSERT_EQ((int32_t)AUDIO_USAGE_VOICE_COMMUNICATION,
+            (lookup<USAGE, int32_t>)("AUDIO_USAGE_VOICE_COMMUNICATION"));
+    ASSERT_EQ((int32_t)AUDIO_USAGE_UNKNOWN, (lookup<USAGE, int32_t>)("random"));
+}
+
 #if 0
 // Stress test code for garbage collection, you need to enable AID_SHELL as trusted to run
 // in MediaMetricsService.cpp.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index ff45c87..3d36f8e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -57,9 +57,11 @@
         ALOGW("ResourceManagerService is dead as well.");
         return;
     }
-    service->removeResource(mPid, mClientId, false);
 
     service->overridePid(mPid, -1);
+    // thiz is freed in the call below, so it must be the last call referring to thiz
+    service->removeResource(mPid, mClientId, false);
+
 }
 
 template <typename T>
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 79e9fbc..bfcf01f 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -10,11 +10,12 @@
     shared_libs: [
         "libbase",
         "libbinder_ndk",
+        "libcutils",
         "liblog",
         "libmediatranscoding",
         "libutils",
     ],
-    
+
     export_shared_lib_headers: [
         "libmediatranscoding",
     ],
@@ -51,18 +52,6 @@
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
     init_rc: ["mediatranscoding.rc"],
 
     cflags: [
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index b843967..b0ea0db 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -20,6 +20,8 @@
 
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <cutils/properties.h>
+#include <media/TranscoderWrapper.h>
 #include <media/TranscodingClientManager.h>
 #include <media/TranscodingJobScheduler.h>
 #include <media/TranscodingUidPolicy.h>
@@ -51,19 +53,14 @@
     }
 }
 
-MediaTranscodingService::MediaTranscodingService()
-      : MediaTranscodingService(std::make_shared<SimulatedTranscoder>(),
-                                std::make_shared<TranscodingUidPolicy>()) {}
-
 MediaTranscodingService::MediaTranscodingService(
-        const std::shared_ptr<TranscoderInterface>& transcoder,
-        const std::shared_ptr<UidPolicyInterface>& uidPolicy)
-      : mJobScheduler(new TranscodingJobScheduler(transcoder, uidPolicy)),
+        const std::shared_ptr<TranscoderInterface>& transcoder)
+      : mUidPolicy(new TranscodingUidPolicy()),
+        mJobScheduler(new TranscodingJobScheduler(transcoder, mUidPolicy)),
         mClientManager(new TranscodingClientManager(mJobScheduler)) {
     ALOGV("MediaTranscodingService is created");
-
     transcoder->setCallback(mJobScheduler);
-    uidPolicy->setCallback(mJobScheduler);
+    mUidPolicy->setCallback(mJobScheduler);
 }
 
 MediaTranscodingService::~MediaTranscodingService() {
@@ -86,8 +83,15 @@
 
 //static
 void MediaTranscodingService::instantiate() {
+    std::shared_ptr<TranscoderInterface> transcoder;
+    if (property_get_bool("debug.transcoding.simulated_transcoder", false)) {
+        transcoder = std::make_shared<SimulatedTranscoder>();
+    } else {
+        transcoder = std::make_shared<TranscoderWrapper>();
+    }
+
     std::shared_ptr<MediaTranscodingService> service =
-            ::ndk::SharedRefBase::make<MediaTranscodingService>();
+            ::ndk::SharedRefBase::make<MediaTranscodingService>(transcoder);
     binder_status_t status =
             AServiceManager_addService(service->asBinder().get(), getServiceName());
     if (status != STATUS_OK) {
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index f7ac336..505239c 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -38,9 +38,7 @@
     static constexpr int32_t kInvalidJobId = -1;
     static constexpr int32_t kInvalidClientId = -1;
 
-    MediaTranscodingService();
-    MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder,
-                            const std::shared_ptr<UidPolicyInterface>& uidPolicy);
+    MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder);
     virtual ~MediaTranscodingService();
 
     static void instantiate();
@@ -61,6 +59,7 @@
 
     mutable std::mutex mServiceLock;
 
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
     std::shared_ptr<TranscodingJobScheduler> mJobScheduler;
     std::shared_ptr<TranscodingClientManager> mClientManager;
 };
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
index 1b68d5c..97d5f5f 100644
--- a/services/mediatranscoding/SimulatedTranscoder.cpp
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -47,29 +47,53 @@
     mCallback = cb;
 }
 
-void SimulatedTranscoder::start(ClientIdType clientId, JobIdType jobId,
-                                const TranscodingRequestParcel& /*request*/) {
-    queueEvent(Event::Start, clientId, jobId);
+void SimulatedTranscoder::start(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+        mJobProcessingTimeMs = request.testConfig->processingTotalTimeMs;
+    }
+    ALOGV("%s: job {%d}: processingTime: %lld", __FUNCTION__, jobId,
+          (long long)mJobProcessingTimeMs);
+    queueEvent(Event::Start, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onStarted(clientId, jobId);
+        }
+    });
 }
 
 void SimulatedTranscoder::pause(ClientIdType clientId, JobIdType jobId) {
-    queueEvent(Event::Pause, clientId, jobId);
+    queueEvent(Event::Pause, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onPaused(clientId, jobId);
+        }
+    });
 }
 
-void SimulatedTranscoder::resume(ClientIdType clientId, JobIdType jobId) {
-    queueEvent(Event::Resume, clientId, jobId);
+void SimulatedTranscoder::resume(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    queueEvent(Event::Resume, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onResumed(clientId, jobId);
+        }
+    });
 }
 
 void SimulatedTranscoder::stop(ClientIdType clientId, JobIdType jobId) {
-    queueEvent(Event::Stop, clientId, jobId);
+    queueEvent(Event::Stop, clientId, jobId, nullptr);
 }
 
-void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId) {
+void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                                     std::function<void()> runnable) {
     ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)clientId, jobId, toString(type));
 
     auto lock = std::scoped_lock(mLock);
 
-    mQueue.push_back({type, clientId, jobId});
+    mQueue.push_back({type, clientId, jobId, runnable});
     mCondition.notify_one();
 }
 
@@ -123,7 +147,7 @@
                 lastRunningTime = std::chrono::system_clock::now();
                 lastRunningEvent = event;
                 if (event.type == Event::Start) {
-                    remainingUs = std::chrono::microseconds(kJobDurationUs);
+                    remainingUs = std::chrono::milliseconds(mJobProcessingTimeMs);
                 }
             } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
                 running = false;
@@ -134,10 +158,9 @@
                 continue;
             }
 
-            auto callback = mCallback.lock();
-            if (callback != nullptr) {
+            if (event.runnable != nullptr) {
                 lock.unlock();
-                callback->onProgressUpdate(event.clientId, event.jobId, event.type);
+                event.runnable();
                 lock.lock();
             }
         }
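
Each queued event now carries its own completion runnable, and threadLoop() drops the lock while invoking it so the callback cannot deadlock against queueEvent(). A minimal sketch of that pattern, with placeholder types:

    #include <condition_variable>
    #include <functional>
    #include <list>
    #include <mutex>

    // Sketch of the "invoke the callback outside the lock" pattern used above.
    struct EventQueueSketch {
        std::mutex mLock;
        std::condition_variable mCondition;
        std::list<std::function<void()>> mQueue;

        void post(std::function<void()> runnable) {
            std::scoped_lock lock(mLock);
            mQueue.push_back(std::move(runnable));
            mCondition.notify_one();
        }

        void drainOnce() {
            std::unique_lock lock(mLock);
            mCondition.wait(lock, [this] { return !mQueue.empty(); });
            auto runnable = std::move(mQueue.front());
            mQueue.pop_front();
            if (runnable != nullptr) {
                lock.unlock();   // never call out while holding the queue lock
                runnable();
                lock.lock();
            }
        }
    };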
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
index 646ba4e..1c359dd 100644
--- a/services/mediatranscoding/SimulatedTranscoder.h
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -29,6 +29,8 @@
  * SimulatedTranscoder is currently used to instantiate MediaTranscodingService
  * on service side for testing, so that we could actually test the IPC calls of
  * MediaTranscodingService to expose issues that are observable only over IPC.
+ * SimulatedTranscoder is used when useSimulatedTranscoder in TranscodingTestConfig
+ * is set to true.
  *
  * SimulatedTranscoder simulates job execution by reporting finish after kJobDurationUs.
  * Job lifecycle events are reported via progress updates with special progress
@@ -40,6 +42,7 @@
         enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
         ClientIdType clientId;
         JobIdType jobId;
+        std::function<void()> runnable;
     };
 
     static constexpr int64_t kJobDurationUs = 1000000;
@@ -48,10 +51,11 @@
 
     // TranscoderInterface
     void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
-    void start(ClientIdType clientId, JobIdType jobId,
-               const TranscodingRequestParcel& request) override;
+    void start(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
     void pause(ClientIdType clientId, JobIdType jobId) override;
-    void resume(ClientIdType clientId, JobIdType jobId) override;
+    void resume(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
     void stop(ClientIdType clientId, JobIdType jobId) override;
     // ~TranscoderInterface
 
@@ -61,8 +65,12 @@
     std::condition_variable mCondition;
     std::list<Event> mQueue GUARDED_BY(mLock);
 
+    // Minimum time spent transcoding the video. This is used just for testing.
+    int64_t mJobProcessingTimeMs = kJobDurationUs / 1000;
+
     static const char* toString(Event::Type type);
-    void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId);
+    void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                    std::function<void()> runnable);
     void threadLoop();
 };
 
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index f37b39e..364a198 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -27,16 +27,24 @@
     ],
 }
 
-// MediaTranscodingService unit test
+// MediaTranscodingService unit test using simulated transcoder
 cc_test {
-    name: "mediatranscodingservice_tests",
+    name: "mediatranscodingservice_simulated_tests",
     defaults: ["mediatranscodingservice_test_defaults"],
 
-    srcs: ["mediatranscodingservice_tests.cpp"],
+    srcs: ["mediatranscodingservice_simulated_tests.cpp"],
 
     required: [
-        ":TranscodingUidPolicy_TestAppA",
-        ":TranscodingUidPolicy_TestAppB",
-        ":TranscodingUidPolicy_TestAppC",
+        "TranscodingUidPolicy_TestAppA",
+        "TranscodingUidPolicy_TestAppB",
+        "TranscodingUidPolicy_TestAppC",
     ],
-}
\ No newline at end of file
+}
+
+// MediaTranscodingService unit test using real transcoder
+cc_test {
+    name: "mediatranscodingservice_real_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_real_tests.cpp"],
+}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
new file mode 100644
index 0000000..2f4e74b
--- /dev/null
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -0,0 +1,430 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingJobParcel.h>
+#include <aidl/android/media/TranscodingJobPriority.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::ITranscodingClientCallback;
+using aidl::android::media::TranscodingJobParcel;
+using aidl::android::media::TranscodingJobPriority;
+using aidl::android::media::TranscodingRequestParcel;
+using aidl::android::media::TranscodingVideoTrackFormat;
+
+constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
+
+constexpr uid_t kClientUid = 5000;
+#define UID(n) (kClientUid + (n))
+
+constexpr int32_t kClientId = 0;
+#define CLIENT(n) (kClientId + (n))
+
+constexpr const char* kClientName = "TestClient";
+constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
+constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
+constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
+
+constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
+
+static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+struct ShellHelper {
+    static bool RunCmd(const std::string& cmdStr) {
+        int ret = system(cmdStr.c_str());
+        if (ret != 0) {
+            ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
+            return false;
+        }
+        return true;
+    }
+
+    static bool Start(const char* packageName, const char* activityName) {
+        return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
+                      " &> /dev/null");
+    }
+
+    static bool Stop(const char* packageName) {
+        return RunCmd("am force-stop " + std::string(packageName));
+    }
+};
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
+        int64_t clientId;
+        int32_t jobId;
+    };
+
+#define DECLARE_EVENT(action)                              \
+    static Event action(int32_t clientId, int32_t jobId) { \
+        return {Event::action, clientId, jobId};           \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Start:
+            eventStr = "Start";
+            break;
+        case Event::Pause:
+            eventStr = "Pause";
+            break;
+        case Event::Resume:
+            eventStr = "Resume";
+            break;
+        case Event::Finished:
+            eventStr = "Finished";
+            break;
+        case Event::Failed:
+            eventStr = "Failed";
+            break;
+        default:
+            return "NoEvent";
+        }
+        return "job {" + std::to_string(event.clientId) + ", " + std::to_string(event.jobId) +
+               "}: " + eventStr;
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty; returns NoEvent if still empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        mLastErr = err;
+        mCondition.notify_one();
+    }
+
+    TranscodingErrorCode getLastError() {
+        std::unique_lock lock(mLock);
+        return mLastErr;
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    TranscodingErrorCode mLastErr;
+};
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
+    TestClientCallback(int32_t id) : mClientId(id) {
+        ALOGI("TestClientCallback %d Created", mClientId);
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
+
+    Status openFileDescriptor(const std::string& in_fileUri, const std::string& in_mode,
+                              ::ndk::ScopedFileDescriptor* _aidl_return) override {
+        ALOGD("@@@ openFileDescriptor: %s", in_fileUri.c_str());
+        int fd;
+        if (in_mode == "w" || in_mode == "rw") {
+            int kOpenFlags;
+            if (in_mode == "w") {
+                // Write-only, create file if non-existent, truncate existing file.
+                kOpenFlags = O_WRONLY | O_CREAT | O_TRUNC;
+            } else {
+                // Read-Write, create if non-existent, no truncate (service will truncate if needed)
+                kOpenFlags = O_RDWR | O_CREAT;
+            }
+            // User R+W permission.
+            constexpr int kFileMode = S_IRUSR | S_IWUSR;
+            fd = open(in_fileUri.c_str(), kOpenFlags, kFileMode);
+        } else {
+            fd = open(in_fileUri.c_str(), O_RDONLY);
+        }
+        _aidl_return->set(fd);
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t in_jobId) override {
+        append(EventTracker::Start(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingPaused(int32_t in_jobId) override {
+        append(EventTracker::Pause(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingResumed(int32_t in_jobId) override {
+        append(EventTracker::Resume(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFinished(
+            int32_t in_jobId,
+            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+        append(Finished(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_jobId,
+                               ::aidl::android::media::TranscodingErrorCode in_errorCode) override {
+        append(Failed(mClientId, in_jobId), in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
+                                      int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
+        return Status::ok();
+    }
+
+    int32_t mClientId;
+};
+
+class MediaTranscodingServiceTestBase : public ::testing::Test {
+public:
+    MediaTranscodingServiceTestBase() { ALOGI("MediaTranscodingServiceTestBase created"); }
+
+    virtual ~MediaTranscodingServiceTestBase() {
+        ALOGI("MediaTranscodingServiceTestBase destroyed");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+        mService = IMediaTranscodingService::fromBinder(binder);
+        if (mService == nullptr) {
+            ALOGE("Failed to connect to the media.trascoding service.");
+            return;
+        }
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(1));
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(2));
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(3));
+    }
+
+    std::shared_ptr<ITranscodingClient> registerOneClient(
+            const char* packageName, const std::shared_ptr<TestClientCallback>& callback,
+            uid_t defaultUid) {
+        uid_t uid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) != NO_ERROR) {
+            uid = defaultUid;
+        }
+
+        ALOGD("registering %s with uid %d", packageName, uid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status = mService->registerClient(callback, kClientName, packageName, uid,
+                                                 kClientUseCallingPid, &client);
+        return status.isOk() ? client : nullptr;
+    }
+
+    void registerMultipleClients() {
+        // Register 3 clients.
+        mClient1 = registerOneClient(kClientPackageA, mClientCallback1, UID(1));
+        EXPECT_TRUE(mClient1 != nullptr);
+
+        mClient2 = registerOneClient(kClientPackageB, mClientCallback2, UID(2));
+        EXPECT_TRUE(mClient2 != nullptr);
+
+        mClient3 = registerOneClient(kClientPackageC, mClientCallback3, UID(3));
+        EXPECT_TRUE(mClient3 != nullptr);
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        Status status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(3, numOfClients);
+    }
+
+    void unregisterMultipleClients() {
+        Status status;
+
+        // Unregister the clients.
+        status = mClient1->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        status = mClient2->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        status = mClient3->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(0, numOfClients);
+    }
+
+    static constexpr bool success = true;
+    static constexpr bool fail = false;
+
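+    // Helpers for exercising the client API. Instantiate with <fail>, e.g.
+    // submit<fail>(...), to assert that the service rejects the call.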
+    template <bool expectation = success>
+    bool submit(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
+                const char* sourceFilePath, const char* destinationFilePath,
+                TranscodingJobPriority priority = TranscodingJobPriority::kNormal,
+                int bitrateBps = -1) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingRequestParcel request;
+        TranscodingJobParcel job;
+
+        request.sourceFilePath = sourceFilePath;
+        request.destinationFilePath = destinationFilePath;
+        request.priority = priority;
+        if (bitrateBps > 0) {
+            request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
+            request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
+        }
+        Status status = client->submitRequest(request, &job, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(job.jobId, jobId);
+        }
+
+        return status.isOk() && (result == shouldSucceed) && (!shouldSucceed || job.jobId == jobId);
+    }
+
+    template <bool expectation = success>
+    bool cancel(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = client->cancelJob(jobId, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getJob(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
+                const char* sourceFilePath, const char* destinationFilePath) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingJobParcel job;
+        Status status = client->getJobWithId(jobId, &job, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(job.jobId, jobId);
+            EXPECT_EQ(job.request.sourceFilePath, sourceFilePath);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed ||
+                (job.jobId == jobId && job.request.sourceFilePath == sourceFilePath &&
+                 job.request.destinationFilePath == destinationFilePath));
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    std::shared_ptr<IMediaTranscodingService> mService;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
+    std::shared_ptr<ITranscodingClient> mClient1;
+    std::shared_ptr<ITranscodingClient> mClient2;
+    std::shared_ptr<ITranscodingClient> mClient3;
+    const char* mTestName;
+};
+
+}  // namespace media
+}  // namespace android
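The helper above is shared by both new test binaries. A condensed sketch of the usage pattern, with an illustrative fixture name and file paths (the real suites below follow the same shape):

    // Sketch of a test built on MediaTranscodingServiceTestBase.
    class MyTranscodingTest : public MediaTranscodingServiceTestBase {};

    TEST_F(MyTranscodingTest, SubmitAndWaitForStart) {
        registerMultipleClients();

        // The first submission on a client is expected to get jobId 0.
        EXPECT_TRUE(submit(mClient1, 0 /*jobId*/, "src.mp4", "dst.mp4"));

        // pop() waits up to the given timeout and returns EventTracker::NoEvent
        // if nothing arrived, so a mismatch shows up as a readable failure.
        EXPECT_EQ(mClientCallback1->pop(1000000 /*timeoutUs*/),
                  EventTracker::Start(CLIENT(1), 0));

        unregisterMultipleClients();
    }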
diff --git a/services/mediatranscoding/tests/README.txt b/services/mediatranscoding/tests/README.txt
new file mode 100644
index 0000000..cde465e
--- /dev/null
+++ b/services/mediatranscoding/tests/README.txt
@@ -0,0 +1,8 @@
+mediatranscodingservice_simulated_tests:
+	Tests media transcoding service with simulated transcoder.
+
+mediatranscodingservice_real_tests:
+	Tests media transcoding service with real transcoder. Uses the same test assets
+	as the MediaTranscoder unit tests. Before running the test, please make sure
+	to push the test assets to the device first:
+	adb push $TOP/frameworks/av/media/libmediatranscoding/tests/assets /data/local/tmp/TranscodingTestAssets
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
old mode 100644
new mode 100755
index bdc0394..d66b340
--- a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -13,6 +13,9 @@
 
 mm
 
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
+
 echo "[==========] installing test apps"
 adb root
 adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppA/arm64/TranscodingUidPolicy_TestAppA.apk
@@ -21,7 +24,19 @@
 
 echo "[==========] waiting for device and sync"
 adb wait-for-device remount && adb sync
-adb shell kill -9 `pid media.transcoding`
 
-#adb shell /data/nativetest64/mediatranscodingservice_tests/mediatranscodingservice_tests
-adb shell /data/nativetest/mediatranscodingservice_tests/mediatranscodingservice_tests
+echo "[==========] running simulated tests"
+adb shell setprop debug.transcoding.simulated_transcoder true
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+adb shell /data/nativetest/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+
+echo "[==========] running real tests"
+adb shell setprop debug.transcoding.simulated_transcoder false
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+adb shell /data/nativetest/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+
+echo "[==========] removing debug properties"
+adb shell setprop debug.transcoding.simulated_transcoder \"\"
+adb shell kill -9 `pid media.transcoding`
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
new file mode 100644
index 0000000..c6368a8
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -0,0 +1,272 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceRealTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to the device first:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 200000;
+constexpr int64_t kJobWithPaddingUs = 10000000 + kPaddingUs;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+constexpr const char* kShortSrcPath =
+        "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+class MediaTranscodingServiceRealTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceRealTest() {}
+
+    void deleteFile(const char* path) { unlink(path); }
+};
+
+TEST_F(MediaTranscodingServiceRealTest, TestInvalidSource) {
+    registerMultipleClients();
+
+    const char* srcPath = "bad_file_uri";
+    const char* dstPath = OUTPATH(TestInvalidSource);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath, dstPath, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Check expected error.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->getLastError(), TranscodingErrorCode::kErrorIO);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestPassthru);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(submit(mClient1, 0, kShortSrcPath, dstPath));
+
+    // Wait for job to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideo);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(
+            submit(mClient1, 0, kShortSrcPath, dstPath, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Wait for job to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel immediately after start.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelImmediately) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelImmediately_Job0);
+    const char* dstPath1 = OUTPATH(TestCancelImmediately_Job1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit one job, should start immediately.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Test cancel job immediately, getJob should fail after cancel.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    // Submit new job, new job should start immediately and finish.
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel in the middle of transcoding.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelWhileRunning) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelWhileRunning_Job0);
+    const char* dstPath1 = OUTPATH(TestCancelWhileRunning_Job1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit two jobs, job 0 should start immediately, job 1 should be queued.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+    EXPECT_TRUE(getJob(mClient1, 1, srcPath1, dstPath1));
+
+    // Job 0 (longtest) shouldn't finish in 1 second.
+    EXPECT_EQ(mClientCallback1->pop(1000000), EventTracker::NoEvent);
+
+    // Now cancel job 0. Job 1 should start immediately and finish.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeSingleClient) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeSingleClient_Job0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeSingleClient_Job1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    // Submit one offline job, should start immediately.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kUnspecified,
+                       kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    // Test get job after starts.
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Submit one realtime job.
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Offline job should pause.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Realtime job should start immediately, and run to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 1, "", ""));
+
+    // Then offline job should resume.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    // Test get job after resume.
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Offline job should finish.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Basic test for pause/resume with two clients, with one job each.
+ * Top app's job should preempt the other app's job.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
+    ALOGD("TestPauseResumeMultiClients starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeMultiClients_Client1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit job to Client1.
+    ALOGD("Submitting job to client1 (app A) ...");
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Client1's job should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Client1's job should continue to run, since Client2 (app B) doesn't have any job.
+    EXPECT_EQ(mClientCallback1->pop(1000000), EventTracker::NoEvent);
+
+    // Submit job to Client2.
+    ALOGD("Submitting job to client2 (app B) ...");
+    EXPECT_TRUE(submit(mClient2, 0, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Client1's job should pause, client2's job should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    // Client2's job should finish, then Client1's job should resume.
+    EXPECT_EQ(mClientCallback2->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(2), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // Client1's job should finish.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestPauseResumeMultiClients finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
new file mode 100644
index 0000000..42b5877
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceSimulatedTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingJobParcel.h>
+#include <aidl/android/media/TranscodingJobPriority.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "MediaTranscodingServiceTestHelper.h"
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+// Note that -1 is valid and means using the calling pid/uid for the service, but only privileged
+// callers may use them. This test is not a privileged caller.
+constexpr int32_t kInvalidClientPid = -5;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientOpPackageName = "";
+
+constexpr int32_t kClientUseCallingUid = IMediaTranscodingService::USE_CALLING_UID;
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kJobWithPaddingUs = SimulatedTranscoder::kJobDurationUs + kPaddingUs;
+
+constexpr const char* kClientOpPackageName = "TestClientPackage";
+
+class MediaTranscodingServiceSimulatedTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceSimulatedTest() {}
+};
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with null callback.
+    Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPid) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kInvalidClientPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClientCallback1, kInvalidClientName,
+                                             kInvalidClientOpPackageName, kClientUseCallingUid,
+                                             kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPackageName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status =
+            mService->registerClient(mClientCallback1, kClientName, kInvalidClientOpPackageName,
+                                     kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterOneClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Check the number of Clients.
+    int32_t numOfClients;
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(1, numOfClients);
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Check the number of Clients.
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(0, numOfClients);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientTwice) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Register the client again and expects failure.
+    std::shared_ptr<ITranscodingClient> client1;
+    status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                      kClientUseCallingUid, kClientUseCallingPid, &client1);
+    EXPECT_FALSE(status.isOk());
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterMultipleClients) {
+    registerMultipleClients();
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestJobIdIndependence) {
+    registerMultipleClients();
+
+    // Submit 2 requests on client1 first.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file", "test_destination_file"));
+
+    // Submit 2 requests on client2, jobId should be independent for each client.
+    EXPECT_TRUE(submit(mClient2, 0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient2, 1, "test_source_file", "test_destination_file"));
+
+    // Cancel all jobs.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(cancel(mClient1, 1));
+    EXPECT_TRUE(cancel(mClient2, 0));
+    EXPECT_TRUE(cancel(mClient2, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelJobs) {
+    registerMultipleClients();
+
+    // Test jobId assignment.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file"));
+
+    // Test submit bad request (no valid sourceFilePath) fails.
+    EXPECT_TRUE(submit<fail>(mClient1, 0, "", ""));
+
+    // Test cancel non-existent job fails.
+    EXPECT_TRUE(cancel<fail>(mClient1, 100));
+
+    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test cancel valid jobId in random order.
+    // Test cancel finished job fails.
+    EXPECT_TRUE(cancel(mClient1, 2));
+    EXPECT_TRUE(cancel<fail>(mClient1, 0));
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    // Test cancel job again fails.
+    EXPECT_TRUE(cancel<fail>(mClient1, 1));
+
+    // Test no more events arriving after cancel.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::NoEvent);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestGetJobs) {
+    registerMultipleClients();
+
+    // Submit 3 requests.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+
+    // Test get jobs by id.
+    EXPECT_TRUE(getJob(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(getJob(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+
+    // Test get job by invalid id fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 100, "", ""));
+    EXPECT_TRUE(getJob<fail>(mClient1, -1, "", ""));
+
+    // Test get job after cancel fails.
+    EXPECT_TRUE(cancel(mClient1, 2));
+    EXPECT_TRUE(getJob<fail>(mClient1, 2, "", ""));
+
+    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    // Test get the remaining job 1.
+    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+
+    // Cancel remaining job 1.
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineJobs) {
+    registerMultipleClients();
+
+    // Submit some offline jobs first.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0",
+                       TranscodingJobPriority::kUnspecified));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1",
+                       TranscodingJobPriority::kUnspecified));
+
+    // Job 0 should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Submit more real-time jobs.
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(submit(mClient1, 3, "test_source_file_3", "test_destination_file_3"));
+
+    // Job 0 should pause immediately and job 2 should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // Job 2 should finish in 2 seconds and job 3 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
+
+    // Cancel job 3 now
+    EXPECT_TRUE(cancel(mClient1, 3));
+
+    // Job 0 should resume and finish in 2 seconds, followed by job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Cancel remaining job 1.
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestClientUseAfterUnregister) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register a client, then unregister.
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test various operations on the client, should fail with ERROR_DISCONNECTED.
+    TranscodingJobParcel job;
+    bool result;
+    status = client->getJobWithId(0, &job, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelJob(0, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    TranscodingRequestParcel request;
+    status = client->submitRequest(request, &job, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicy) {
+    ALOGD("TestTranscodingUidPolicy starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting job to client1 (app A) ...");
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+
+    // Job 0 should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Job 0 should continue and finish in 2 seconds, then job 1 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Submitting job to client2 (app B) ...");
+    EXPECT_TRUE(submit(mClient2, 0, "test_source_file_0", "test_destination_file_0"));
+
+    // Client1's job should pause, client2's job should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Client2's job should pause, client1's job 1 should resume.
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
+
+    // Client1's job 1 should finish in 2 seconds, then its job 2 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // After client1's jobs finish, client2's job should resume.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicy finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
deleted file mode 100644
index babcfb5..0000000
--- a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
+++ /dev/null
@@ -1,708 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for MediaTranscodingService.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaTranscodingServiceTest"
-
-#include <aidl/android/media/BnTranscodingClientCallback.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingClient.h>
-#include <aidl/android/media/ITranscodingClientCallback.h>
-#include <aidl/android/media/TranscodingJobParcel.h>
-#include <aidl/android/media/TranscodingJobPriority.h>
-#include <aidl/android/media/TranscodingRequestParcel.h>
-#include <android-base/logging.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <binder/PermissionController.h>
-#include <cutils/multiuser.h>
-#include <gtest/gtest.h>
-#include <utils/Log.h>
-
-#include <iostream>
-#include <list>
-
-#include "SimulatedTranscoder.h"
-
-namespace android {
-
-namespace media {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingClientCallback;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingClient;
-using aidl::android::media::ITranscodingClientCallback;
-using aidl::android::media::TranscodingJobParcel;
-using aidl::android::media::TranscodingJobPriority;
-using aidl::android::media::TranscodingRequestParcel;
-
-// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
-// use them. This test is not a privilege caller.
-constexpr int32_t kInvalidClientPid = -5;
-constexpr const char* kInvalidClientName = "";
-constexpr const char* kInvalidClientOpPackageName = "";
-
-constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
-constexpr int32_t kClientUseCallingUid = IMediaTranscodingService::USE_CALLING_UID;
-
-constexpr uid_t kClientUid = 5000;
-#define UID(n) (kClientUid + (n))
-
-constexpr int32_t kClientId = 0;
-#define CLIENT(n) (kClientId + (n))
-
-constexpr int64_t kPaddingUs = 200000;
-constexpr int64_t kJobWithPaddingUs = SimulatedTranscoder::kJobDurationUs + kPaddingUs;
-
-constexpr const char* kClientName = "TestClient";
-constexpr const char* kClientOpPackageName = "TestClientPackage";
-constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
-constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
-constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
-constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
-
-static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
-    PermissionController pc;
-    uid = pc.getPackageUid(packageName, 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", String8(packageName).string());
-        return BAD_VALUE;
-    }
-
-    if (userId < 0) {
-        ALOGE("Invalid user: %d", userId);
-        return BAD_VALUE;
-    }
-
-    uid = multiuser_get_uid(userId, uid);
-    return NO_ERROR;
-}
-
-struct ShellHelper {
-    static bool RunCmd(const std::string& cmdStr) {
-        int ret = system(cmdStr.c_str());
-        if (ret != 0) {
-            ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
-            return false;
-        }
-        return true;
-    }
-
-    static bool Start(const char* packageName, const char* activityName) {
-        return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
-                      " &> /dev/null");
-    }
-
-    static bool Stop(const char* packageName) {
-        return RunCmd("am force-stop " + std::string(packageName));
-    }
-};
-
-struct EventTracker {
-    struct Event {
-        enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
-        int64_t clientId;
-        int32_t jobId;
-    };
-
-#define DECLARE_EVENT(action)                              \
-    static Event action(int32_t clientId, int32_t jobId) { \
-        return {Event::action, clientId, jobId};           \
-    }
-
-    DECLARE_EVENT(Start);
-    DECLARE_EVENT(Pause);
-    DECLARE_EVENT(Resume);
-    DECLARE_EVENT(Finished);
-    DECLARE_EVENT(Failed);
-
-    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
-
-    static std::string toString(const Event& event) {
-        std::string eventStr;
-        switch (event.type) {
-        case Event::Start:
-            eventStr = "Start";
-            break;
-        case Event::Pause:
-            eventStr = "Pause";
-            break;
-        case Event::Resume:
-            eventStr = "Resume";
-            break;
-        case Event::Finished:
-            eventStr = "Finished";
-            break;
-        case Event::Failed:
-            eventStr = "Failed";
-            break;
-        default:
-            return "NoEvent";
-        }
-        return "job {" + std::to_string(event.clientId) + ", " + std::to_string(event.jobId) +
-               "}: " + eventStr;
-    }
-
-    // Pop 1 event from front, wait for up to timeoutUs if empty.
-    const Event& pop(int64_t timeoutUs = 0) {
-        std::unique_lock lock(mLock);
-
-        if (mEventQueue.empty() && timeoutUs > 0) {
-            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
-        }
-
-        if (mEventQueue.empty()) {
-            mPoppedEvent = NoEvent;
-        } else {
-            mPoppedEvent = *mEventQueue.begin();
-            mEventQueue.pop_front();
-        }
-
-        return mPoppedEvent;
-    }
-
-    // Push 1 event to back.
-    void append(const Event& event) {
-        ALOGD("%s", toString(event).c_str());
-
-        std::unique_lock lock(mLock);
-
-        mEventQueue.push_back(event);
-        mCondition.notify_one();
-    }
-
-private:
-    std::mutex mLock;
-    std::condition_variable mCondition;
-    Event mPoppedEvent;
-    std::list<Event> mEventQueue;
-};
-
-// Operators for GTest macros.
-bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
-    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
-}
-
-std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
-    str << EventTracker::toString(v);
-    return str;
-}
-
-struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
-    TestClientCallback(int32_t id) : mClientId(id) {
-        ALOGI("TestClientCallback %d Created", mClientId);
-    }
-
-    virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
-
-    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
-                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
-        return Status::ok();
-    }
-
-    Status onTranscodingFinished(
-            int32_t in_jobId,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
-        append(Finished(mClientId, in_jobId));
-        return Status::ok();
-    }
-
-    Status onTranscodingFailed(
-            int32_t in_jobId,
-            ::aidl::android::media::TranscodingErrorCode /* in_errorCode */) override {
-        append(Failed(mClientId, in_jobId));
-        return Status::ok();
-    }
-
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
-        return Status::ok();
-    }
-
-    Status onProgressUpdate(int32_t in_jobId, int32_t in_progress) override {
-        // The progress numbers from the SimulatedTranscoder represents the
-        // event's type in the transcoder.
-        switch (in_progress) {
-        case SimulatedTranscoder::Event::Start:
-            append(EventTracker::Start(mClientId, in_jobId));
-            break;
-        case SimulatedTranscoder::Event::Pause:
-            append(EventTracker::Pause(mClientId, in_jobId));
-            break;
-        case SimulatedTranscoder::Event::Resume:
-            append(EventTracker::Resume(mClientId, in_jobId));
-            break;
-        default:
-            ALOGE("unrecognized progress number %d, ignored by test", in_progress);
-            break;
-        }
-        return Status::ok();
-    }
-
-    int32_t mClientId;
-};
-
-class MediaTranscodingServiceTest : public ::testing::Test {
-public:
-    MediaTranscodingServiceTest() { ALOGI("MediaTranscodingServiceTest created"); }
-
-    ~MediaTranscodingServiceTest() { ALOGI("MediaTranscodingingServiceTest destroyed"); }
-
-    void SetUp() override {
-        // Need thread pool to receive callbacks, otherwise oneway callbacks are
-        // silently ignored.
-        ABinderProcess_startThreadPool();
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(1));
-        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(2));
-        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(3));
-    }
-
-    std::shared_ptr<ITranscodingClient> registerOneClient(
-            const char* packageName, const std::shared_ptr<TestClientCallback>& callback,
-            uid_t defaultUid) {
-        uid_t uid;
-        if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) != NO_ERROR) {
-            uid = defaultUid;
-        }
-
-        ALOGD("registering %s with uid %d", packageName, uid);
-
-        std::shared_ptr<ITranscodingClient> client;
-        Status status = mService->registerClient(callback, kClientName, packageName, uid,
-                                                 kClientUseCallingPid, &client);
-        return status.isOk() ? client : nullptr;
-    }
-
-    void registerMultipleClients() {
-        // Register 3 clients.
-        mClient1 = registerOneClient(kClientPackageA, mClientCallback1, UID(1));
-        EXPECT_TRUE(mClient1 != nullptr);
-
-        mClient2 = registerOneClient(kClientPackageB, mClientCallback2, UID(2));
-        EXPECT_TRUE(mClient2 != nullptr);
-
-        mClient3 = registerOneClient(kClientPackageC, mClientCallback3, UID(3));
-        EXPECT_TRUE(mClient3 != nullptr);
-
-        // Check the number of clients.
-        int32_t numOfClients;
-        Status status = mService->getNumOfClients(&numOfClients);
-        EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(3, numOfClients);
-    }
-
-    void unregisterMultipleClients() {
-        Status status;
-
-        // Unregister the clients.
-        status = mClient1->unregister();
-        EXPECT_TRUE(status.isOk());
-
-        status = mClient2->unregister();
-        EXPECT_TRUE(status.isOk());
-
-        status = mClient3->unregister();
-        EXPECT_TRUE(status.isOk());
-
-        // Check the number of clients.
-        int32_t numOfClients;
-        status = mService->getNumOfClients(&numOfClients);
-        EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(0, numOfClients);
-    }
-
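-    // Expectation tags used as template arguments for the submit/cancel/getJob helpers below.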
-    static constexpr bool success = true;
-    static constexpr bool fail = false;
-
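-    // Submits a transcoding request on the given client. Asserts on the expected outcome and
-    // also returns whether it was met, so calls can be wrapped in EXPECT_TRUE().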
-    template <bool expectation = success>
-    bool submit(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
-                const char* sourceFilePath, const char* destinationFilePath,
-                TranscodingJobPriority priority = TranscodingJobPriority::kNormal) {
-        constexpr bool shouldSucceed = (expectation == success);
-        bool result;
-        TranscodingRequestParcel request;
-        TranscodingJobParcel job;
-
-        request.sourceFilePath = sourceFilePath;
-        request.destinationFilePath = destinationFilePath;
-        request.priority = priority;
-        Status status = client->submitRequest(request, &job, &result);
-
-        EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(result, shouldSucceed);
-        if (shouldSucceed) {
-            EXPECT_EQ(job.jobId, jobId);
-        }
-
-        return status.isOk() && (result == shouldSucceed) && (!shouldSucceed || job.jobId == jobId);
-    }
-
-    template <bool expectation = success>
-    bool cancel(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId) {
-        constexpr bool shouldSucceed = (expectation == success);
-        bool result;
-        Status status = client->cancelJob(jobId, &result);
-
-        EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(result, shouldSucceed);
-
-        return status.isOk() && (result == shouldSucceed);
-    }
-
-    template <bool expectation = success>
-    bool getJob(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
-                const char* sourceFilePath, const char* destinationFilePath) {
-        constexpr bool shouldSucceed = (expectation == success);
-        bool result;
-        TranscodingJobParcel job;
-        Status status = client->getJobWithId(jobId, &job, &result);
-
-        EXPECT_TRUE(status.isOk());
-        EXPECT_EQ(result, shouldSucceed);
-        if (shouldSucceed) {
-            EXPECT_EQ(job.jobId, jobId);
-            EXPECT_EQ(job.request.sourceFilePath, sourceFilePath);
-        }
-
-        return status.isOk() && (result == shouldSucceed) &&
-               (!shouldSucceed ||
-                (job.jobId == jobId && job.request.sourceFilePath == sourceFilePath &&
-                 job.request.destinationFilePath == destinationFilePath));
-    }
-
-    std::shared_ptr<IMediaTranscodingService> mService;
-    std::shared_ptr<TestClientCallback> mClientCallback1;
-    std::shared_ptr<TestClientCallback> mClientCallback2;
-    std::shared_ptr<TestClientCallback> mClientCallback3;
-    std::shared_ptr<ITranscodingClient> mClient1;
-    std::shared_ptr<ITranscodingClient> mClient2;
-    std::shared_ptr<ITranscodingClient> mClient3;
-};
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterNullClient) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    // Register the client with null callback.
-    Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &client);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    // Register the client with the service.
-    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
-                                             kClientUseCallingUid, kInvalidClientPid, &client);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientName) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    // Register the client with the service.
-    Status status = mService->registerClient(mClientCallback1, kInvalidClientName,
-                                             kInvalidClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &client);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    // Register the client with the service.
-    Status status =
-            mService->registerClient(mClientCallback1, kClientName, kInvalidClientOpPackageName,
-                                     kClientUseCallingUid, kClientUseCallingPid, &client);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterOneClient) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &client);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the client.
-    EXPECT_TRUE(client != nullptr);
-
-    // Check the number of clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client.
-    status = client->unregister();
-    EXPECT_TRUE(status.isOk());
-
-    // Check the number of clients.
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(0, numOfClients);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientTwice) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &client);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the client.
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client again and expect failure.
-    std::shared_ptr<ITranscodingClient> client1;
-    status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
-                                      kClientUseCallingUid, kClientUseCallingPid, &client1);
-    EXPECT_FALSE(status.isOk());
-
-    // Unregister the client.
-    status = client->unregister();
-    EXPECT_TRUE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterMultipleClients) {
-    registerMultipleClients();
-    unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestJobIdIndependence) {
-    registerMultipleClients();
-
-    // Submit 2 requests on client1 first.
-    EXPECT_TRUE(submit(mClient1, 0, "test_source_file", "test_destination_file"));
-    EXPECT_TRUE(submit(mClient1, 1, "test_source_file", "test_destination_file"));
-
-    // Submit 2 requests on client2; jobIds should be independent for each client.
-    EXPECT_TRUE(submit(mClient2, 0, "test_source_file", "test_destination_file"));
-    EXPECT_TRUE(submit(mClient2, 1, "test_source_file", "test_destination_file"));
-
-    // Cancel all jobs.
-    EXPECT_TRUE(cancel(mClient1, 0));
-    EXPECT_TRUE(cancel(mClient1, 1));
-    EXPECT_TRUE(cancel(mClient2, 0));
-    EXPECT_TRUE(cancel(mClient2, 1));
-
-    unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestSubmitCancelJobs) {
-    registerMultipleClients();
-
-    // Test jobId assignment.
-    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file"));
-    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file"));
-    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file"));
-
-    // Test submit bad request (no valid sourceFilePath) fails.
-    EXPECT_TRUE(submit<fail>(mClient1, 0, "", ""));
-
-    // Test cancel non-existent job fails.
-    EXPECT_TRUE(cancel<fail>(mClient1, 100));
-
-    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
-    // Test cancel valid jobId in random order.
-    // Test cancel finished job fails.
-    EXPECT_TRUE(cancel(mClient1, 2));
-    EXPECT_TRUE(cancel<fail>(mClient1, 0));
-    EXPECT_TRUE(cancel(mClient1, 1));
-
-    // Test cancel job again fails.
-    EXPECT_TRUE(cancel<fail>(mClient1, 1));
-
-    // Test no more events arriving after cancel.
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::NoEvent);
-
-    unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestGetJobs) {
-    registerMultipleClients();
-
-    // Submit 3 requests.
-    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
-    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
-    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
-
-    // Test get jobs by id.
-    EXPECT_TRUE(getJob(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
-    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
-    EXPECT_TRUE(getJob(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
-
-    // Test get job by invalid id fails.
-    EXPECT_TRUE(getJob<fail>(mClient1, 100, "", ""));
-    EXPECT_TRUE(getJob<fail>(mClient1, -1, "", ""));
-
-    // Test get job after cancel fails.
-    EXPECT_TRUE(cancel(mClient1, 2));
-    EXPECT_TRUE(getJob<fail>(mClient1, 2, "", ""));
-
-    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
-    // Test get job after finish fails.
-    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
-
-    // Test get the remaining job 1.
-    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
-
-    // Cancel remaining job 1.
-    EXPECT_TRUE(cancel(mClient1, 1));
-
-    unregisterMultipleClients();
-}
-
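-// Verifies that offline jobs (kUnspecified priority) yield to real-time jobs and resume
-// once the real-time jobs finish or are cancelled.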
-TEST_F(MediaTranscodingServiceTest, TestSubmitCancelWithOfflineJobs) {
-    registerMultipleClients();
-
-    // Submit some offline jobs first.
-    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0",
-                       TranscodingJobPriority::kUnspecified));
-    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1",
-                       TranscodingJobPriority::kUnspecified));
-
-    // Job 0 should start immediately.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-
-    // Submit more real-time jobs.
-    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
-    EXPECT_TRUE(submit(mClient1, 3, "test_source_file_3", "test_destination_file_3"));
-
-    // Job 0 should pause immediately and job 2 should start.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
-
-    // Job 2 should finish in 2 seconds and job 3 should start.
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
-
-    // Cancel job 3 now.
-    EXPECT_TRUE(cancel(mClient1, 3));
-
-    // Job 0 should resume and finish in 2 seconds, followed by job 1 start.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
-    // Cancel remaining job 1.
-    EXPECT_TRUE(cancel(mClient1, 1));
-
-    unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestClientUseAfterUnregister) {
-    std::shared_ptr<ITranscodingClient> client;
-
-    // Register a client, then unregister.
-    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &client);
-    EXPECT_TRUE(status.isOk());
-
-    status = client->unregister();
-    EXPECT_TRUE(status.isOk());
-
-    // Test various operations on the client; they should fail with ERROR_DISCONNECTED.
-    TranscodingJobParcel job;
-    bool result;
-    status = client->getJobWithId(0, &job, &result);
-    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-
-    status = client->cancelJob(0, &result);
-    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-
-    TranscodingRequestParcel request;
-    status = client->submitRequest(request, &job, &result);
-    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-}
-
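-// Verifies that scheduling follows the foreground app: jobs from the client whose app is on
-// top keep running, while jobs from backgrounded clients are paused and resumed later.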
-TEST_F(MediaTranscodingServiceTest, TestTranscodingUidPolicy) {
-    ALOGD("TestTranscodingUidPolicy starting...");
-
-    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
-    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
-
-    registerMultipleClients();
-
-    ALOGD("Moving app A to top...");
-    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
-
-    // Submit 3 requests.
-    ALOGD("Submitting job to client1 (app A) ...");
-    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
-    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
-    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
-
-    // Job 0 should start immediately.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-
-    ALOGD("Moving app B to top...");
-    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
-
-    // Job 0 should continue and finish in 2 seconds, then job 1 should start.
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
-    ALOGD("Submitting job to client2 (app B) ...");
-    EXPECT_TRUE(submit(mClient2, 0, "test_source_file_0", "test_destination_file_0"));
-
-    // Client1's job should pause, client2's job should start.
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
-    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
-
-    ALOGD("Moving app A back to top...");
-    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
-
-    // Client2's job should pause, client1's job 1 should resume.
-    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
-
-    // Client1's job 1 should finish in 2 seconds, then its job 2 should start.
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
-    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
-
-    // After client1's jobs finish, client2's job should resume.
-    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
-    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
-
-    unregisterMultipleClients();
-
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
-    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
-
-    ALOGD("TestTranscodingUidPolicy finished.");
-}
-
-}  // namespace media
-}  // namespace android