Merge "codec2: do not count padding into allocation's capacity."
diff --git a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
index 6ac3510..089eb1c 100644
--- a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
@@ -207,6 +207,7 @@
     }
 
     infoMap.clear();
+    android::Mutex::Autolock lock(mPlayPolicyLock);
     for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
         infoMap.add(mPlayPolicy.keyAt(i), mPlayPolicy.valueAt(i));
     }
diff --git a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
index aa9b59d..95f15ca 100644
--- a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
@@ -262,7 +262,7 @@
     void initProperties();
     void setPlayPolicy();
 
-    android::Mutex mPlayPolicyLock;
+    mutable android::Mutex mPlayPolicyLock;
     android::KeyedVector<String8, String8> mPlayPolicy;
     android::KeyedVector<String8, String8> mStringProperties;
     android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
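
Editor's note: the header change above marks mPlayPolicyLock as mutable, presumably because the method that takes the new Autolock in DrmPlugin.cpp is declared const; a non-mutable member mutex cannot be locked from a const method. A minimal standalone sketch of the pattern (hypothetical class, assuming the usual libutils headers, not the plugin itself):

#include <utils/KeyedVector.h>
#include <utils/Mutex.h>
#include <utils/String8.h>

// Hypothetical holder: the mutable qualifier lets const accessors take the lock.
class PolicyHolder {
  public:
    size_t policyCount() const {
        android::Mutex::Autolock lock(mLock);   // OK even though this method is const
        return mPolicy.size();
    }
  private:
    mutable android::Mutex mLock;
    android::KeyedVector<android::String8, android::String8> mPolicy;
};
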
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index e6e1f80..c49d5fe 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -50,7 +50,7 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
 
     shared_libs: [
         "android.hardware.drm@1.0",
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index d278633..302dd39 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -37,6 +37,8 @@
     sp<IMemory> hidlMemory = mapMemory(base);
     ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
 
+    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+
     // allow mapMemory to return nullptr
     mSharedBufferMap[bufferId] = hidlMemory;
     return Void();
@@ -94,6 +96,7 @@
         return Void();
     }
 
+    std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
     if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
       _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
                "source decrypt buffer base not set");
@@ -142,12 +145,17 @@
 
     base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
 
-    if (destBuffer.offset + destBuffer.size > destBase->getSize()) {
+    totalSize = 0;
+    if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
+        totalSize > destBase->getSize()) {
+        android_errorWriteLog(0x534e4554, "176444622");
         _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
         return Void();
     }
-    destPtr = static_cast<void *>(base + destination.nonsecureMemory.offset);
+    destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
 
+    // release mSharedBufferLock
+    shared_buffer_lock.unlock();
 
     // Calculate the output buffer size and determine if any subsamples are
     // encrypted.
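
Editor's note: the destination bounds check above replaces "offset + size > capacity", which can wrap around for hostile inputs, with __builtin_add_overflow so the sum is validated before it is compared. A minimal sketch of the same idea outside the plugin (hypothetical helper; only standard C++ plus the GCC/Clang builtin is assumed):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Returns true only if [offset, offset + size) fits inside 'capacity' bytes,
// without the addition being able to overflow.
static bool rangeFits(size_t offset, size_t size, size_t capacity) {
    size_t end = 0;
    if (__builtin_add_overflow(offset, size, &end)) {
        return false;            // offset + size wrapped around
    }
    return end <= capacity;
}

int main() {
    printf("%d\n", rangeFits(16, 32, 64));        // 1: fits
    printf("%d\n", rangeFits(SIZE_MAX, 2, 64));   // 0: overflow is caught
    return 0;
}
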
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index a77759e..6f69110 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -220,6 +220,7 @@
         if (requestString.find(kOfflineLicense) != std::string::npos) {
             std::string emptyResponse;
             std::string keySetIdString(keySetId.begin(), keySetId.end());
+            Mutex::Autolock lock(mFileHandleLock);
             if (!mFileHandle.StoreLicense(keySetIdString,
                     DeviceFiles::kLicenseStateReleasing,
                     emptyResponse)) {
@@ -335,6 +336,7 @@
         }
         *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
                 reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
+        Mutex::Autolock lock(mFileHandleLock);
         if (mFileHandle.LicenseExists(*keySetId)) {
             // collision, regenerate
             ALOGV("Retry generating KeySetId");
@@ -392,6 +394,7 @@
     if (status == Status::OK) {
         if (isOfflineLicense) {
             if (isRelease) {
+                Mutex::Autolock lock(mFileHandleLock);
                 mFileHandle.DeleteLicense(keySetId);
                 mSessionLibrary->destroySession(session);
             } else {
@@ -400,6 +403,7 @@
                     return Void();
                 }
 
+                Mutex::Autolock lock(mFileHandleLock);
                 bool ok = mFileHandle.StoreLicense(
                         keySetId,
                         DeviceFiles::kLicenseStateActive,
@@ -454,6 +458,7 @@
         DeviceFiles::LicenseState licenseState;
         std::string offlineLicense;
         Status status = Status::OK;
+        Mutex::Autolock lock(mFileHandleLock);
         if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
                 &licenseState, &offlineLicense)) {
             ALOGE("Failed to restore offline license");
@@ -576,7 +581,6 @@
 Return<void> DrmPlugin::queryKeyStatus(
         const hidl_vec<uint8_t>& sessionId,
         queryKeyStatus_cb _hidl_cb) {
-
     if (sessionId.size() == 0) {
         // Returns empty key status KeyValue pair
         _hidl_cb(Status::BAD_VALUE, hidl_vec<KeyValue>());
@@ -586,12 +590,14 @@
     std::vector<KeyValue> infoMapVec;
     infoMapVec.clear();
 
+    mPlayPolicyLock.lock();
     KeyValue keyValuePair;
     for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
         keyValuePair.key = mPlayPolicy[i].key;
         keyValuePair.value = mPlayPolicy[i].value;
         infoMapVec.push_back(keyValuePair);
     }
+    mPlayPolicyLock.unlock();
     _hidl_cb(Status::OK, toHidlVec(infoMapVec));
     return Void();
 }
@@ -704,6 +710,8 @@
 }
 
 Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
     std::vector<KeySetId> keySetIds;
     if (mMockError != Status_V1_2::OK) {
@@ -724,6 +732,7 @@
         return toStatus_1_0(mMockError);
     }
     std::string licenseName(keySetId.begin(), keySetId.end());
+    Mutex::Autolock lock(mFileHandleLock);
     if (mFileHandle.DeleteLicense(licenseName)) {
         return Status::OK;
     }
@@ -732,6 +741,8 @@
 
 Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
         getOfflineLicenseState_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::string licenseName(keySetId.begin(), keySetId.end());
     DeviceFiles::LicenseState state;
     std::string license;
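
Editor's note: queryKeyStatus above copies mPlayPolicy between a manually paired lock()/unlock() and only then invokes the HIDL callback. A scope-bound guard achieves the same "snapshot under the lock, call back without it" shape with less risk of missing the unlock on an early return. A hypothetical sketch in plain std::mutex terms (names are illustrative, not the plugin's):

#include <mutex>
#include <string>
#include <utility>
#include <vector>

static std::mutex gPolicyLock;
static std::vector<std::pair<std::string, std::string>> gPolicy;

template <typename Callback>
void queryPolicy(Callback cb) {
    std::vector<std::pair<std::string, std::string>> snapshot;
    {
        std::lock_guard<std::mutex> lock(gPolicyLock);  // released at end of scope
        snapshot = gPolicy;
    }
    cb(snapshot);  // callback runs without holding the lock
}
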
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 051a968..32cf2dc 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -24,11 +24,13 @@
 }
 
 bool MemoryFileSystem::FileExists(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     return result != mMemoryFileSystem.end();
 }
 
 ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         return static_cast<ssize_t>(result->second.getFileSize());
@@ -40,6 +42,7 @@
 
 std::vector<std::string> MemoryFileSystem::ListFiles() const {
     std::vector<std::string> list;
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     for (const auto& filename : mMemoryFileSystem) {
         list.push_back(filename.first);
     }
@@ -48,6 +51,7 @@
 
 size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         std::string serializedHashFile = result->second.getContent();
@@ -61,6 +65,7 @@
 
 size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(key);
@@ -70,6 +75,7 @@
 }
 
 bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(result);
@@ -81,6 +87,7 @@
 }
 
 bool MemoryFileSystem::RemoveAllFiles() {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     mMemoryFileSystem.clear();
     return mMemoryFileSystem.empty();
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
index 8680f0c..23a64fa 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
@@ -20,6 +20,8 @@
 #include <android/hardware/drm/1.2/ICryptoPlugin.h>
 #include <android/hidl/memory/1.0/IMemory.h>
 
+#include <mutex>
+
 #include "ClearKeyTypes.h"
 #include "Session.h"
 #include "Utils.h"
@@ -93,7 +95,7 @@
             const SharedBuffer& source,
             uint64_t offset,
             const DestinationBuffer& destination,
-            decrypt_1_2_cb _hidl_cb);
+            decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // use unique_lock
 
     Return<void> setSharedBufferBase(const hidl_memory& base,
             uint32_t bufferId);
@@ -105,7 +107,8 @@
 private:
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
 
-    std::map<uint32_t, sp<IMemory> > mSharedBufferMap;
+    std::mutex mSharedBufferLock;
+    std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
     sp<Session> mSession;
     Status mInitStatus;
 };
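
Editor's note: GUARDED_BY and NO_THREAD_SAFETY_ANALYSIS above are Clang thread-safety annotations; together with the -Wthread-safety flag added in Android.bp they let the compiler flag unguarded access to mSharedBufferMap at build time. A small standalone sketch of how the annotations behave (hypothetical class, assuming the libbase macros used by this patch and a toolchain that enables the libc++ capability annotations, as the AOSP build does):

#include <android-base/thread_annotations.h>
#include <cstdint>
#include <map>
#include <mutex>

class BufferRegistry {
  public:
    void add(uint32_t id, int fd) {
        std::lock_guard<std::mutex> lock(mLock);
        mBuffers[id] = fd;                 // OK: mLock is held
    }
    // Without the lock, the analysis can warn "requires holding mutex 'mLock'";
    // NO_THREAD_SAFETY_ANALYSIS suppresses the check for this one function.
    size_t sizeUnsafe() NO_THREAD_SAFETY_ANALYSIS {
        return mBuffers.size();
    }
  private:
    std::mutex mLock;
    std::map<uint32_t, int> mBuffers GUARDED_BY(mLock);
};
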
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 076beb8..894985b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -416,7 +416,8 @@
         mMockError = Status_V1_2::OK;
     }
 
-    DeviceFiles mFileHandle;
+    DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
+    Mutex mFileHandleLock;
     Mutex mSecureStopLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
index bcd9fd6..6ac0e2c 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
@@ -5,7 +5,9 @@
 #ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
 #define CLEARKEY_MEMORY_FILE_SYSTEM_H_
 
+#include <android-base/thread_annotations.h>
 #include <map>
+#include <mutex>
 #include <string>
 
 #include "ClearKeyTypes.h"
@@ -49,10 +51,12 @@
     size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
 
  private:
+    mutable std::mutex mMemoryFileSystemLock;
+
     // License file name is made up of a unique keySetId, therefore,
     // the filename can be used as the key to locate licenses in the
     // memory file system.
-    std::map<std::string, MemoryFile> mMemoryFileSystem;
+    std::map<std::string, MemoryFile> mMemoryFileSystem GUARDED_BY(mMemoryFileSystemLock);
 
     std::string GetFileName(const std::string& path);
 
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 76345ae..2fa4f25 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -65,12 +65,14 @@
                                               C2Config::PROFILE_AV1_1}),
                                   C2F(mProfileLevel, level)
                                       .oneOf({
-                                          C2Config::LEVEL_AV1_2,
-                                          C2Config::LEVEL_AV1_2_1,
-                                          C2Config::LEVEL_AV1_2_2,
-                                          C2Config::LEVEL_AV1_3,
-                                          C2Config::LEVEL_AV1_3_1,
-                                          C2Config::LEVEL_AV1_3_2,
+                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
+                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
+                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
+                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
+                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
+                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
+                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
+                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                       })})
                      .withSetter(ProfileLevelSetter, mSize)
                      .build());
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7e9090f..dc4c658 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -484,8 +484,14 @@
     switch (layout.type) {
         case C2PlanarLayout::TYPE_RGB:
         case C2PlanarLayout::TYPE_RGBA: {
+            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
+            {
+                IntfImpl::Lock lock = mIntf->lock();
+                colorAspects = mIntf->getCodedColorAspects_l();
+            }
             ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
-                                  mConversionBuffer.size(), *rView.get());
+                                  mConversionBuffer.size(), *rView.get(),
+                                  colorAspects->matrix, colorAspects->range);
             vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                          mStrideAlign, mConversionBuffer.data());
             break;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index c98b802..c70ffb9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -444,6 +444,9 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
     uint32_t getSyncFramePeriod() const {
         if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
             return 0;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 727b1ff..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1188,11 +1188,14 @@
 
     bool changed = false;
     if (domain & mInputDomain) {
-        sp<AMessage> oldFormat = mInputFormat->dup();
+        sp<AMessage> oldFormat = mInputFormat;
+        mInputFormat = mInputFormat->dup(); // trigger format changed
         mInputFormat->extend(getFormatForDomain(reflected, mInputDomain));
         if (mInputFormat->countEntries() != oldFormat->countEntries()
                 || mInputFormat->changesFrom(oldFormat)->countEntries() > 0) {
             changed = true;
+        } else {
+            mInputFormat = oldFormat; // no change
         }
     }
     if (domain & mOutputDomain) {
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index fc4ee51..8fc2ef5 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -679,17 +679,20 @@
 std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
 #ifdef __LP64__
     static std::once_flag s_checkOnce;
-    static bool s_64bitonly {false};
+    static bool s_is64bitOk {true};
     std::call_once(s_checkOnce, [&](){
         const std::string abi32list =
         ::android::base::GetProperty("ro.product.cpu.abilist32", "");
-        if (abi32list.empty()) {
-            s_64bitonly = true;
+        if (!abi32list.empty()) {
+            int32_t inputSurfaceSetting =
+            ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
+            s_is64bitOk = inputSurfaceSetting != 0;
         }
     });
 
-    if (!s_64bitonly) {
-        ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object");
+    if (!s_is64bitOk) {
+        ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object"\
+              "when debug.stagefright.c2inputsurface is set to 0");
         return nullptr;
     }
 #endif
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 3ddae01..50d600c 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -61,24 +61,24 @@
     /// Input Surface configuration
     struct Config {
         // IN PARAMS (GBS)
-        float mMinFps; // minimum fps (repeat frame to achieve this)
-        float mMaxFps; // max fps (via frame drop)
-        float mCaptureFps; // capture fps
-        float mCodedFps;   // coded fps
-        bool mSuspended; // suspended
-        int64_t mTimeOffsetUs; // time offset (input => codec)
-        int64_t mSuspendAtUs; // suspend/resume time
-        int64_t mStartAtUs; // start time
-        bool mStopped; // stopped
-        int64_t mStopAtUs; // stop time
+        float mMinFps = 0.0; // minimum fps (repeat frame to achieve this)
+        float mMaxFps = 0.0; // max fps (via frame drop)
+        float mCaptureFps = 0.0; // capture fps
+        float mCodedFps = 0.0;   // coded fps
+        bool mSuspended = false; // suspended
+        int64_t mTimeOffsetUs = 0; // time offset (input => codec)
+        int64_t mSuspendAtUs = 0; // suspend/resume time
+        int64_t mStartAtUs = 0; // start time
+        bool mStopped = false; // stopped
+        int64_t mStopAtUs = 0; // stop time
 
         // OUT PARAMS (GBS)
-        int64_t mInputDelayUs; // delay between encoder input and surface input
+        int64_t mInputDelayUs = 0; // delay between encoder input and surface input
 
         // IN PARAMS (CODEC WRAPPER)
-        float mFixedAdjustedFps; // fixed fps via PTS manipulation
-        float mMinAdjustedFps; // minimum fps via PTS manipulation
-        uint64_t mUsage; // consumer usage
+        float mFixedAdjustedFps = 0.0; // fixed fps via PTS manipulation
+        float mMinAdjustedFps = 0.0; // minimum fps via PTS manipulation
+        uint64_t mUsage = 0; // consumer usage
     };
 
     /**
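
Editor's note: the Config change above replaces uninitialized POD members with in-class default member initializers, so a freshly constructed Config no longer carries indeterminate values into the codec when only some fields are filled in. A tiny sketch of the difference (hypothetical struct, standard C++11 behavior):

#include <cstdint>
#include <cstdio>

struct BadConfig  { int64_t timeOffsetUs; };      // indeterminate until explicitly assigned
struct GoodConfig { int64_t timeOffsetUs = 0; };  // default member initializer

int main() {
    GoodConfig c;                                  // c.timeOffsetUs is 0 without any ctor code
    printf("%lld\n", (long long)c.timeOffsetUs);
    return 0;
}
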
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 0966988..5f87c66 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -507,9 +507,21 @@
     };
 }
 
+// Matrix coefficients to convert RGB to planar YUV data.
+// Each sub-array holds the 3x3 coefficients applied to R, G and B.
+static const int16_t bt601Matrix[2][3][3] = {
+    { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+    { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } },  /* RANGE_LIMITED */
+};
+
+static const int16_t bt709Matrix[2][3][3] = {
+    { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
+};
+
 status_t ConvertRGBToPlanarYUV(
         uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
-        const C2GraphicView &src) {
+        const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
     CHECK(dstY != nullptr);
     CHECK((src.width() & 1) == 0);
     CHECK((src.height() & 1) == 0);
@@ -527,28 +539,38 @@
     const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
     const uint8_t *pBlue  = src.data()[C2PlanarLayout::PLANE_B];
 
-#define CLIP3(x,y,z) (((z) < (x)) ? (x) : (((z) > (y)) ? (y) : (z)))
+    // set default range as limited
+    if (colorRange != C2Color::RANGE_FULL && colorRange != C2Color::RANGE_LIMITED) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t (*weights)[3] =
+        (colorMatrix == C2Color::MATRIX_BT709) ?
+            bt709Matrix[colorRange - 1] : bt601Matrix[colorRange - 1];
+    uint8_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 16;
+    uint8_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 255 : 235;
+    uint8_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 255 : 240;
+
+#define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
     for (size_t y = 0; y < src.height(); ++y) {
         for (size_t x = 0; x < src.width(); ++x) {
-            uint8_t red = *pRed;
-            uint8_t green = *pGreen;
-            uint8_t blue = *pBlue;
+            uint8_t r = *pRed;
+            uint8_t g = *pGreen;
+            uint8_t b = *pBlue;
 
-            // using ITU-R BT.601 conversion matrix
-            unsigned luma =
-                CLIP3(0, (((red * 66 + green * 129 + blue * 25) >> 8) + 16), 255);
+            unsigned luma = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2]) >> 8) +
+                             zeroLvl;
 
-            dstY[x] = luma;
+            dstY[x] = CLIP3(zeroLvl, luma, maxLvlLuma);
 
             if ((x & 1) == 0 && (y & 1) == 0) {
-                unsigned U =
-                    CLIP3(0, (((-red * 38 - green * 74 + blue * 112) >> 8) + 128), 255);
+                unsigned U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2]) >> 8) +
+                              128;
 
-                unsigned V =
-                    CLIP3(0, (((red * 112 - green * 94 - blue * 18) >> 8) + 128), 255);
+                unsigned V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2]) >> 8) +
+                              128;
 
-                dstU[x >> 1] = U;
-                dstV[x >> 1] = V;
+                dstU[x >> 1] = CLIP3(zeroLvl, U, maxLvlChroma);
+                dstV[x >> 1] = CLIP3(zeroLvl, V, maxLvlChroma);
             }
             pRed   += layout.planes[C2PlanarLayout::PLANE_R].colInc;
             pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
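
Editor's note: the rewritten loop above selects a BT.601 or BT.709 coefficient set and clamps to the chosen range instead of hard-coding limited-range BT.601. A minimal single-pixel sketch of the same fixed-point arithmetic (hypothetical helper; BT.601 limited-range coefficients copied from the table above, Q8 weights, >> 8, offset, clamp):

#include <algorithm>
#include <cstdint>
#include <cstdio>

static void rgbToYuvBt601Limited(uint8_t r, uint8_t g, uint8_t b,
                                 uint8_t *y, uint8_t *u, uint8_t *v) {
    int luma = ((r * 66  + g * 129 + b * 25)  >> 8) + 16;
    int cb   = ((r * -38 + g * -74 + b * 112) >> 8) + 128;
    int cr   = ((r * 112 + g * -94 + b * -18) >> 8) + 128;
    *y = (uint8_t)std::min(std::max(luma, 16), 235);   // limited-range luma: 16..235
    *u = (uint8_t)std::min(std::max(cb,   16), 240);   // limited-range chroma: 16..240
    *v = (uint8_t)std::min(std::max(cr,   16), 240);
}

int main() {
    uint8_t y, u, v;
    rgbToYuvBt601Limited(255, 0, 0, &y, &u, &v);   // pure red
    printf("Y=%u U=%u V=%u\n", y, u, v);           // about Y=81 U=90 V=239
    return 0;
}
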
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index af29e81..9fa642d 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -18,6 +18,7 @@
 #define CODEC2_BUFFER_UTILS_H_
 
 #include <C2Buffer.h>
+#include <C2Config.h>
 #include <C2ParamDef.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -39,7 +40,8 @@
  */
 status_t ConvertRGBToPlanarYUV(
         uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
-        const C2GraphicView &src);
+        const C2GraphicView &src, C2Color::matrix_t colorMatrix = C2Color::MATRIX_BT601,
+        C2Color::range_t colorRange = C2Color::RANGE_LIMITED);
 
 /**
  * Returns a planar YUV 420 8-bit media image descriptor.
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 189fef0..c89c023 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -35,54 +35,6 @@
 
 using media::VolumeShaper;
 
-enum {
-    DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    SET_DATA_SOURCE_URL,
-    SET_DATA_SOURCE_FD,
-    SET_DATA_SOURCE_STREAM,
-    SET_DATA_SOURCE_CALLBACK,
-    SET_DATA_SOURCE_RTP,
-    SET_BUFFERING_SETTINGS,
-    GET_BUFFERING_SETTINGS,
-    PREPARE_ASYNC,
-    START,
-    STOP,
-    IS_PLAYING,
-    SET_PLAYBACK_SETTINGS,
-    GET_PLAYBACK_SETTINGS,
-    SET_SYNC_SETTINGS,
-    GET_SYNC_SETTINGS,
-    PAUSE,
-    SEEK_TO,
-    GET_CURRENT_POSITION,
-    GET_DURATION,
-    RESET,
-    NOTIFY_AT,
-    SET_AUDIO_STREAM_TYPE,
-    SET_LOOPING,
-    SET_VOLUME,
-    INVOKE,
-    SET_METADATA_FILTER,
-    GET_METADATA,
-    SET_AUX_EFFECT_SEND_LEVEL,
-    ATTACH_AUX_EFFECT,
-    SET_VIDEO_SURFACETEXTURE,
-    SET_PARAMETER,
-    GET_PARAMETER,
-    SET_RETRANSMIT_ENDPOINT,
-    GET_RETRANSMIT_ENDPOINT,
-    SET_NEXT_PLAYER,
-    APPLY_VOLUME_SHAPER,
-    GET_VOLUME_SHAPER_STATE,
-    // Modular DRM
-    PREPARE_DRM,
-    RELEASE_DRM,
-    // AudioRouting
-    SET_OUTPUT_DEVICE,
-    GET_ROUTED_DEVICE_ID,
-    ENABLE_AUDIO_DEVICE_CALLBACK,
-};
-
 // ModDrm helpers
 static status_t readVector(const Parcel& reply, Vector<uint8_t>& vector) {
     uint32_t size = 0;
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 3548a1e..28684d1 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -137,6 +137,56 @@
     virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
     virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
     virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
+protected:
+
+    friend class IMediaPlayerTest;
+    enum {
+        DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
+        SET_DATA_SOURCE_URL,
+        SET_DATA_SOURCE_FD,
+        SET_DATA_SOURCE_STREAM,
+        SET_DATA_SOURCE_CALLBACK,
+        SET_DATA_SOURCE_RTP,
+        SET_BUFFERING_SETTINGS,
+        GET_BUFFERING_SETTINGS,
+        PREPARE_ASYNC,
+        START,
+        STOP,
+        IS_PLAYING,
+        SET_PLAYBACK_SETTINGS,
+        GET_PLAYBACK_SETTINGS,
+        SET_SYNC_SETTINGS,
+        GET_SYNC_SETTINGS,
+        PAUSE,
+        SEEK_TO,
+        GET_CURRENT_POSITION,
+        GET_DURATION,
+        RESET,
+        NOTIFY_AT,
+        SET_AUDIO_STREAM_TYPE,
+        SET_LOOPING,
+        SET_VOLUME,
+        INVOKE,
+        SET_METADATA_FILTER,
+        GET_METADATA,
+        SET_AUX_EFFECT_SEND_LEVEL,
+        ATTACH_AUX_EFFECT,
+        SET_VIDEO_SURFACETEXTURE,
+        SET_PARAMETER,
+        GET_PARAMETER,
+        SET_RETRANSMIT_ENDPOINT,
+        GET_RETRANSMIT_ENDPOINT,
+        SET_NEXT_PLAYER,
+        APPLY_VOLUME_SHAPER,
+        GET_VOLUME_SHAPER_STATE,
+        // Modular DRM
+        PREPARE_DRM,
+        RELEASE_DRM,
+        // AudioRouting
+        SET_OUTPUT_DEVICE,
+        GET_ROUTED_DEVICE_ID,
+        ENABLE_AUDIO_DEVICE_CALLBACK,
+    };
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/tests/mediaplayer/Android.bp b/media/libmedia/tests/mediaplayer/Android.bp
new file mode 100644
index 0000000..0fff7b4
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_test {
+    name: "IMediaPlayerTest",
+    test_suites: ["device-tests", "mts"],
+    gtest: true,
+
+    srcs: [
+        "IMediaPlayerTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+    compile_multilib: "first",
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
new file mode 100644
index 0000000..097e8ef
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IServiceManager.h>
+#include <binder/Parcel.h>
+#include <gtest/gtest.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/IMediaPlayer.h>
+#include <media/IMediaPlayerService.h>
+#include <media/mediaplayer.h>
+
+namespace android {
+
+constexpr uint8_t kMockByteArray[16] = {};
+
+ class IMediaPlayerTest : public testing::Test {
+  protected:
+   static constexpr uint32_t PREPARE_DRM = IMediaPlayer::PREPARE_DRM;
+
+   void SetUp() override {
+    mediaPlayer_ = new MediaPlayer();
+    sp<IServiceManager> serviceManager = defaultServiceManager();
+    sp<IBinder> mediaPlayerService = serviceManager->getService(String16("media.player"));
+    sp<IMediaPlayerService> iMediaPlayerService =
+            IMediaPlayerService::asInterface(mediaPlayerService);
+    iMediaPlayer_ = iMediaPlayerService->create(mediaPlayer_);
+   }
+
+   sp<MediaPlayer> mediaPlayer_;
+   sp<IMediaPlayer> iMediaPlayer_;
+ };
+
+TEST_F(IMediaPlayerTest, PrepareDrmInvalidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length greater than the following session id array. Should be discarded.
+    data.writeUint32(2);
+    data.writeUnpadded(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, BAD_VALUE);
+}
+
+TEST_F(IMediaPlayerTest, PrepareDrmValidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length equal to the length of the following data. The transaction should be valid.
+    data.writeUint32(1);
+    data.write(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, OK);
+}
+}  // namespace android
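
Editor's note: both tests above drive PREPARE_DRM directly through Binder and rely on the service rejecting a claimed length that exceeds the bytes actually present in the Parcel. A minimal sketch of the kind of server-side check being exercised (hypothetical helper; the production parcel readers in IMediaPlayer.cpp may differ in detail):

#include <binder/Parcel.h>
#include <utils/Errors.h>
#include <cstdint>
#include <vector>

// Read a length-prefixed byte array, rejecting a length field that is larger
// than the bytes remaining in the Parcel.
static android::status_t readByteVector(const android::Parcel& data,
                                        std::vector<uint8_t>* out) {
    uint32_t size = 0;
    android::status_t err = data.readUint32(&size);
    if (err != android::OK) return err;
    if (size > data.dataAvail()) {
        return android::BAD_VALUE;                 // claimed length exceeds the payload
    }
    const void* src = data.readInplace(size);
    if (src == nullptr) return android::BAD_VALUE;
    out->assign(static_cast<const uint8_t*>(src),
                static_cast<const uint8_t*>(src) + size);
    return android::OK;
}
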
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 6c4addf..0107c32 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -169,9 +169,7 @@
 }
 
 status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
-    sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, this);
-    msg->setInt64("stop-time-us", stopTimeUs);
-    return postSynchronouslyAndReturnError(msg);
+    return mSource->setStopTimeUs(stopTimeUs);
 }
 
 status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
@@ -189,19 +187,11 @@
 }
 
 void MediaCodecSource::Puller::stop() {
-    bool interrupt = false;
-    {
-        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
-        // stop.
-        Mutexed<Queue>::Locked queue(mQueue);
-        queue->mPulling = false;
-        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
-        queue->flush(); // flush any unprocessed pulled buffers
-    }
-
-    if (interrupt) {
-        interruptSource();
-    }
+    // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
+    // stop.
+    Mutexed<Queue>::Locked queue(mQueue);
+    queue->mPulling = false;
+    queue->flush(); // flush any unprocessed pulled buffers
 }
 
 void MediaCodecSource::Puller::interruptSource() {
@@ -685,9 +675,9 @@
     if (mStopping && reachedEOS) {
         ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
         if (mPuller != NULL) {
-            mPuller->stopSource();
+            mPuller->interruptSource();
         }
-        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
+        ALOGI("source (%s) stopped", mIsVideo ? "video" : "audio");
         // posting reply to everyone that's waiting
         List<sp<AReplyToken>>::iterator it;
         for (it = mStopReplyIDQueue.begin();
@@ -896,7 +886,7 @@
     {
         int32_t eos = 0;
         if (msg->findInt32("eos", &eos) && eos) {
-            ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
+            ALOGI("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
             signalEOS();
             break;
         }
@@ -1114,12 +1104,7 @@
         if (generation != mGeneration) {
              break;
         }
-
-        if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
-            ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
-            mPuller->interruptSource();
-            ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
-        }
+        ALOGD("source (%s) stopping stalled", mIsVideo ? "video" : "audio");
         signalEOS();
         break;
     }
diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/libstagefright/foundation/ABuffer.cpp
index c8965d9..c79384c 100644
--- a/media/libstagefright/foundation/ABuffer.cpp
+++ b/media/libstagefright/foundation/ABuffer.cpp
@@ -67,7 +67,7 @@
 
 void ABuffer::setRange(size_t offset, size_t size) {
     CHECK_LE(offset, mCapacity);
-    CHECK_LE(offset + size, mCapacity);
+    CHECK_LE(size, mCapacity - offset);
 
     mRangeOffset = offset;
     mRangeLength = size;
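
Editor's note: the setRange change rewrites the bounds check so the addition cannot overflow. Once CHECK_LE(offset, mCapacity) holds, mCapacity - offset cannot underflow, whereas offset + size could wrap around and slip past the old check. A small illustration with plain asserts instead of the CHECK macros (hypothetical values):

#include <cassert>
#include <cstddef>
#include <cstdint>

static void setRangeChecked(size_t offset, size_t size, size_t capacity) {
    assert(offset <= capacity);
    // Old form: assert(offset + size <= capacity). With offset = 8 and
    // size = SIZE_MAX - 4, the sum wraps to 3 and the check would pass.
    // New form: cannot wrap once offset <= capacity is established.
    assert(size <= capacity - offset);
}

int main() {
    setRangeChecked(8, 16, 64);                    // fine
    // setRangeChecked(8, SIZE_MAX - 4, 64);       // would now abort instead of passing
    return 0;
}
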
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 92b2b09..e1cc5ec 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -45,6 +45,7 @@
       mAccessUnitDamaged(false),
       mFirstIFrameProvided(false),
       mLastIFrameProvidedAtMs(0),
+      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 }
@@ -123,45 +124,65 @@
     sp<ABuffer> buffer = *queue->begin();
     buffer->meta()->setObject("source", source);
 
+    /**
+     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
+     * But that is not useful as an ingredient of buffering time.
+     * Instead, we calculate the time only for all 'NAL units'.
+     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+    int64_t nowTimeUs = ALooper::GetNowUs();
+    if (rtpTime != mLastRtpTimeJitterDataUs) {
+        source->putBaseJitterData(rtpTime, nowTimeUs);
+        mLastRtpTimeJitterDataUs = rtpTime;
+    }
+    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mFirstSysTime / 1000;
-    const int64_t nowTimeMs = ALooper::GetNowUs() / 1000;
-    const int64_t staticJbTimeMs = source->getStaticJitterTimeMs();
-    const int64_t dynamicJbTimeMs = source->getDynamicJitterTimeMs();
+    const int64_t nowTimeMs = nowTimeUs / 1000;
+    const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+    const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+    const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
     const int64_t clockRate = source->mClockRate;
 
     int64_t playedTimeMs = nowTimeMs - startTimeMs;
     int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
 
     /**
-     * Based on experience in real commercial network services,
+     * Based on experiences in real commercial network services,
      * 300 ms is a maximum heuristic jitter buffer time for video RTP service.
      */
 
     /**
-     * The static(base) jitter is a kind of expected propagation time that we desire.
-     * We can drop packets if it doesn't meet our standards.
-     * If it gets shorter we can get faster response but can lose packets.
+     * The base jitter is an expected additional propagation time.
+     * We can drop packets if the time doesn't meet our standards.
+     * If it gets shorter, we can get a faster response but should drop delayed packets.
      * Expecting range : 50ms ~ 1000ms (But 300 ms would be practical upper bound)
      */
-    const int64_t baseJbTimeRtp = MsToRtp(staticJbTimeMs, clockRate);
+    const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
     /**
      * Dynamic jitter is a variance of interarrival time as defined in the 6.4.1 of RFC 3550.
-     * We can regard this as a tolerance of every moments.
+     * We can regard this as a tolerance applied at each data insertion.
      * Expecting range : 0ms ~ 150ms (Not to over 300 ms practically)
      */
-    const int64_t dynamicJbTimeRtp =                        // Max 150
-            std::min(MsToRtp(dynamicJbTimeMs, clockRate), MsToRtp(150, clockRate));
-    const int64_t jitterTimeRtp = baseJbTimeRtp + dynamicJbTimeRtp; // Total jitter time
+    const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+    const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+    /* Fundamental jitter time */
+    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
+    // Until (T), this assembler waits unconditionally to collect the current NAL unit
     int64_t expiredTimeRtp = rtpTime + jitterTimeRtp;       // When does this buffer expire ? (T)
     int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
-    bool isFirstLineBroken = (diffTimeRtp > jitterTimeRtp); // (T + jitter) is a standard tolerance
 
-    int64_t finalMargin = dynamicJbTimeRtp * JITTER_MULTIPLE;
-    bool isSecondLineBroken = (diffTimeRtp > jitterTimeRtp + finalMargin); // The Maginot line
+    // From (T), this assembler tries to complete the NAL till (T + try)
+    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+    bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+    // After (T + try), it gives a last chance until (T + try + a), with warning messages.
+    int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE;     // Use Dyn as 'a'
+    bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha));   // The Maginot line
 
     if (mShowQueue && mShowQueueCnt < 20) {
         showCurrentQueue(queue);
@@ -179,20 +200,20 @@
 
     if (isFirstLineBroken) {
         if (isSecondLineBroken) {
-            ALOGW("buffer too late ... \t Diff in Jb=%lld \t "
+            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %lld + (%lld * %.3f)",
-                    (long long)(diffTimeRtp),
+                    "JitterMs %d + (%d + %d * %.3f)",
+                    (long long)diffTimeRtp, (long long)totalDiffTimeMs,
                     buffer->int32Data(), mNextExpectedSeqNo,
-                    (long long)staticJbTimeMs, (long long)dynamicJbTimeMs, JITTER_MULTIPLE + 1);
+                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
             printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
 
             mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         }  else {
-            ALOGW("=== WARNING === buffer arrived after %lld + %lld = %lld ms === WARNING === ",
-                    (long long)staticJbTimeMs, (long long)dynamicJbTimeMs,
-                    (long long)RtpToMs(jitterTimeRtp, clockRate));
+            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
         }
     }
 
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 954086c..8d19773 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -48,6 +48,7 @@
     bool mAccessUnitDamaged;
     bool mFirstIFrameProvided;
     uint64_t mLastIFrameProvidedAtMs;
+    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
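
Editor's note: the reworked timing in AAVCAssembler above keeps three thresholds per NAL unit: wait unconditionally until T = rtpTime + jitterTimeRtp, keep trying (with ALOGW) until T + try, and only give up with ALOGE past T + try + a. A small worked sketch of the millisecond arithmetic, using assumed sample values rather than data from a real stream:

#include <algorithm>
#include <cstdio>

int main() {
    // Assumed sample inputs (illustrative only).
    const int staticJitterTimeMs  = 100;  // configured static jitter buffer
    const int baseJitterTimeMs    = 80;   // smoothed propagation-delay estimate
    const int dynamicJitterTimeMs = 20;   // RFC 3550 interarrival jitter

    const int jitterTimeMs    = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
    const int dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
    const int tryJbTimeMs     = baseJitterTimeMs / 2 + dynamicJbTimeMs;

    printf("wait until T       = rtpTime + %d ms\n", jitterTimeMs);   // 100 ms
    printf("keep trying until  = T + %d ms\n", tryJbTimeMs);          // 60 ms more
    printf("give up after      = T + %d ms + JITTER_MULTIPLE * %d ms\n",
           tryJbTimeMs, dynamicJbTimeMs);
    return 0;
}
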
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index cd60203..d32e85d 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -52,6 +52,7 @@
       mAccessUnitDamaged(false),
       mFirstIFrameProvided(false),
       mLastIFrameProvidedAtMs(0),
+      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 
@@ -133,45 +134,65 @@
     sp<ABuffer> buffer = *queue->begin();
     buffer->meta()->setObject("source", source);
 
+    /**
+     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
+     * But that is not useful as an ingredient of buffering time.
+     * Instead, we calculate the time only for all 'NAL units'.
+     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+    int64_t nowTimeUs = ALooper::GetNowUs();
+    if (rtpTime != mLastRtpTimeJitterDataUs) {
+        source->putBaseJitterData(rtpTime, nowTimeUs);
+        mLastRtpTimeJitterDataUs = rtpTime;
+    }
+    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mFirstSysTime / 1000;
-    const int64_t nowTimeMs = ALooper::GetNowUs() / 1000;
-    const int64_t staticJbTimeMs = source->getStaticJitterTimeMs();
-    const int64_t dynamicJbTimeMs = source->getDynamicJitterTimeMs();
+    const int64_t nowTimeMs = nowTimeUs / 1000;
+    const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+    const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+    const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
     const int64_t clockRate = source->mClockRate;
 
     int64_t playedTimeMs = nowTimeMs - startTimeMs;
     int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
 
     /**
-     * Based on experience in real commercial network services,
+     * Based on experiences in real commercial network services,
      * 300 ms is a maximum heuristic jitter buffer time for video RTP service.
      */
 
     /**
-     * The static(base) jitter is a kind of expected propagation time that we desire.
-     * We can drop packets if it doesn't meet our standards.
-     * If it gets shorter we can get faster response but can lose packets.
+     * The base jitter is an expected additional propagation time.
+     * We can drop packets if the time doesn't meet our standards.
+     * If it gets shorter, we can get a faster response but should drop delayed packets.
      * Expecting range : 50ms ~ 1000ms (But 300 ms would be practical upper bound)
      */
-    const int64_t baseJbTimeRtp = MsToRtp(staticJbTimeMs, clockRate);
+    const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
     /**
      * Dynamic jitter is a variance of interarrival time as defined in the 6.4.1 of RFC 3550.
-     * We can regard this as a tolerance of every moments.
+     * We can regard this as a tolerance applied at each data insertion.
      * Expecting range : 0ms ~ 150ms (Not to over 300 ms practically)
      */
-    const int64_t dynamicJbTimeRtp =                        // Max 150
-            std::min(MsToRtp(dynamicJbTimeMs, clockRate), MsToRtp(150, clockRate));
-    const int64_t jitterTimeRtp = baseJbTimeRtp + dynamicJbTimeRtp; // Total jitter time
+    const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+    const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+    /* Fundamental jitter time */
+    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
+    // Until (T), this assembler waits unconditionally to collect the current NAL unit
     int64_t expiredTimeRtp = rtpTime + jitterTimeRtp;       // When does this buffer expire ? (T)
     int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
-    bool isFirstLineBroken = (diffTimeRtp > jitterTimeRtp); // (T + jitter) is a standard tolerance
 
-    int64_t finalMargin = dynamicJbTimeRtp * JITTER_MULTIPLE;
-    bool isSecondLineBroken = (diffTimeRtp > jitterTimeRtp + finalMargin); // The Maginot line
+    // From (T), this assembler tries to complete the NAL till (T + try)
+    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+    bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+    // After (T + try), it gives a last chance until (T + try + a), with warning messages.
+    int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE;     // Use Dyn as 'a'
+    bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha));   // The Maginot line
 
     if (mShowQueueCnt < 20) {
         showCurrentQueue(queue);
@@ -189,20 +210,20 @@
 
     if (isFirstLineBroken) {
         if (isSecondLineBroken) {
-            ALOGW("buffer too late ... \t Diff in Jb=%lld \t "
+            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %lld + (%lld * %.3f)",
-                    (long long)(diffTimeRtp),
+                    "JitterMs %d + (%d + %d * %.3f)",
+                    (long long)diffTimeRtp, (long long)totalDiffTimeMs,
                     buffer->int32Data(), mNextExpectedSeqNo,
-                    (long long)staticJbTimeMs, (long long)dynamicJbTimeMs, JITTER_MULTIPLE + 1);
+                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
             printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
 
             mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         }  else {
-            ALOGW("=== WARNING === buffer arrived after %lld + %lld = %lld ms === WARNING === ",
-                    (long long)staticJbTimeMs, (long long)dynamicJbTimeMs,
-                    (long long)RtpToMs(jitterTimeRtp, clockRate));
+            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
         }
     }
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index e64b661..68777a7 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -49,6 +49,7 @@
     bool mAccessUnitDamaged;
     bool mFirstIFrameProvided;
     uint64_t mLastIFrameProvidedAtMs;
+    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 9509377..33c85a7 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -723,7 +723,6 @@
     buffer->setInt32Data(u16at(&data[2]));
     buffer->setRange(payloadOffset, size - payloadOffset);
 
-    source->putDynamicJitterData(rtpTime, s->mLastPollTimeUs);
     source->processRTPPacket(buffer);
 
     return OK;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 402dc27..8787d65 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -146,6 +146,7 @@
         mFirstSsrc = ssrc;
         ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
                 mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
+        mJitterCalc->init(mFirstRtpTime, mFirstSysTime, 0, mStaticJbTimeMs * 1000);
         mQueue.push_back(buffer);
         return true;
     }
@@ -331,7 +332,7 @@
     data[18] = (mHighestSeqNumber >> 8) & 0xff;
     data[19] = mHighestSeqNumber & 0xff;
 
-    uint32_t jitterTime = getDynamicJitterTimeMs() * mClockRate / 1000;
+    uint32_t jitterTime = 0;
     data[20] = jitterTime >> 24;    // Interarrival jitter
     data[21] = (jitterTime >> 16) & 0xff;
     data[22] = (jitterTime >> 8) & 0xff;
@@ -518,20 +519,28 @@
     mIssueFIRRequests = enable;
 }
 
-uint32_t ARTPSource::getStaticJitterTimeMs() {
+int32_t ARTPSource::getStaticJitterTimeMs() {
     return mStaticJbTimeMs;
 }
 
-uint32_t ARTPSource::getDynamicJitterTimeMs() {
-    return mJitterCalc->getJitterMs();
+int32_t ARTPSource::getBaseJitterTimeMs() {
+    return mJitterCalc->getBaseJitterMs();
+}
+
+int32_t ARTPSource::getInterArrivalJitterTimeMs() {
+    return mJitterCalc->getInterArrivalJitterMs();
 }
 
 void ARTPSource::setStaticJitterTimeMs(const uint32_t jbTimeMs) {
     mStaticJbTimeMs = jbTimeMs;
 }
 
-void ARTPSource::putDynamicJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putData(timeStamp, arrivalTime);
+void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+    mJitterCalc->putBaseData(timeStamp, arrivalTime);
+}
+
+void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+    mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
 }
 
 bool ARTPSource::isNeedToEarlyNotify() {
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index 56011d3..0edff23 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -70,10 +70,12 @@
     void setSelfID(const uint32_t selfID);
     void setPeriodicFIR(bool enable);
 
-    uint32_t getStaticJitterTimeMs();
-    uint32_t getDynamicJitterTimeMs();
+    int32_t getStaticJitterTimeMs();
+    int32_t getBaseJitterTimeMs();
+    int32_t getInterArrivalJitterTimeMs();
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
-    void putDynamicJitterData(uint32_t timeStamp, int64_t arrivalTime);
+    void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
+    void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
 
     bool isNeedToEarlyNotify();
     void notifyPktInfo(int32_t bitrate, bool isRegular);
@@ -104,7 +106,7 @@
     List<sp<ABuffer> > mQueue;
     sp<ARTPAssembler> mAssembler;
 
-    uint32_t mStaticJbTimeMs;
+    int32_t mStaticJbTimeMs;
     sp<JitterCalc> mJitterCalc;
 
     typedef struct infoNACK {
diff --git a/media/libstagefright/rtsp/JitterCalculator.cpp b/media/libstagefright/rtsp/JitterCalculator.cpp
index 466171c..93b5a83 100644
--- a/media/libstagefright/rtsp/JitterCalculator.cpp
+++ b/media/libstagefright/rtsp/JitterCalculator.cpp
@@ -25,45 +25,63 @@
 
 JitterCalc::JitterCalc(int32_t clockRate)
     : mClockRate(clockRate) {
-    init();
+    init(0, 0, 0, 0);
 }
 
-void JitterCalc::init() {
-    mJitterValueUs = 0;
-    mLastTimeStamp = 0;
-    mLastArrivalTimeUs = 0;
+void JitterCalc::init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter) {
+    mFirstTimeStamp = rtpTime;
+    mLastTimeStamp = rtpTime;
+    mFirstArrivalTimeUs = arrivalTimeUs;
+    mLastArrivalTimeUs = arrivalTimeUs;
+
+    mBaseJitterUs = base;
+    mInterArrivalJitterUs = inter;
 }
 
-void JitterCalc::putData(int64_t rtpTime, int64_t arrivalTimeUs) {
-    if (mLastTimeStamp == 0) {
-        mLastTimeStamp = rtpTime;
-        mLastArrivalTimeUs = arrivalTimeUs;
-    }
-
+void JitterCalc::putBaseData(int64_t rtpTime, int64_t arrivalTimeUs) {
+    // A RTP time wraps around after UINT32_MAX. We must consider this case.
     const int64_t UINT32_MSB = 0x80000000;
+    int64_t overflowMask = (mFirstTimeStamp & UINT32_MSB & ~rtpTime) << 1;
+    int64_t tempRtpTime = overflowMask | rtpTime;
+
+    // The base jitter estimate can be implemented in various ways
+    int64_t scheduledTimeUs = (tempRtpTime - (int64_t)mFirstTimeStamp) * 1000000ll / mClockRate;
+    int64_t elapsedTimeUs = arrivalTimeUs - mFirstArrivalTimeUs;
+    int64_t correctionTimeUs = elapsedTimeUs - scheduledTimeUs; // additional propagation delay;
+    mBaseJitterUs = (mBaseJitterUs * 15 + correctionTimeUs) / 16;
+    ALOGV("BaseJitterUs : %lld \t\t correctionTimeUs : %lld",
+            (long long)mBaseJitterUs, (long long)correctionTimeUs);
+}
+
+void JitterCalc::putInterArrivalData(int64_t rtpTime, int64_t arrivalTimeUs) {
+    const int64_t UINT32_MSB = 0x80000000;
+    int64_t tempRtpTime = rtpTime;
     int64_t tempLastTimeStamp = mLastTimeStamp;
+
     // A RTP time wraps around after UINT32_MAX. We must consider this case.
     int64_t overflowMask = (mLastTimeStamp ^ rtpTime) & UINT32_MSB;
-    rtpTime |= ((overflowMask & ~rtpTime) << 1);
+    tempRtpTime |= ((overflowMask & ~rtpTime) << 1);
     tempLastTimeStamp |= ((overflowMask & ~mLastTimeStamp) << 1);
-    ALOGV("Raw stamp \t\t now %llx \t\t last %llx",
-            (long long)rtpTime, (long long)tempLastTimeStamp);
-
-    int64_t diffTimeStampUs = abs(rtpTime - tempLastTimeStamp) * 1000000ll / mClockRate;
-    int64_t diffArrivalUs = abs(arrivalTimeUs - mLastArrivalTimeUs);
-    ALOGV("diffTimeStampus %lld \t\t diffArrivalUs %lld",
-            (long long)diffTimeStampUs, (long long)diffArrivalUs);
 
     // 6.4.1 of RFC3550 defines this interarrival jitter value.
-    mJitterValueUs = (mJitterValueUs * 15 + abs(diffTimeStampUs - diffArrivalUs)) / 16;
-    ALOGV("JitterUs %lld", (long long)mJitterValueUs);
+    int64_t diffTimeStampUs = abs(tempRtpTime - tempLastTimeStamp) * 1000000ll / mClockRate;
+    int64_t diffArrivalUs = arrivalTimeUs - mLastArrivalTimeUs; // Can't be negative
+    ALOGV("diffTimeStampUs %lld \t\t diffArrivalUs %lld",
+            (long long)diffTimeStampUs, (long long)diffArrivalUs);
+
+    int64_t varianceUs = diffArrivalUs - diffTimeStampUs;
+    mInterArrivalJitterUs = (mInterArrivalJitterUs * 15 + abs(varianceUs)) / 16;
 
     mLastTimeStamp = (uint32_t)rtpTime;
     mLastArrivalTimeUs = arrivalTimeUs;
 }
 
-uint32_t JitterCalc::getJitterMs() {
-    return mJitterValueUs / 1000;
+int32_t JitterCalc::getBaseJitterMs() {
+    return mBaseJitterUs / 1000;
+}
+
+int32_t JitterCalc::getInterArrivalJitterMs() {
+    return mInterArrivalJitterUs / 1000;
 }
 
 }   // namespace android
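
Editor's note: putInterArrivalData above implements the interarrival jitter estimator from RFC 3550 section 6.4.1: for consecutive packets i-1 and i, D = (R_i - R_i-1) - (S_i - S_i-1) and J = J + (|D| - J)/16 (equivalently (15*J + |D|)/16 as written in the patch), while putBaseData keeps a similarly smoothed estimate of extra propagation delay relative to the first packet. A minimal standalone sketch of the RFC 3550 update only (hypothetical class, microsecond units, no timestamp wraparound handling):

#include <cstdint>
#include <cstdlib>

class InterArrivalJitter {
  public:
    explicit InterArrivalJitter(int32_t clockRate) : mClockRate(clockRate) {}

    void put(int64_t rtpTime, int64_t arrivalTimeUs) {
        if (mHasLast) {
            int64_t transitDeltaUs =
                    (arrivalTimeUs - mLastArrivalUs) -
                    (rtpTime - mLastRtpTime) * 1000000ll / mClockRate;  // D(i-1, i)
            mJitterUs += (std::abs(transitDeltaUs) - mJitterUs) / 16;   // J += (|D| - J)/16
        }
        mLastRtpTime = rtpTime;
        mLastArrivalUs = arrivalTimeUs;
        mHasLast = true;
    }

    int64_t jitterMs() const { return mJitterUs / 1000; }

  private:
    const int32_t mClockRate;   // RTP timestamp ticks per second, e.g. 90000 for video
    bool mHasLast = false;
    int64_t mLastRtpTime = 0;
    int64_t mLastArrivalUs = 0;
    int64_t mJitterUs = 0;
};
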
diff --git a/media/libstagefright/rtsp/JitterCalculator.h b/media/libstagefright/rtsp/JitterCalculator.h
index 03e43ff..ff36f1f 100644
--- a/media/libstagefright/rtsp/JitterCalculator.h
+++ b/media/libstagefright/rtsp/JitterCalculator.h
@@ -28,15 +28,22 @@
     // Time Stamp per Second
     const int32_t mClockRate;
 
-    uint32_t mJitterValueUs;
+    uint32_t mFirstTimeStamp;
     uint32_t mLastTimeStamp;
+    int64_t mFirstArrivalTimeUs;
     int64_t mLastArrivalTimeUs;
 
-    void init();
+    int32_t mBaseJitterUs;
+    int32_t mInterArrivalJitterUs;
+
 public:
     JitterCalc(int32_t clockRate);
-    void putData(int64_t rtpTime, int64_t arrivalTime);
-    uint32_t getJitterMs();
+
+    void init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter);
+    void putInterArrivalData(int64_t rtpTime, int64_t arrivalTime);
+    void putBaseData(int64_t rtpTime, int64_t arrivalTimeUs);
+    int32_t getBaseJitterMs();
+    int32_t getInterArrivalJitterMs();
 };
 
 }   // namespace android
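
The reworked header splits the estimate in two: a base jitter tracking drift against the first packet, and the RFC 3550 interarrival jitter. A hedged sketch of how a receiver might drive the new interface follows; the caller is hypothetical and the real call sites are not part of this excerpt.

    #include <cstdint>
    #include "JitterCalculator.h"  // android::JitterCalc; include path is an assumption

    void onRtpPacket(android::JitterCalc &jitter, uint32_t rtpTime,
                     int64_t arrivalTimeUs, bool isFirstPacket) {
        if (isFirstPacket) {
            // Seed both estimators from the first packet, starting with zero jitter.
            jitter.init(rtpTime, arrivalTimeUs, 0 /* base */, 0 /* inter */);
            return;
        }
        jitter.putBaseData(rtpTime, arrivalTimeUs);
        jitter.putInterArrivalData(rtpTime, arrivalTimeUs);

        // Millisecond readouts, e.g. for RTCP receiver reports or adaptive buffering.
        int32_t baseMs = jitter.getBaseJitterMs();
        int32_t interMs = jitter.getInterArrivalJitterMs();
        (void)baseMs;
        (void)interMs;
    }
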
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 65e74e6..bcbc7fc 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -85,9 +85,6 @@
     dictionary: "dictionaries/formats.dict",
     defaults: ["libstagefright_fuzzer_defaults"],
     static_libs: [
-        "libstagefright_webm",
         "libdatasource",
-        "libstagefright_esds",
-        "libogg",
     ],
 }
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59d74de..819e146 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -39,10 +39,9 @@
     static std::atomic<int> curAudioHalPids = 0;
 
     if (update) {
-        audioHalPids[(curAudioHalPids + 1) % kNumAudioHalPidsVectors] = *pids;
-        curAudioHalPids++;
+        audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
     } else {
-        *pids = audioHalPids[curAudioHalPids];
+        *pids = audioHalPids[curAudioHalPids % kNumAudioHalPidsVectors];
     }
 }
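
The TimeCheck fix folds the increment into the write-side index and wraps the read-side index as well, so the growing atomic counter can never index past the array. Below is a minimal sketch of that slot-rotation pattern outside of TimeCheck; the names are illustrative and counter overflow is ignored, as in the pattern being illustrated.

    #include <array>
    #include <atomic>
    #include <vector>
    #include <sys/types.h>

    namespace {
    constexpr int kNumSlots = 3;
    std::array<std::vector<pid_t>, kNumSlots> gSlots;
    std::atomic<int> gCur{0};

    // Writer: publish into the slot after the one readers currently use, then advance.
    void publish(const std::vector<pid_t> &pids) {
        gSlots[(gCur++ + 1) % kNumSlots] = pids;
    }

    // Reader: always reduce the counter modulo the slot count before indexing.
    std::vector<pid_t> snapshot() {
        return gSlots[gCur % kNumSlots];
    }
    }  // namespace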
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 9770054..d26a601 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -226,10 +226,23 @@
 
     sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
+    // presentationComplete is checked by frame count (mixed tracks).
     // framesWritten is cumulative, never reset, and is shared all tracks
     // audioHalFrames is derived from output latency
-    // FIXME parameters not needed, could get them from the thread
     bool presentationComplete(int64_t framesWritten, size_t audioHalFrames);
+
+    // presentationComplete is checked by elapsed time (direct tracks).
+    bool presentationComplete(uint32_t latencyMs);
+
+    void resetPresentationComplete() {
+        mPresentationCompleteFrames = 0;
+        mPresentationCompleteTimeNs = 0;
+    }
+
+    // notifyPresentationComplete is called when presentationComplete() detects
+    // that the track is finished stopping.
+    void notifyPresentationComplete();
+
     void signalClientFlag(int32_t flag);
 
     /** Set that a metadata has changed and needs to be notified to backend. Thread safe. */
@@ -262,9 +275,6 @@
     int32_t             *mAuxBuffer;
     int                 mAuxEffectId;
     bool                mHasVolumeController;
-    size_t              mPresentationCompleteFrames; // number of frames written to the
-                                    // audio HAL when this track will be fully rendered
-                                    // zero means not monitoring
 
     // access these three variables only when holding thread lock.
     LinearMap<int64_t> mFrameMap;           // track frame to server frame mapping
@@ -300,6 +310,14 @@
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
+    size_t              mPresentationCompleteFrames = 0; // (Used for Mixed tracks)
+                                    // The number of frames written to the
+                                    // audio HAL when this track is considered fully rendered.
+                                    // Zero means not monitoring.
+    int64_t             mPresentationCompleteTimeNs = 0; // (Used for Direct tracks)
+                                    // The time when this track is considered fully rendered.
+                                    // Zero means not monitoring.
+
     // The following fields are only for fast tracks, and should be in a subclass
     int                 mFastIndex; // index within FastMixerState::mFastTracks[];
                                     // either mFastIndex == -1 if not isFastTrack()
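
The new time-based path declared above amounts to latching a deadline of now plus the reported latency on the first check and reporting completion once the clock passes it, with resetPresentationComplete() re-arming both paths. A standalone sketch of that deadline latch under those assumptions follows; this is not the AudioFlinger code, and it omits the mSpeed scaling and thread locking mentioned later in the diff.

    #include <cstdint>
    #include <ctime>

    // Illustrative deadline latch mirroring the time-based presentationComplete path.
    class PresentationDeadline {
        int64_t mDeadlineNs = 0;  // 0 means not armed
        static int64_t nowNs() {
            timespec ts;
            clock_gettime(CLOCK_MONOTONIC, &ts);
            return int64_t(ts.tv_sec) * 1000000000ll + ts.tv_nsec;
        }
    public:
        // Returns true once at least latencyMs has elapsed since the first call.
        bool complete(uint32_t latencyMs) {
            if (mDeadlineNs == 0) {
                mDeadlineNs = nowNs() + int64_t(latencyMs) * 1000000ll;
            }
            return nowNs() >= mDeadlineNs;
        }
        void reset() { mDeadlineNs = 0; }
    };
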
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 590b379..61108f3 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2568,7 +2568,7 @@
         }
 
         track->mResetDone = false;
-        track->mPresentationCompleteFrames = 0;
+        track->resetPresentationComplete();
         mActiveTracks.add(track);
         sp<EffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
@@ -5958,16 +5958,8 @@
                     track->isStopping_2() || track->isPaused()) {
                 // We have consumed all the buffers of this track.
                 // Remove it from the list of active tracks.
-                size_t audioHALFrames;
-                if (audio_has_proportional_frames(mFormat)) {
-                    audioHALFrames = (latency_l() * mSampleRate) / 1000;
-                } else {
-                    audioHALFrames = 0;
-                }
-
-                int64_t framesWritten = mBytesWritten / mFrameSize;
                 if (mStandby || !last ||
-                        track->presentationComplete(framesWritten, audioHALFrames) ||
+                        track->presentationComplete(latency_l()) ||
                         track->isPaused() || mHwPaused) {
                     if (track->isStopping_2()) {
                         track->mState = TrackBase::STOPPED;
@@ -6541,14 +6533,7 @@
                 // Drain has completed or we are in standby, signal presentation complete
                 if (!(mDrainSequence & 1) || !last || mStandby) {
                     track->mState = TrackBase::STOPPED;
-                    uint32_t latency = 0;
-                    status_t result = mOutput->stream->getLatency(&latency);
-                    ALOGE_IF(result != OK,
-                            "Error when retrieving output stream latency: %d", result);
-                    size_t audioHALFrames = (latency * mSampleRate) / 1000;
-                    int64_t framesWritten =
-                            mBytesWritten / mOutput->getFrameSize();
-                    track->presentationComplete(framesWritten, audioHALFrames);
+                    track->presentationComplete(latency_l());
                     track->reset();
                     tracksToRemove->add(track);
                     // OFFLOADED stop resets frame counts.
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index af261e9..e98a1a1 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -632,7 +632,6 @@
     mMainBuffer(thread->sinkBuffer()),
     mAuxBuffer(NULL),
     mAuxEffectId(0), mHasVolumeController(false),
-    mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
     mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
@@ -1433,6 +1432,7 @@
     mAuxBuffer = buffer;
 }
 
+// presentationComplete checked by frame count, used by mixed tracks.
 bool AudioFlinger::PlaybackThread::Track::presentationComplete(
         int64_t framesWritten, size_t audioHalFrames)
 {
@@ -1451,30 +1451,71 @@
             (long long)mPresentationCompleteFrames, (long long)framesWritten);
     if (mPresentationCompleteFrames == 0) {
         mPresentationCompleteFrames = framesWritten + audioHalFrames;
-        ALOGV("%s(%d): presentationComplete() reset:"
+        ALOGV("%s(%d): set:"
                 " mPresentationCompleteFrames %lld audioHalFrames %zu",
                 __func__, mId,
                 (long long)mPresentationCompleteFrames, audioHalFrames);
     }
 
     bool complete;
-    if (isOffloaded()) {
-        complete = true;
-    } else if (isDirect() || isFastTrack()) { // these do not go through linear map
+    if (isFastTrack()) { // does not go through linear map
         complete = framesWritten >= (int64_t) mPresentationCompleteFrames;
+        ALOGV("%s(%d): %s framesWritten:%lld  mPresentationCompleteFrames:%lld",
+                __func__, mId, (complete ? "complete" : "waiting"),
+                (long long) framesWritten, (long long) mPresentationCompleteFrames);
     } else {  // Normal tracks, OutputTracks, and PatchTracks
         complete = framesWritten >= (int64_t) mPresentationCompleteFrames
                 && mAudioTrackServerProxy->isDrained();
     }
 
     if (complete) {
-        triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
-        mAudioTrackServerProxy->setStreamEndDone();
+        notifyPresentationComplete();
         return true;
     }
     return false;
 }
 
+// presentationComplete checked by elapsed time, used by direct and offloaded tracks.
+bool AudioFlinger::PlaybackThread::Track::presentationComplete(uint32_t latencyMs)
+{
+    // For Offloaded or Direct tracks.
+
+    // For a direct track, presentation completion is checked against elapsed time.
+
+    // For an offloaded track the HAL+h/w delay is variable so a HAL drain() is used
+    // to detect when all frames have been played. In this case latencyMs isn't
+    // useful because it doesn't always reflect whether there is data in the h/w
+    // buffers, particularly if a track has been paused and resumed during draining.
+
+    // Scaling exists on internal branch.
+    //constexpr float MIN_SPEED = 0.125f; // min speed scaling allowed for timely response.
+    if (mPresentationCompleteTimeNs == 0) {
+        mPresentationCompleteTimeNs = systemTime() + latencyMs * 1e6; // / fmax(mSpeed, MIN_SPEED);
+        ALOGV("%s(%d): set: latencyMs %u  mPresentationCompleteTimeNs:%lld",
+                __func__, mId, latencyMs, (long long) mPresentationCompleteTimeNs);
+    }
+
+    bool complete;
+    if (isOffloaded()) {
+        complete = true;
+    } else { // Direct
+        complete = systemTime() >= mPresentationCompleteTimeNs;
+        ALOGV("%s(%d): %s", __func__, mId, (complete ? "complete" : "waiting"));
+    }
+    if (complete) {
+        notifyPresentationComplete();
+        return true;
+    }
+    return false;
+}
+
+void AudioFlinger::PlaybackThread::Track::notifyPresentationComplete()
+{
+    // This only triggers once. TODO: should we enforce this?
+    triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
+    mAudioTrackServerProxy->setStreamEndDone();
+}
+
 void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type)
 {
     for (size_t i = 0; i < mSyncEvents.size();) {