Merge changes I373db2f4,I3f0cc1dd,I05f8ed86,Iec0cc3d2,I01cfcaf1, ...

* changes:
  codec2: signal aliases in XML vs. C2Store
  codec2: Codec2InfoBuilder rework
  stagefright: list only the first codec for a given name
  stagefright: find codecs by their aliases as well
  stagefright: MediaCodecsXmlParser: add support for parsing rank
  stagefright: add method to MediaCodecInfoWriter to find existing info
  stagefright: rework and simplify OmxInfoBuilder
  codec2: CCodec plugin CreateInputSurface should create C2 HAL surface
  codec2: C2PlatformStore: signal component traits via interface
  codec2: make C2Component::Traits.aliases a vector of C2Strings
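
The common thread in the CCodec/Codec2Client changes below is that the
per-component input-buffer bookkeeping (mInputBuffers/mInputBufferCount in
Codec2Client::Component and PipelineCapacity in CCodecBufferChannel) is
replaced by a single PipelineWatcher that maps each queued frame index to its
input buffers and queue time. The following is a minimal, self-contained
sketch of that bookkeeping pattern, not the actual class: it only mirrors the
onWorkQueued / onInputBufferReleased / onWorkDone / pipelineFull flow visible
in the diff, and every other name in it (C2BufferStub, PipelineTrackerSketch,
the depth parameter) is a hypothetical stand-in.

    // Simplified sketch (not the actual PipelineWatcher): per queued frame
    // index, track the input buffers still held by the component and the
    // time the frame was queued.
    #include <chrono>
    #include <map>
    #include <memory>
    #include <vector>

    struct C2BufferStub {};  // hypothetical stand-in for C2Buffer

    class PipelineTrackerSketch {
    public:
        using Clock = std::chrono::steady_clock;

        void onWorkQueued(uint64_t frameIndex,
                          std::vector<std::shared_ptr<C2BufferStub>> &&buffers,
                          Clock::time_point queuedAt) {
            // A duplicate frame index replaces the stale entry, as in the diff.
            mFrames[frameIndex] = Frame{std::move(buffers), queuedAt};
        }

        std::shared_ptr<C2BufferStub> onInputBufferReleased(uint64_t frameIndex,
                                                            size_t arrayIndex) {
            auto it = mFrames.find(frameIndex);
            if (it == mFrames.end() || arrayIndex >= it->second.buffers.size()) {
                return nullptr;  // unknown frame or buffer index: ignore
            }
            // Moving the shared_ptr out leaves a null slot marking it released.
            return std::move(it->second.buffers[arrayIndex]);
        }

        void onWorkDone(uint64_t frameIndex) { mFrames.erase(frameIndex); }

        // Input is gated once the pipeline holds 'depth' outstanding frames.
        bool pipelineFull(size_t depth) const { return mFrames.size() >= depth; }

    private:
        struct Frame {
            std::vector<std::shared_ptr<C2BufferStub>> buffers;
            Clock::time_point queuedAt;
        };
        std::map<uint64_t, Frame> mFrames;
    };

In the real change, CCodecBufferChannel::queueInputBufferInternal() calls
onWorkQueued() before Component::queue() and removes the entry again via
onWorkDone() if the queue call fails, as shown in the CCodecBufferChannel.cpp
hunks below.
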
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index c661233..50ad7b2 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -104,7 +104,8 @@
     for (size_t i = 0; i < entry.count; ++i) {
         if (ids[i] == '\0') {
             if (start != i) {
-                mStaticPhysicalCameraIds.push_back((const char*)ids+start);
+                mStaticPhysicalCameraIdValues.push_back(String8((const char *)ids+start));
+                mStaticPhysicalCameraIds.push_back(mStaticPhysicalCameraIdValues.back().string());
             }
             start = i+1;
         }
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 7049c4b..3d895cb 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -117,6 +117,7 @@
     static std::unordered_set<uint32_t> sSystemTags;
 
     std::vector<const char*> mStaticPhysicalCameraIds;
+    std::vector<String8> mStaticPhysicalCameraIdValues;
 };
 
 #endif // _ACAMERA_METADATA_H
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 7803ccc..c361690 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -86,6 +86,7 @@
 using android::INFO_FORMAT_CHANGED;
 using android::INFO_OUTPUT_BUFFERS_CHANGED;
 using android::INVALID_OPERATION;
+using android::NAME_NOT_FOUND;
 using android::NO_ERROR;
 using android::UNKNOWN_ERROR;
 
@@ -585,8 +586,12 @@
     self->startThreadPool();
 
     // Get main display parameters.
-    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
-            ISurfaceComposer::eDisplayIdMain);
+    const sp<IBinder> mainDpy = SurfaceComposerClient::getInternalDisplayToken();
+    if (mainDpy == nullptr) {
+        fprintf(stderr, "ERROR: no display\n");
+        return NAME_NOT_FOUND;
+    }
+
     DisplayInfo mainDpyInfo;
     err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
     if (err != NO_ERROR) {
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index a463ec5..e5a4337 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -411,10 +411,12 @@
         composerClient = new SurfaceComposerClient;
         CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-        sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-                ISurfaceComposer::eDisplayIdMain));
+        const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+        CHECK(display != nullptr);
+
         DisplayInfo info;
-        SurfaceComposerClient::getDisplayInfo(display, &info);
+        CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
         ssize_t displayWidth = info.w;
         ssize_t displayHeight = info.h;
 
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index f0ee0e1..2cf6955 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -748,10 +748,12 @@
         composerClient = new SurfaceComposerClient;
         CHECK_EQ((status_t)OK, composerClient->initCheck());
 
-        android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-                ISurfaceComposer::eDisplayIdMain));
+        const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+        CHECK(display != nullptr);
+
         DisplayInfo info;
-        SurfaceComposerClient::getDisplayInfo(display, &info);
+        CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
         ssize_t displayWidth = info.w;
         ssize_t displayHeight = info.h;
 
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index b2f39dc..35bdbc0 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -318,10 +318,12 @@
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
     CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-            ISurfaceComposer::eDisplayIdMain));
+    const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+    CHECK(display != nullptr);
+
     DisplayInfo info;
-    SurfaceComposerClient::getDisplayInfo(display, &info);
+    CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
     ssize_t displayWidth = info.w;
     ssize_t displayHeight = info.h;
 
diff --git a/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp b/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
index 27bd631..bf35224 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
+++ b/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
@@ -97,13 +97,13 @@
 ///////////////////////////////////////////////////////////////////////////////
 ClearKeyCasPlugin::ClearKeyCasPlugin(
         void *appData, CasPluginCallback callback)
-    : mCallback(callback), mAppData(appData) {
+    : mCallback(callback), mCallbackExt(NULL), mAppData(appData) {
     ALOGV("CTOR");
 }
 
 ClearKeyCasPlugin::ClearKeyCasPlugin(
         void *appData, CasPluginCallbackExt callback)
-    : mCallbackExt(callback), mAppData(appData) {
+    : mCallback(NULL), mCallbackExt(callback), mAppData(appData) {
     ALOGV("CTOR");
 }
 
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index c7c8442..04dda8f 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -52,33 +52,26 @@
 
 namespace android {
 
-class C2SoftAacDec::IntfImpl : public C2InterfaceHelper {
+constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+
+class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
-        setDerivedInstance(this);
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_AAC) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
 
         addParameter(
-                DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
-                .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
-                .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
-                .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
-                        MEDIA_MIMETYPE_AUDIO_AAC))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
-                .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withConstValue(new C2PortActualDelayTuning::output(2u))
                 .build());
 
         addParameter(
@@ -231,8 +224,6 @@
     // TODO Add : C2StreamAacSbrModeTuning
 };
 
-constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
-
 C2SoftAacDec::C2SoftAacDec(
         const char *name,
         c2_node_id_t id,
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3e62744..86cd3d8 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -51,6 +51,12 @@
         noInputLatency();
         noTimeStretch();
 
+        // TODO: Proper support for reorder depth.
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withConstValue(new C2PortActualDelayTuning::output(8u))
+                .build());
+
         // TODO: output latency and reordering
 
         addParameter(
@@ -877,6 +883,8 @@
     } else if (!hasPicture) {
         fillEmptyWork(work);
     }
+
+    work->input.buffers.clear();
 }
 
 c2_status_t C2SoftAvcDec::drainInternal(
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 99892ce..f0d7d88 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -51,7 +51,11 @@
         noInputLatency();
         noTimeStretch();
 
-        // TODO: output latency and reordering
+        // TODO: Proper support for reorder depth.
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withConstValue(new C2PortActualDelayTuning::output(8u))
+                .build());
 
         addParameter(
                 DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 901f5ed..0b89cff 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -60,7 +60,11 @@
         noInputLatency();
         noTimeStretch();
 
-        // TODO: output latency and reordering
+        // TODO: Proper support for reorder depth.
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withConstValue(new C2PortActualDelayTuning::output(1u))
+                .build());
 
         addParameter(
                 DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index d1557cb..fca2902 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -55,12 +55,10 @@
         : callBack(fn) {}
     virtual void onWorkDone(
         const std::weak_ptr<android::Codec2Client::Component>& comp,
-        std::list<std::unique_ptr<C2Work>>& workItems,
-        size_t numDiscardedInputBuffers) override {
+        std::list<std::unique_ptr<C2Work>>& workItems) override {
         /* TODO */
         ALOGD("onWorkDone called");
         (void)comp;
-        (void)numDiscardedInputBuffers;
         if (callBack) callBack(workItems);
     }
 
@@ -89,9 +87,10 @@
     }
 
     virtual void onInputBufferDone(
-        const std::shared_ptr<C2Buffer>& buffer) override {
+        uint64_t frameIndex, size_t arrayIndex) override {
         /* TODO */
-        (void)buffer;
+        (void)frameIndex;
+        (void)arrayIndex;
     }
 
     virtual void onFrameRendered(
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 458f675..3808be5 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -344,17 +344,13 @@
             return Void();
         }
         // release input buffers potentially held by the component from queue
-        size_t numDiscardedInputBuffers = 0;
         std::shared_ptr<Codec2Client::Component> strongComponent =
                 component.lock();
         if (strongComponent) {
-            numDiscardedInputBuffers =
-                    strongComponent->handleOnWorkDone(workItems);
+            strongComponent->handleOnWorkDone(workItems);
         }
         if (std::shared_ptr<Codec2Client::Listener> listener = base.lock()) {
-            listener->onWorkDone(component,
-                                 workItems,
-                                 numDiscardedInputBuffers);
+            listener->onWorkDone(component, workItems);
         } else {
             LOG(DEBUG) << "onWorkDone -- listener died.";
         }
@@ -418,26 +414,15 @@
             LOG(DEBUG) << "onInputBuffersReleased -- listener died.";
             return Void();
         }
-        std::shared_ptr<Codec2Client::Component> strongComponent =
-                component.lock();
-        if (!strongComponent) {
-            LOG(DEBUG) << "onInputBuffersReleased -- component died.";
-            return Void();
-        }
         for (const InputBuffer& inputBuffer : inputBuffers) {
-            std::shared_ptr<C2Buffer> buffer =
-                    strongComponent->freeInputBuffer(
-                        inputBuffer.frameIndex,
-                        inputBuffer.arrayIndex);
             LOG(VERBOSE) << "onInputBuffersReleased --"
                             " received death notification of"
                             " input buffer:"
                             " frameIndex = " << inputBuffer.frameIndex
                          << ", bufferIndex = " << inputBuffer.arrayIndex
                          << ".";
-            if (buffer) {
-                listener->onInputBufferDone(buffer);
-            }
+            listener->onInputBufferDone(
+                    inputBuffer.frameIndex, inputBuffer.arrayIndex);
         }
         return Void();
     }
@@ -917,43 +902,8 @@
     return static_cast<c2_status_t>(static_cast<Status>(transResult));
 }
 
-size_t Codec2Client::Component::handleOnWorkDone(
+void Codec2Client::Component::handleOnWorkDone(
         const std::list<std::unique_ptr<C2Work>> &workItems) {
-    // Input buffers' lifetime management
-    std::vector<uint64_t> inputDone;
-    for (const std::unique_ptr<C2Work> &work : workItems) {
-        if (work) {
-            if (work->worklets.empty()
-                    || !work->worklets.back()
-                    || (work->worklets.back()->output.flags &
-                        C2FrameData::FLAG_INCOMPLETE) == 0) {
-                // input is complete
-                inputDone.emplace_back(work->input.ordinal.frameIndex.peeku());
-            }
-        }
-    }
-
-    size_t numDiscardedInputBuffers = 0;
-    {
-        std::lock_guard<std::mutex> lock(mInputBuffersMutex);
-        for (uint64_t inputIndex : inputDone) {
-            auto it = mInputBuffers.find(inputIndex);
-            if (it == mInputBuffers.end()) {
-                LOG(VERBOSE) << "onWorkDone -- returned consumed/unknown "
-                                "input frame: index = "
-                             << inputIndex << ".";
-            } else {
-                LOG(VERBOSE) << "onWorkDone -- processed input frame: "
-                             << inputIndex
-                             << " (containing " << it->second.size()
-                                 << " buffers).";
-                mInputBuffers.erase(it);
-                mInputBufferCount.erase(inputIndex);
-                ++numDiscardedInputBuffers;
-            }
-        }
-    }
-
     // Output bufferqueue-based blocks' lifetime management
     mOutputBufferQueueMutex.lock();
     sp<IGraphicBufferProducer> igbp = mOutputIgbp;
@@ -964,72 +914,10 @@
     if (igbp) {
         holdBufferQueueBlocks(workItems, igbp, bqId, generation);
     }
-    return numDiscardedInputBuffers;
-}
-
-std::shared_ptr<C2Buffer> Codec2Client::Component::freeInputBuffer(
-        uint64_t frameIndex,
-        size_t bufferIndex) {
-    std::shared_ptr<C2Buffer> buffer;
-    std::lock_guard<std::mutex> lock(mInputBuffersMutex);
-    auto it = mInputBuffers.find(frameIndex);
-    if (it == mInputBuffers.end()) {
-        LOG(INFO) << "freeInputBuffer -- Unrecognized input frame index "
-                  << frameIndex << ".";
-        return nullptr;
-    }
-    if (bufferIndex >= it->second.size()) {
-        LOG(INFO) << "freeInputBuffer -- Input buffer number " << bufferIndex
-                  << " is not valid in input with frame index " << frameIndex
-                  << ".";
-        return nullptr;
-    }
-    buffer = it->second[bufferIndex];
-    if (!buffer) {
-        LOG(INFO) << "freeInputBuffer -- Input buffer number " << bufferIndex
-                  << " in input with frame index " << frameIndex
-                  << " has already been freed.";
-        return nullptr;
-    }
-    it->second[bufferIndex] = nullptr;
-    if (--mInputBufferCount[frameIndex] == 0) {
-        mInputBuffers.erase(it);
-        mInputBufferCount.erase(frameIndex);
-    }
-    return buffer;
 }
 
 c2_status_t Codec2Client::Component::queue(
         std::list<std::unique_ptr<C2Work>>* const items) {
-    // remember input buffers queued to hold reference to them
-    {
-        std::lock_guard<std::mutex> lock(mInputBuffersMutex);
-        for (const std::unique_ptr<C2Work> &work : *items) {
-            if (!work) {
-                continue;
-            }
-            if (work->input.buffers.size() == 0) {
-                continue;
-            }
-
-            uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
-            auto res = mInputBuffers.emplace(inputIndex, work->input.buffers);
-            if (!res.second) {
-                // TODO: append? - for now we are replacing
-                res.first->second = work->input.buffers;
-                LOG(INFO) << "queue -- duplicate input frame index: "
-                          << inputIndex
-                          << ". Discarding the old input frame...";
-            }
-            mInputBufferCount[inputIndex] = work->input.buffers.size();
-            LOG(VERBOSE) << "queue -- queuing input frame: "
-                         << "index = " << inputIndex
-                         << ", number of buffers = "
-                             << work->input.buffers.size()
-                         << ".";
-        }
-    }
-
     WorkBundle workBundle;
     if (!objcpy(&workBundle, *items, &mBufferPoolSender)) {
         LOG(ERROR) << "queue -- bad input.";
@@ -1087,24 +975,6 @@
         }
     }
 
-    // Input buffers' lifetime management
-    for (uint64_t flushedIndex : flushedIndices) {
-        std::lock_guard<std::mutex> lock(mInputBuffersMutex);
-        auto it = mInputBuffers.find(flushedIndex);
-        if (it == mInputBuffers.end()) {
-            LOG(VERBOSE) << "flush -- returned consumed/unknown input frame: "
-                            "index = " << flushedIndex << ".";
-        } else {
-            LOG(VERBOSE) << "flush -- returned unprocessed input frame: "
-                            "index = " << flushedIndex
-                         << ", number of buffers = "
-                             << mInputBufferCount[flushedIndex]
-                         << ".";
-            mInputBuffers.erase(it);
-            mInputBufferCount.erase(flushedIndex);
-        }
-    }
-
     // Output bufferqueue-based blocks' lifetime management
     mOutputBufferQueueMutex.lock();
     sp<IGraphicBufferProducer> igbp = mOutputIgbp;
@@ -1159,10 +1029,6 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "stop -- call failed: " << status << ".";
     }
-    mInputBuffersMutex.lock();
-    mInputBuffers.clear();
-    mInputBufferCount.clear();
-    mInputBuffersMutex.unlock();
     return status;
 }
 
@@ -1177,10 +1043,6 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "reset -- call failed: " << status << ".";
     }
-    mInputBuffersMutex.lock();
-    mInputBuffers.clear();
-    mInputBufferCount.clear();
-    mInputBuffersMutex.unlock();
     return status;
 }
 
@@ -1195,10 +1057,6 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "release -- call failed: " << status << ".";
     }
-    mInputBuffersMutex.lock();
-    mInputBuffers.clear();
-    mInputBufferCount.clear();
-    mInputBuffersMutex.unlock();
     return status;
 }
 
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index 8f06512..478ce6e 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -250,16 +250,9 @@
 struct Codec2Client::Listener {
 
     // This is called when the component produces some output.
-    //
-    // numDiscardedInputBuffers is the number of input buffers contained in
-    // workItems that have just become unused. Note that workItems may contain
-    // more input buffers than numDiscardedInputBuffers because buffers that
-    // have been previously reported by onInputBufferDone() are not counted
-    // towards numDiscardedInputBuffers, but may still show up in workItems.
     virtual void onWorkDone(
             const std::weak_ptr<Component>& comp,
-            std::list<std::unique_ptr<C2Work>>& workItems,
-            size_t numDiscardedInputBuffers) = 0;
+            std::list<std::unique_ptr<C2Work>>& workItems) = 0;
 
     // This is called when the component goes into a tripped state.
     virtual void onTripped(
@@ -281,7 +274,7 @@
     // Input buffers that have been returned by onWorkDone() or flush() will not
     // trigger a call to this function.
     virtual void onInputBufferDone(
-            const std::shared_ptr<C2Buffer>& buffer) = 0;
+            uint64_t frameIndex, size_t arrayIndex) = 0;
 
     // This is called when the component becomes aware of a frame being
     // rendered.
@@ -383,24 +376,6 @@
 protected:
     sp<Base> mBase;
 
-    // Mutex for mInputBuffers and mInputBufferCount.
-    mutable std::mutex mInputBuffersMutex;
-
-    // Map: frameIndex -> vector of bufferIndices
-    //
-    // mInputBuffers[frameIndex][bufferIndex] may be null if the buffer in that
-    // slot has been freed.
-    mutable std::map<uint64_t, std::vector<std::shared_ptr<C2Buffer>>>
-            mInputBuffers;
-
-    // Map: frameIndex -> number of bufferIndices that have not been freed
-    //
-    // mInputBufferCount[frameIndex] keeps track of the number of non-null
-    // elements in mInputBuffers[frameIndex]. When mInputBufferCount[frameIndex]
-    // decreases to 0, frameIndex can be removed from both mInputBuffers and
-    // mInputBufferCount.
-    mutable std::map<uint64_t, size_t> mInputBufferCount;
-
     ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender
             mBufferPoolSender;
 
@@ -417,10 +392,7 @@
     friend struct Codec2Client;
 
     struct HidlListener;
-    // Return the number of input buffers that should be discarded.
-    size_t handleOnWorkDone(const std::list<std::unique_ptr<C2Work>> &workItems);
-    // Remove an input buffer from mInputBuffers and return it.
-    std::shared_ptr<C2Buffer> freeInputBuffer(uint64_t frameIndex, size_t bufferIndex);
+    void handleOnWorkDone(const std::list<std::unique_ptr<C2Work>> &workItems);
 
 };
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 2870d39..a212651 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -8,6 +8,7 @@
         "CCodecConfig.cpp",
         "Codec2Buffer.cpp",
         "Codec2InfoBuilder.cpp",
+        "PipelineWatcher.cpp",
         "ReflectedParamUpdater.cpp",
         "SkipCutBuffer.cpp",
     ],
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index c72b58d..bc22045 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -448,14 +448,13 @@
 
     virtual void onWorkDone(
             const std::weak_ptr<Codec2Client::Component>& component,
-            std::list<std::unique_ptr<C2Work>>& workItems,
-            size_t numDiscardedInputBuffers) override {
+            std::list<std::unique_ptr<C2Work>>& workItems) override {
         (void)component;
         sp<CCodec> codec(mCodec.promote());
         if (!codec) {
             return;
         }
-        codec->onWorkDone(workItems, numDiscardedInputBuffers);
+        codec->onWorkDone(workItems);
     }
 
     virtual void onTripped(
@@ -504,10 +503,10 @@
     }
 
     virtual void onInputBufferDone(
-            const std::shared_ptr<C2Buffer>& buffer) override {
+            uint64_t frameIndex, size_t arrayIndex) override {
         sp<CCodec> codec(mCodec.promote());
         if (codec) {
-            codec->onInputBufferDone(buffer);
+            codec->onInputBufferDone(frameIndex, arrayIndex);
         }
     }
 
@@ -531,10 +530,6 @@
                 {RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
     }
 
-    void onWorkQueued(bool eos) override {
-        mCodec->onWorkQueued(eos);
-    }
-
     void onOutputBuffersChanged() override {
         mCodec->mCallback->onOutputBuffersChanged();
     }
@@ -546,8 +541,7 @@
 // CCodec
 
 CCodec::CCodec()
-    : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))),
-      mQueuedWorkCount(0) {
+    : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))) {
 }
 
 CCodec::~CCodec() {
@@ -1384,7 +1378,6 @@
     }
 
     mChannel->flush(flushedWork);
-    subQueuedWorkCount(flushedWork.size());
 
     {
         Mutexed<State>::Locked state(mState);
@@ -1506,28 +1499,16 @@
     config->setParameters(comp, params, C2_MAY_BLOCK);
 }
 
-void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems,
-                        size_t numDiscardedInputBuffers) {
+void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
-        {
-            Mutexed<std::list<size_t>>::Locked numDiscardedInputBuffersQueue(
-                    mNumDiscardedInputBuffersQueue);
-            numDiscardedInputBuffersQueue->insert(
-                    numDiscardedInputBuffersQueue->end(),
-                    workItems.size() - 1, 0);
-            numDiscardedInputBuffersQueue->emplace_back(
-                    numDiscardedInputBuffers);
-        }
-        {
-            Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
-            queue->splice(queue->end(), workItems);
-        }
+        Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+        queue->splice(queue->end(), workItems);
     }
     (new AMessage(kWhatWorkDone, this))->post();
 }
 
-void CCodec::onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer) {
-    mChannel->onInputBufferDone(buffer);
+void CCodec::onInputBufferDone(uint64_t frameIndex, size_t arrayIndex) {
+    mChannel->onInputBufferDone(frameIndex, arrayIndex);
 }
 
 void CCodec::onMessageReceived(const sp<AMessage> &msg) {
@@ -1553,7 +1534,6 @@
         case kWhatStart: {
             // C2Component::start() should return within 500ms.
             setDeadline(now, 550ms, "start");
-            mQueuedWorkCount = 0;
             start();
             break;
         }
@@ -1561,10 +1541,6 @@
             // C2Component::stop() should return within 500ms.
             setDeadline(now, 550ms, "stop");
             stop();
-
-            mQueuedWorkCount = 0;
-            Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
-            deadline->set(TimePoint::max(), "none");
             break;
         }
         case kWhatFlush: {
@@ -1590,7 +1566,6 @@
         }
         case kWhatWorkDone: {
             std::unique_ptr<C2Work> work;
-            size_t numDiscardedInputBuffers;
             bool shouldPost = false;
             {
                 Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -1601,24 +1576,10 @@
                 queue->pop_front();
                 shouldPost = !queue->empty();
             }
-            {
-                Mutexed<std::list<size_t>>::Locked numDiscardedInputBuffersQueue(
-                        mNumDiscardedInputBuffersQueue);
-                if (numDiscardedInputBuffersQueue->empty()) {
-                    numDiscardedInputBuffers = 0;
-                } else {
-                    numDiscardedInputBuffers = numDiscardedInputBuffersQueue->front();
-                    numDiscardedInputBuffersQueue->pop_front();
-                }
-            }
             if (shouldPost) {
                 (new AMessage(kWhatWorkDone, this))->post();
             }
 
-            if (work->worklets.empty()
-                    || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE)) {
-                subQueuedWorkCount(1);
-            }
             // handle configuration changes in work done
             Mutexed<Config>::Locked config(mConfig);
             bool changed = false;
@@ -1682,8 +1643,7 @@
             }
             mChannel->onWorkDone(
                     std::move(work), changed ? config->mOutputFormat : nullptr,
-                    initData.hasChanged() ? initData.update().get() : nullptr,
-                    numDiscardedInputBuffers);
+                    initData.hasChanged() ? initData.update().get() : nullptr);
             break;
         }
         case kWhatWatch: {
@@ -1710,17 +1670,26 @@
 void CCodec::initiateReleaseIfStuck() {
     std::string name;
     bool pendingDeadline = false;
-    for (Mutexed<NamedTimePoint> *deadlinePtr : { &mDeadline, &mQueueDeadline, &mEosDeadline }) {
-        Mutexed<NamedTimePoint>::Locked deadline(*deadlinePtr);
+    {
+        Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
         if (deadline->get() < std::chrono::steady_clock::now()) {
             name = deadline->getName();
-            break;
         }
         if (deadline->get() != TimePoint::max()) {
             pendingDeadline = true;
         }
     }
     if (name.empty()) {
+        constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
+        std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
+        if (elapsed >= kWorkDurationThreshold) {
+            name = "queue";
+        }
+        if (elapsed > 0s) {
+            pendingDeadline = true;
+        }
+    }
+    if (name.empty()) {
         // We're not stuck.
         if (pendingDeadline) {
             // If we are not stuck yet but still has deadline coming up,
@@ -1735,33 +1704,6 @@
     mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
 }
 
-void CCodec::onWorkQueued(bool eos) {
-    ALOGV("queued work count +1 from %d", mQueuedWorkCount.load());
-    int32_t count = ++mQueuedWorkCount;
-    if (eos) {
-        CCodecWatchdog::getInstance()->watch(this);
-        Mutexed<NamedTimePoint>::Locked deadline(mEosDeadline);
-        deadline->set(std::chrono::steady_clock::now() + 3s, "eos");
-    }
-    // TODO: query and use input/pipeline/output delay combined
-    if (count >= 4) {
-        CCodecWatchdog::getInstance()->watch(this);
-        Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
-        deadline->set(std::chrono::steady_clock::now() + 3s, "queue");
-    }
-}
-
-void CCodec::subQueuedWorkCount(uint32_t count) {
-    ALOGV("queued work count -%u from %d", count, mQueuedWorkCount.load());
-    int32_t currentCount = (mQueuedWorkCount -= count);
-    if (currentCount == 0) {
-        Mutexed<NamedTimePoint>::Locked deadline(mEosDeadline);
-        deadline->set(TimePoint::max(), "none");
-    }
-    Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
-    deadline->set(TimePoint::max(), "none");
-}
-
 }  // namespace android
 
 extern "C" android::CodecBase *CreateCodec() {
diff --git a/media/codec2/sfplugin/CCodec.h b/media/codec2/sfplugin/CCodec.h
index 545ad70..ba5f5f3 100644
--- a/media/codec2/sfplugin/CCodec.h
+++ b/media/codec2/sfplugin/CCodec.h
@@ -66,9 +66,8 @@
     virtual void signalRequestIDRFrame() override;
 
     void initiateReleaseIfStuck();
-    void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems,
-                    size_t numDiscardedInputBuffers);
-    void onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer);
+    void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
+    void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
 
 protected:
     virtual ~CCodec();
@@ -76,7 +75,7 @@
     virtual void onMessageReceived(const sp<AMessage> &msg) override;
 
 private:
-    typedef std::chrono::time_point<std::chrono::steady_clock> TimePoint;
+    typedef std::chrono::steady_clock::time_point TimePoint;
 
     status_t tryAndReportOnError(std::function<status_t()> job);
 
@@ -110,9 +109,6 @@
             const std::chrono::milliseconds &timeout,
             const char *name);
 
-    void onWorkQueued(bool eos);
-    void subQueuedWorkCount(uint32_t count);
-
     enum {
         kWhatAllocate,
         kWhatConfigure,
@@ -177,13 +173,9 @@
     struct ClientListener;
 
     Mutexed<NamedTimePoint> mDeadline;
-    std::atomic_int32_t mQueuedWorkCount;
-    Mutexed<NamedTimePoint> mQueueDeadline;
-    Mutexed<NamedTimePoint> mEosDeadline;
     typedef CCodecConfig Config;
     Mutexed<Config> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
-    Mutexed<std::list<size_t>> mNumDiscardedInputBuffersQueue;
 
     friend class CCodecCallbackImpl;
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 8e6a3f8..6842fa5 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -94,6 +94,11 @@
      */
     virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
 
+    /**
+     * Return the number of buffers the client owns.
+     */
+    virtual size_t numClientBuffers() const = 0;
+
 protected:
     std::string mComponentName; ///< name of component for debugging
     std::string mChannelName; ///< name of channel for debugging
@@ -508,6 +513,14 @@
         mBuffers.clear();
     }
 
+    size_t numClientBuffers() const {
+        return std::count_if(
+                mBuffers.begin(), mBuffers.end(),
+                [](const Entry &entry) {
+                    return (entry.clientBuffer != nullptr);
+                });
+    }
+
 private:
     friend class BuffersArrayImpl;
 
@@ -693,6 +706,14 @@
         }
     }
 
+    size_t numClientBuffers() const {
+        return std::count_if(
+                mBuffers.begin(), mBuffers.end(),
+                [](const Entry &entry) {
+                    return entry.ownedByClient;
+                });
+    }
+
 private:
     std::string mImplName; ///< name for debugging
     const char *mName; ///< C-string version of name
@@ -756,6 +777,10 @@
         mImpl.flush();
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
 private:
     BuffersArrayImpl mImpl;
 };
@@ -823,6 +848,10 @@
         return std::move(array);
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
     virtual sp<Codec2Buffer> alloc(size_t size) {
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         std::shared_ptr<C2LinearBlock> block;
@@ -967,6 +996,10 @@
         return std::move(array);
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
 private:
     FlexBuffersImpl mImpl;
     std::shared_ptr<C2AllocatorStore> mStore;
@@ -1030,6 +1063,10 @@
         return std::move(array);
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
 private:
     FlexBuffersImpl mImpl;
     std::shared_ptr<LocalBufferPool> mLocalBufferPool;
@@ -1065,6 +1102,10 @@
     void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
         array->clear();
     }
+
+    size_t numClientBuffers() const final {
+        return 0u;
+    }
 };
 
 class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
@@ -1185,6 +1226,10 @@
         mImpl.realloc(alloc);
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
 private:
     BuffersArrayImpl mImpl;
 };
@@ -1246,6 +1291,10 @@
         return std::move(array);
     }
 
+    size_t numClientBuffers() const final {
+        return mImpl.numClientBuffers();
+    }
+
     /**
      * Return an appropriate Codec2Buffer object for the type of buffers.
      *
@@ -1422,90 +1471,6 @@
     count->value = -1;
 }
 
-// CCodecBufferChannel::PipelineCapacity
-
-CCodecBufferChannel::PipelineCapacity::PipelineCapacity()
-      : input(0), component(0),
-        mName("<UNKNOWN COMPONENT>") {
-}
-
-void CCodecBufferChannel::PipelineCapacity::initialize(
-        int newInput,
-        int newComponent,
-        const char* newName,
-        const char* callerTag) {
-    input.store(newInput, std::memory_order_relaxed);
-    component.store(newComponent, std::memory_order_relaxed);
-    mName = newName;
-    ALOGV("[%s] %s -- PipelineCapacity::initialize(): "
-          "pipeline availability initialized ==> "
-          "input = %d, component = %d",
-            mName, callerTag ? callerTag : "*",
-            newInput, newComponent);
-}
-
-bool CCodecBufferChannel::PipelineCapacity::allocate(const char* callerTag) {
-    int prevInput = input.fetch_sub(1, std::memory_order_relaxed);
-    int prevComponent = component.fetch_sub(1, std::memory_order_relaxed);
-    if (prevInput > 0 && prevComponent > 0) {
-        ALOGV("[%s] %s -- PipelineCapacity::allocate() returns true: "
-              "pipeline availability -1 all ==> "
-              "input = %d, component = %d",
-                mName, callerTag ? callerTag : "*",
-                prevInput - 1,
-                prevComponent - 1);
-        return true;
-    }
-    input.fetch_add(1, std::memory_order_relaxed);
-    component.fetch_add(1, std::memory_order_relaxed);
-    ALOGV("[%s] %s -- PipelineCapacity::allocate() returns false: "
-          "pipeline availability unchanged ==> "
-          "input = %d, component = %d",
-            mName, callerTag ? callerTag : "*",
-            prevInput,
-            prevComponent);
-    return false;
-}
-
-void CCodecBufferChannel::PipelineCapacity::free(const char* callerTag) {
-    int prevInput = input.fetch_add(1, std::memory_order_relaxed);
-    int prevComponent = component.fetch_add(1, std::memory_order_relaxed);
-    ALOGV("[%s] %s -- PipelineCapacity::free(): "
-          "pipeline availability +1 all ==> "
-          "input = %d, component = %d",
-            mName, callerTag ? callerTag : "*",
-            prevInput + 1,
-            prevComponent + 1);
-}
-
-int CCodecBufferChannel::PipelineCapacity::freeInputSlots(
-        size_t numDiscardedInputBuffers,
-        const char* callerTag) {
-    int prevInput = input.fetch_add(numDiscardedInputBuffers,
-                                    std::memory_order_relaxed);
-    ALOGV("[%s] %s -- PipelineCapacity::freeInputSlots(%zu): "
-          "pipeline availability +%zu input ==> "
-          "input = %d, component = %d",
-            mName, callerTag ? callerTag : "*",
-            numDiscardedInputBuffers,
-            numDiscardedInputBuffers,
-            prevInput + static_cast<int>(numDiscardedInputBuffers),
-            component.load(std::memory_order_relaxed));
-    return prevInput + static_cast<int>(numDiscardedInputBuffers);
-}
-
-int CCodecBufferChannel::PipelineCapacity::freeComponentSlot(
-        const char* callerTag) {
-    int prevComponent = component.fetch_add(1, std::memory_order_relaxed);
-    ALOGV("[%s] %s -- PipelineCapacity::freeComponentSlot(): "
-          "pipeline availability +1 component ==> "
-          "input = %d, component = %d",
-            mName, callerTag ? callerTag : "*",
-            input.load(std::memory_order_relaxed),
-            prevComponent + 1);
-    return prevComponent + 1;
-}
-
 // CCodecBufferChannel::ReorderStash
 
 CCodecBufferChannel::ReorderStash::ReorderStash() {
@@ -1595,7 +1560,6 @@
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
       mMetaMode(MODE_NONE),
-      mAvailablePipelineCapacity(),
       mInputMetEos(false) {
     Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
     buffers->reset(new DummyInputBuffers(""));
@@ -1658,6 +1622,9 @@
     work->input.ordinal.customOrdinal = timeUs;
     work->input.buffers.clear();
 
+    uint64_t queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
+    std::vector<std::shared_ptr<C2Buffer>> queuedBuffers;
+
     if (buffer->size() > 0u) {
         Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
         std::shared_ptr<C2Buffer> c2buffer;
@@ -1665,11 +1632,9 @@
             return -ENOENT;
         }
         work->input.buffers.push_back(c2buffer);
-    } else {
-        mAvailablePipelineCapacity.freeInputSlots(1, "queueInputBufferInternal");
-        if (eos) {
-            flags |= C2FrameData::FLAG_END_OF_STREAM;
-        }
+        queuedBuffers.push_back(c2buffer);
+    } else if (eos) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
     work->input.flags = (C2FrameData::flags_t)flags;
     // TODO: fill info's
@@ -1680,10 +1645,16 @@
 
     std::list<std::unique_ptr<C2Work>> items;
     items.push_back(std::move(work));
+    mPipelineWatcher.lock()->onWorkQueued(
+            queuedFrameIndex,
+            std::move(queuedBuffers),
+            PipelineWatcher::Clock::now());
     c2_status_t err = mComponent->queue(&items);
+    if (err != C2_OK) {
+        mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+    }
 
     if (err == C2_OK && eos && buffer->size() > 0u) {
-        mCCodecCallback->onWorkQueued(false);
         work.reset(new C2Work);
         work->input.ordinal.timestamp = timeUs;
         work->input.ordinal.frameIndex = mFrameIndex++;
@@ -1693,13 +1664,22 @@
         work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
         work->worklets.emplace_back(new C2Worklet);
 
+        queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
+        queuedBuffers.clear();
+
         items.clear();
         items.push_back(std::move(work));
+
+        mPipelineWatcher.lock()->onWorkQueued(
+                queuedFrameIndex,
+                std::move(queuedBuffers),
+                PipelineWatcher::Clock::now());
         err = mComponent->queue(&items);
+        if (err != C2_OK) {
+            mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+        }
     }
     if (err == C2_OK) {
-        mCCodecCallback->onWorkQueued(eos);
-
         Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
         bool released = (*buffers)->releaseBuffer(buffer, nullptr, true);
         ALOGV("[%s] queueInputBuffer: buffer %sreleased", mName, released ? "" : "not ");
@@ -1844,16 +1824,26 @@
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
-    while (!mInputMetEos &&
-           !mReorderStash.lock()->hasPending() &&
-           mAvailablePipelineCapacity.allocate("feedInputBufferIfAvailable")) {
+    if (mInputMetEos ||
+           mReorderStash.lock()->hasPending() ||
+           mPipelineWatcher.lock()->pipelineFull()) {
+        return;
+    } else {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        if ((*buffers)->numClientBuffers() >= mNumOutputSlots) {
+            return;
+        }
+    }
+    for (size_t i = 0; i < mNumInputSlots; ++i) {
         sp<MediaCodecBuffer> inBuffer;
         size_t index;
         {
             Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+            if ((*buffers)->numClientBuffers() >= mNumInputSlots) {
+                return;
+            }
             if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
                 ALOGV("[%s] no new buffer available", mName);
-                mAvailablePipelineCapacity.free("feedInputBufferIfAvailable");
                 break;
             }
         }
@@ -2032,15 +2022,12 @@
     {
         Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
         if (*buffers && (*buffers)->releaseBuffer(buffer, nullptr, true)) {
-            buffers.unlock();
             released = true;
-            mAvailablePipelineCapacity.freeInputSlots(1, "discardBuffer");
         }
     }
     {
         Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
         if (*buffers && (*buffers)->releaseBuffer(buffer, nullptr)) {
-            buffers.unlock();
             released = true;
         }
     }
@@ -2408,10 +2395,14 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    mAvailablePipelineCapacity.initialize(
-            mNumInputSlots,
-            mNumInputSlots + mNumOutputSlots,
-            mName);
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        watcher->inputDelay(inputDelay ? inputDelay.value : 0)
+                .pipelineDelay(pipelineDelay ? pipelineDelay.value : 0)
+                .outputDelay(outputDelay ? outputDelay.value : 0)
+                .smoothnessFactor(kSmoothnessFactor);
+        watcher->flush();
+    }
 
     mInputMetEos = false;
     mSync.start();
@@ -2472,21 +2463,16 @@
                 buffer->meta()->setInt64("timeUs", 0);
                 post = false;
             }
-            if (mAvailablePipelineCapacity.allocate("requestInitialInputBuffers")) {
-                if (post) {
-                    mCallback->onInputBufferAvailable(index, buffer);
-                } else {
-                    toBeQueued.emplace_back(buffer);
-                }
+            if (post) {
+                mCallback->onInputBufferAvailable(index, buffer);
             } else {
-                ALOGD("[%s] pipeline is full while requesting %zu-th input buffer",
-                        mName, i);
+                toBeQueued.emplace_back(buffer);
             }
         }
     }
     for (const sp<MediaCodecBuffer> &buffer : toBeQueued) {
         if (queueInputBufferInternal(buffer) != OK) {
-            mAvailablePipelineCapacity.freeComponentSlot("requestInitialInputBuffers");
+            ALOGV("[%s] Error while queueing initial buffers", mName);
         }
     }
     return OK;
@@ -2532,28 +2518,25 @@
         (*buffers)->flush(flushedWork);
     }
     mReorderStash.lock()->flush();
+    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::onWorkDone(
         std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
-        const C2StreamInitDataInfo::output *initData,
-        size_t numDiscardedInputBuffers) {
+        const C2StreamInitDataInfo::output *initData) {
     if (handleWork(std::move(work), outputFormat, initData)) {
-        mAvailablePipelineCapacity.freeInputSlots(numDiscardedInputBuffers,
-                                                  "onWorkDone");
         feedInputBufferIfAvailable();
     }
 }
 
 void CCodecBufferChannel::onInputBufferDone(
-        const std::shared_ptr<C2Buffer>& buffer) {
+        uint64_t frameIndex, size_t arrayIndex) {
+    std::shared_ptr<C2Buffer> buffer =
+            mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
     bool newInputSlotAvailable;
     {
         Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
         newInputSlotAvailable = (*buffers)->expireComponentBuffer(buffer);
-        if (newInputSlotAvailable) {
-            mAvailablePipelineCapacity.freeInputSlots(1, "onInputBufferDone");
-        }
     }
     if (newInputSlotAvailable) {
         feedInputBufferIfAvailable();
@@ -2573,7 +2556,7 @@
     if (work->worklets.size() != 1u
             || !work->worklets.front()
             || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE)) {
-        mAvailablePipelineCapacity.freeComponentSlot("handleWork");
+        mPipelineWatcher.lock()->onWorkDone(work->input.ordinal.frameIndex.peeku());
     }
 
     if (work->result == C2_NOT_FOUND) {
@@ -2832,6 +2815,10 @@
     return OK;
 }
 
+PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
+    return mPipelineWatcher.lock()->elapsed(PipelineWatcher::Clock::now());
+}
+
 void CCodecBufferChannel::setMetaMode(MetaMode mode) {
     mMetaMode = mode;
 }
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index ebc1491..9dccab8 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -34,6 +34,7 @@
 #include <media/ICrypto.h>
 
 #include "InputSurfaceWrapper.h"
+#include "PipelineWatcher.h"
 
 namespace android {
 
@@ -44,7 +45,6 @@
     virtual ~CCodecCallback() = default;
     virtual void onError(status_t err, enum ActionCode actionCode) = 0;
     virtual void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) = 0;
-    virtual void onWorkQueued(bool eos) = 0;
     virtual void onOutputBuffersChanged() = 0;
 };
 
@@ -128,22 +128,21 @@
      * @param workItems   finished work item.
      * @param outputFormat new output format if it has changed, otherwise nullptr
      * @param initData    new init data (CSD) if it has changed, otherwise nullptr
-     * @param numDiscardedInputBuffers the number of input buffers that are
-     *                    returned for the first time (not previously returned by
-     *                    onInputBufferDone()).
      */
     void onWorkDone(
             std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
-            const C2StreamInitDataInfo::output *initData,
-            size_t numDiscardedInputBuffers);
+            const C2StreamInitDataInfo::output *initData);
 
     /**
      * Make an input buffer available for the client as it is no longer needed
      * by the codec.
      *
-     * @param buffer The buffer that becomes unused.
+     * @param frameIndex The index of the input work.
+     * @param arrayIndex The index of the buffer within the input work's buffers.
      */
-    void onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer);
+    void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
+
+    PipelineWatcher::Clock::duration elapsed();
 
     enum MetaMode {
         MODE_NONE,
@@ -266,79 +265,7 @@
 
     MetaMode mMetaMode;
 
-    // PipelineCapacity is used in the input buffer gating logic.
-    //
-    // There are three criteria that need to be met before
-    // onInputBufferAvailable() is called:
-    // 1. The number of input buffers that have been received by
-    //    CCodecBufferChannel but not returned via onWorkDone() or
-    //    onInputBufferDone() does not exceed a certain limit. (Let us call this
-    //    number the "input" capacity.)
-    // 2. The number of work items that have been received by
-    //    CCodecBufferChannel whose outputs have not been returned from the
-    //    component (by calling onWorkDone()) does not exceed a certain limit.
-    //    (Let us call this the "component" capacity.)
-    //
-    // These three criteria guarantee that a new input buffer that arrives from
-    // the invocation of onInputBufferAvailable() will not
-    // 1. overload CCodecBufferChannel's input buffers;
-    // 2. overload the component; or
-    //
-    struct PipelineCapacity {
-        // The number of available input capacity.
-        std::atomic_int input;
-        // The number of available component capacity.
-        std::atomic_int component;
-
-        PipelineCapacity();
-        // Set the values of #input and #component.
-        void initialize(int newInput, int newComponent,
-                        const char* newName = "<UNKNOWN COMPONENT>",
-                        const char* callerTag = nullptr);
-
-        // Return true and decrease #input and #component by one if
-        // they are all greater than zero; return false otherwise.
-        //
-        // callerTag is used for logging only.
-        //
-        // allocate() is called by CCodecBufferChannel to check whether it can
-        // receive another input buffer. If the return value is true,
-        // onInputBufferAvailable() and onOutputBufferAvailable() can be called
-        // afterwards.
-        bool allocate(const char* callerTag = nullptr);
-
-        // Increase #input and #component by one.
-        //
-        // callerTag is used for logging only.
-        //
-        // free() is called by CCodecBufferChannel after allocate() returns true
-        // but onInputBufferAvailable() cannot be called for any reasons. It
-        // essentially undoes an allocate() call.
-        void free(const char* callerTag = nullptr);
-
-        // Increase #input by @p numDiscardedInputBuffers.
-        //
-        // callerTag is used for logging only.
-        //
-        // freeInputSlots() is called by CCodecBufferChannel when onWorkDone()
-        // or onInputBufferDone() is called. @p numDiscardedInputBuffers is
-        // provided in onWorkDone(), and is 1 in onInputBufferDone().
-        int freeInputSlots(size_t numDiscardedInputBuffers,
-                           const char* callerTag = nullptr);
-
-        // Increase #component by one and return the updated value.
-        //
-        // callerTag is used for logging only.
-        //
-        // freeComponentSlot() is called by CCodecBufferChannel when
-        // onWorkDone() is called.
-        int freeComponentSlot(const char* callerTag = nullptr);
-
-    private:
-        // Component name. Used for logging.
-        const char* mName;
-    };
-    PipelineCapacity mAvailablePipelineCapacity;
+    Mutexed<PipelineWatcher> mPipelineWatcher;
 
     class ReorderStash {
     public:
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
new file mode 100644
index 0000000..fe0a2c8
--- /dev/null
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PipelineWatcher"
+
+#include <numeric>
+
+#include <log/log.h>
+
+#include "PipelineWatcher.h"
+
+namespace android {
+
+PipelineWatcher &PipelineWatcher::inputDelay(uint32_t value) {
+    mInputDelay = value;
+    return *this;
+}
+
+PipelineWatcher &PipelineWatcher::pipelineDelay(uint32_t value) {
+    mPipelineDelay = value;
+    return *this;
+}
+
+PipelineWatcher &PipelineWatcher::outputDelay(uint32_t value) {
+    mOutputDelay = value;
+    return *this;
+}
+
+PipelineWatcher &PipelineWatcher::smoothnessFactor(uint32_t value) {
+    mSmoothnessFactor = value;
+    return *this;
+}
+
+void PipelineWatcher::onWorkQueued(
+        uint64_t frameIndex,
+        std::vector<std::shared_ptr<C2Buffer>> &&buffers,
+        const Clock::time_point &queuedAt) {
+    ALOGV("onWorkQueued(frameIndex=%llu, buffers(size=%zu), queuedAt=%lld)",
+          (unsigned long long)frameIndex,
+          buffers.size(),
+          (long long)queuedAt.time_since_epoch().count());
+    auto it = mFramesInPipeline.find(frameIndex);
+    if (it != mFramesInPipeline.end()) {
+        ALOGD("onWorkQueued: Duplicate frame index (%llu); previous entry removed",
+              (unsigned long long)frameIndex);
+        (void)mFramesInPipeline.erase(it);
+    }
+    (void)mFramesInPipeline.try_emplace(frameIndex, std::move(buffers), queuedAt);
+}
+
+std::shared_ptr<C2Buffer> PipelineWatcher::onInputBufferReleased(
+        uint64_t frameIndex, size_t arrayIndex) {
+    ALOGV("onInputBufferReleased(frameIndex=%llu, arrayIndex=%zu)",
+          (unsigned long long)frameIndex, arrayIndex);
+    auto it = mFramesInPipeline.find(frameIndex);
+    if (it == mFramesInPipeline.end()) {
+        ALOGD("onInputBufferReleased: frameIndex not found (%llu); ignored",
+              (unsigned long long)frameIndex);
+        return nullptr;
+    }
+    if (it->second.buffers.size() <= arrayIndex) {
+        ALOGD("onInputBufferReleased: buffers at %llu: size %zu, requested index: %zu",
+              (unsigned long long)frameIndex, it->second.buffers.size(), arrayIndex);
+        return nullptr;
+    }
+    std::shared_ptr<C2Buffer> buffer(std::move(it->second.buffers[arrayIndex]));
+    ALOGD_IF(!buffer, "onInputBufferReleased: buffer already released (%llu:%zu)",
+             (unsigned long long)frameIndex, arrayIndex);
+    return buffer;
+}
+
+void PipelineWatcher::onWorkDone(uint64_t frameIndex) {
+    ALOGV("onWorkDone(frameIndex=%llu)", (unsigned long long)frameIndex);
+    auto it = mFramesInPipeline.find(frameIndex);
+    if (it == mFramesInPipeline.end()) {
+        ALOGD("onWorkDone: frameIndex not found (%llu); ignored",
+              (unsigned long long)frameIndex);
+        return;
+    }
+    (void)mFramesInPipeline.erase(it);
+}
+
+void PipelineWatcher::flush() {
+    mFramesInPipeline.clear();
+}
+
+bool PipelineWatcher::pipelineFull() const {
+    if (mFramesInPipeline.size() >=
+            mInputDelay + mPipelineDelay + mOutputDelay + mSmoothnessFactor) {
+        ALOGV("pipelineFull: too many frames in pipeline (%zu)", mFramesInPipeline.size());
+        return true;
+    }
+    size_t sizeWithInputReleased = std::count_if(
+            mFramesInPipeline.begin(),
+            mFramesInPipeline.end(),
+            [](const decltype(mFramesInPipeline)::value_type &value) {
+                for (const std::shared_ptr<C2Buffer> &buffer : value.second.buffers) {
+                    if (buffer) {
+                        return false;
+                    }
+                }
+                return true;
+            });
+    if (sizeWithInputReleased >=
+            mPipelineDelay + mOutputDelay + mSmoothnessFactor) {
+        ALOGV("pipelineFull: too many frames in pipeline, with input released (%zu)",
+              sizeWithInputReleased);
+        return true;
+    }
+    ALOGV("pipeline has room (total: %zu, input released: %zu)",
+          mFramesInPipeline.size(), sizeWithInputReleased);
+    return false;
+}
+
+PipelineWatcher::Clock::duration PipelineWatcher::elapsed(
+        const PipelineWatcher::Clock::time_point &now) const {
+    return std::accumulate(
+            mFramesInPipeline.begin(),
+            mFramesInPipeline.end(),
+            Clock::duration::zero(),
+            [&now](const Clock::duration &current,
+                   const decltype(mFramesInPipeline)::value_type &value) {
+                Clock::duration elapsed = now - value.second.queuedAt;
+                ALOGV("elapsed: frameIndex = %llu elapsed = %lldms",
+                      (unsigned long long)value.first,
+                      std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
+                return current > elapsed ? current : elapsed;
+            });
+}
+
+}  // namespace android
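
pipelineFull() applies two caps: the total number of frames in flight may not reach inputDelay + pipelineDelay + outputDelay + smoothnessFactor, and the subset whose input buffers have all been released back to the client may not reach pipelineDelay + outputDelay + smoothnessFactor. A small numeric sketch of those caps; the delay values are illustrative, not taken from any real component:

    // Illustrative capacity math only; real values come from the component's
    // reported input/pipeline/output delays and the smoothness factor.
    constexpr uint32_t kInputDelay       = 4;
    constexpr uint32_t kPipelineDelay    = 0;
    constexpr uint32_t kOutputDelay      = 8;
    constexpr uint32_t kSmoothnessFactor = 4;

    // pipelineFull() returns true once 16 frames are queued but not yet done...
    constexpr uint32_t kTotalCap =
            kInputDelay + kPipelineDelay + kOutputDelay + kSmoothnessFactor;  // 16
    // ...or once 12 of them have had all their input buffers released.
    constexpr uint32_t kReleasedCap =
            kPipelineDelay + kOutputDelay + kSmoothnessFactor;                // 12
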
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
new file mode 100644
index 0000000..ce82298
--- /dev/null
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PIPELINE_WATCHER_H_
+#define PIPELINE_WATCHER_H_
+
+#include <chrono>
+#include <map>
+#include <memory>
+
+#include <C2Work.h>
+
+namespace android {
+
+/**
+ * PipelineWatcher tracks the work items currently in flight in the component pipeline.
+ */
+class PipelineWatcher {
+public:
+    typedef std::chrono::steady_clock Clock;
+
+    PipelineWatcher()
+        : mInputDelay(0),
+          mPipelineDelay(0),
+          mOutputDelay(0),
+          mSmoothnessFactor(0) {}
+    ~PipelineWatcher() = default;
+
+    PipelineWatcher &inputDelay(uint32_t value);
+    PipelineWatcher &pipelineDelay(uint32_t value);
+    PipelineWatcher &outputDelay(uint32_t value);
+    PipelineWatcher &smoothnessFactor(uint32_t value);
+
+    void onWorkQueued(
+            uint64_t frameIndex,
+            std::vector<std::shared_ptr<C2Buffer>> &&buffers,
+            const Clock::time_point &queuedAt);
+    std::shared_ptr<C2Buffer> onInputBufferReleased(
+            uint64_t frameIndex, size_t arrayIndex);
+    void onWorkDone(uint64_t frameIndex);
+    void flush();
+
+    bool pipelineFull() const;
+    Clock::duration elapsed(const Clock::time_point &now) const;
+
+private:
+    uint32_t mInputDelay;
+    uint32_t mPipelineDelay;
+    uint32_t mOutputDelay;
+    uint32_t mSmoothnessFactor;
+
+    struct Frame {
+        Frame(std::vector<std::shared_ptr<C2Buffer>> &&b,
+              const Clock::time_point &q)
+            : buffers(b),
+              queuedAt(q) {}
+        std::vector<std::shared_ptr<C2Buffer>> buffers;
+        const Clock::time_point queuedAt;
+    };
+    std::map<uint64_t, Frame> mFramesInPipeline;
+};
+
+}  // namespace android
+
+#endif  // PIPELINE_WATCHER_H_
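
CCodecBufferChannel (via the mPipelineWatcher member added above) is expected to drive the watcher around each work item's lifetime. A minimal driver sketch under that assumption; the watcher is assumed to be configured with the component's delays elsewhere, and queueToComponent() is a hypothetical placeholder, not part of this change:

    #include "PipelineWatcher.h"

    using namespace android;

    // Sketch of the intended call sequence for one work item.
    void driveOneWorkItem(PipelineWatcher &watcher,
                          uint64_t frameIndex,
                          std::vector<std::shared_ptr<C2Buffer>> buffers) {
        if (watcher.pipelineFull()) {
            return;  // hold the work until pipeline capacity frees up
        }
        watcher.onWorkQueued(frameIndex, std::move(buffers), PipelineWatcher::Clock::now());
        // queueToComponent(frameIndex);  // hypothetical: hand the work to the component

        // Later, when the component returns an input buffer:
        watcher.onInputBufferReleased(frameIndex, 0 /* arrayIndex */);
        // And when the corresponding work item completes:
        watcher.onWorkDone(frameIndex);
    }
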
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
index 9711b86..8eb70b1 100644
--- a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -310,7 +310,7 @@
     }
 
     // Write SHORT data from the first channel.
-    int write(int16_t *inputData, int inputChannelCount, int numFrames) {
+    int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
         // stop at end of buffer
         if ((mFrameCounter + numFrames) > mMaxFrames) {
             numFrames = mMaxFrames - mFrameCounter;
@@ -322,7 +322,7 @@
     }
 
     // Write FLOAT data from the first channel.
-    int write(float *inputData, int inputChannelCount, int numFrames) {
+    int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
         // stop at end of buffer
         if ((mFrameCounter + numFrames) > mMaxFrames) {
             numFrames = mMaxFrames - mFrameCounter;
@@ -333,7 +333,7 @@
         return numFrames;
     }
 
-    int size() {
+    int32_t size() {
         return mFrameCounter;
     }
 
@@ -443,9 +443,14 @@
     virtual ~LoopbackProcessor() = default;
 
 
+    enum process_result {
+        PROCESS_RESULT_OK,
+        PROCESS_RESULT_GLITCH
+    };
+
     virtual void reset() {}
 
-    virtual void process(float *inputData, int inputChannelCount,
+    virtual process_result process(float *inputData, int inputChannelCount,
                  float *outputData, int outputChannelCount,
                  int numFrames) = 0;
 
@@ -639,7 +644,7 @@
         return getSampleRate() / 8;
     }
 
-    void process(float *inputData, int inputChannelCount,
+    process_result process(float *inputData, int inputChannelCount,
                  float *outputData, int outputChannelCount,
                  int numFrames) override {
         int channelsValid = std::min(inputChannelCount, outputChannelCount);
@@ -750,6 +755,7 @@
 
         mState = nextState;
         mLoopCounter++;
+        return PROCESS_RESULT_OK;
     }
 
     int save(const char *fileName) override {
@@ -896,9 +902,10 @@
      * @param inputData contains microphone data with sine signal feedback
      * @param outputData contains the reference sine wave
      */
-    void process(float *inputData, int inputChannelCount,
+    process_result process(float *inputData, int inputChannelCount,
                  float *outputData, int outputChannelCount,
                  int numFrames) override {
+        process_result result = PROCESS_RESULT_OK;
         mProcessCount++;
 
         float peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
@@ -978,6 +985,7 @@
                     mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
                     if (absDiff > mTolerance) {
                         mGlitchCount++;
+                        result = PROCESS_RESULT_GLITCH;
                         //printf("%5d: Got a glitch # %d, predicted = %f, actual = %f\n",
                         //       mFrameCounter, mGlitchCount, predicted, sample);
                         mState = STATE_IMMUNE;
@@ -1018,6 +1026,7 @@
 
             mFrameCounter++;
         }
+        return result;
     }
 
     void resetAccumulator() {
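
The new process_result return value is what lets the loopback callback record where a glitch occurred. A minimal caller-side sketch, assuming an already-configured LoopbackProcessor implementation and an AudioRecording captured alongside it (names are illustrative):

    #include "LoopbackAnalyzer.h"

    // Sketch only: record where glitches land in the captured audio.
    static void analyzeBurst(LoopbackProcessor &analyzer, AudioRecording &recording,
                             float *inputFloats, float *outputFloats,
                             int channelCount, int numFrames,
                             int32_t *glitchFrames, int32_t &numGlitchEvents,
                             int32_t maxGlitchEvents) {
        LoopbackProcessor::process_result procResult = analyzer.process(
                inputFloats, channelCount, outputFloats, channelCount, numFrames);
        if (procResult == LoopbackProcessor::PROCESS_RESULT_GLITCH
                && numGlitchEvents < maxGlitchEvents) {
            // Remember the frame position so the glitch region can be graphed later.
            glitchFrames[numGlitchEvents++] = recording.size();
        }
        recording.write(inputFloats, channelCount, numFrames);
    }
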
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 3de1514..75d425f 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -34,6 +34,7 @@
 #include "AAudioSimpleRecorder.h"
 #include "AAudioExampleUtils.h"
 #include "LoopbackAnalyzer.h"
+#include "../../utils/AAudioExampleUtils.h"
 
 // V0.4.00 = rectify and low-pass filter the echos, use auto-correlation on entire echo
 #define APP_VERSION             "0.4.00"
@@ -47,10 +48,14 @@
 constexpr int kLogPeriodMillis       = 1000;
 constexpr int kNumInputChannels      = 1;
 constexpr int kNumCallbacksToDrain   = 20;
+constexpr int kNumCallbacksToNotRead = 0; // let input fill back up
 constexpr int kNumCallbacksToDiscard = 20;
+constexpr int kDefaultHangTimeMillis = 50;
+constexpr int kMaxGlitchEventsToSave = 32;
 
 struct LoopbackData {
     AAudioStream      *inputStream = nullptr;
+    AAudioStream      *outputStream = nullptr;
     int32_t            inputFramesMaximum = 0;
     int16_t           *inputShortData = nullptr;
     float             *inputFloatData = nullptr;
@@ -58,6 +63,7 @@
     int32_t            actualInputChannelCount = 0;
     int32_t            actualOutputChannelCount = 0;
     int32_t            numCallbacksToDrain = kNumCallbacksToDrain;
+    int32_t            numCallbacksToNotRead = kNumCallbacksToNotRead;
     int32_t            numCallbacksToDiscard = kNumCallbacksToDiscard;
     int32_t            minNumFrames = INT32_MAX;
     int32_t            maxNumFrames = 0;
@@ -65,6 +71,9 @@
     int32_t            insufficientReadFrames = 0;
     int32_t            framesReadTotal = 0;
     int32_t            framesWrittenTotal = 0;
+    int32_t            hangPeriodMillis = 5 * 1000; // time between hangs
+    int32_t            hangCountdownFrames = 5 * 48000; // frames until the next hang
+    int32_t            hangTimeMillis = 0; // 0 for no hang
     bool               isDone = false;
 
     aaudio_result_t    inputError = AAUDIO_OK;
@@ -74,6 +83,29 @@
     EchoAnalyzer       echoAnalyzer;
     AudioRecording     audioRecording;
     LoopbackProcessor *loopbackProcessor;
+
+    int32_t            glitchFrames[kMaxGlitchEventsToSave];
+    int32_t            numGlitchEvents = 0;
+
+    void hangIfRequested(int32_t numFrames) {
+        if (hangTimeMillis > 0) {
+            hangCountdownFrames -= numFrames;
+            if (hangCountdownFrames <= 0) {
+                const int64_t startNanos = getNanoseconds();
+                usleep(hangTimeMillis * 1000);
+                const int64_t endNanos = getNanoseconds();
+                const int32_t elapsedMicros = (int32_t)
+                        ((endNanos - startNanos) / 1000);
+                printf("callback hanging for %d millis, actual = %d micros\n",
+                       hangTimeMillis, elapsedMicros);
+                hangCountdownFrames = (int64_t) hangPeriodMillis
+                        * AAudioStream_getSampleRate(outputStream)
+                        / 1000;
+            }
+        }
+
+
+    }
 };
 
 static void convertPcm16ToFloat(const int16_t *source,
@@ -166,6 +198,9 @@
             myData->numCallbacksToDrain--;
         }
 
+    } else if (myData->numCallbacksToNotRead > 0) {
+        // Let the input fill up a bit so we are not so close to the write pointer.
+        myData->numCallbacksToNotRead--;
     } else if (myData->numCallbacksToDiscard > 0) {
         // Ignore. Allow the input to fill back up to equilibrium with the output.
         actualFramesRead = readFormattedData(myData, numFrames);
@@ -175,6 +210,7 @@
         myData->numCallbacksToDiscard--;
 
     } else {
+        myData->hangIfRequested(numFrames);
 
         int32_t numInputBytes = numFrames * myData->actualInputChannelCount * sizeof(float);
         memset(myData->inputFloatData, 0 /* value */, numInputBytes);
@@ -191,7 +227,7 @@
 
             if (actualFramesRead < numFrames) {
                 if(actualFramesRead < (int32_t) framesAvailable) {
-                    printf("insufficient but numFrames = %d"
+                    printf("insufficient for no reason, numFrames = %d"
                                    ", actualFramesRead = %d"
                                    ", inputFramesWritten = %d"
                                    ", inputFramesRead = %d"
@@ -212,16 +248,25 @@
             if (myData->actualInputFormat == AAUDIO_FORMAT_PCM_I16) {
                 convertPcm16ToFloat(myData->inputShortData, myData->inputFloatData, numSamples);
             }
-            // Save for later.
-            myData->audioRecording.write(myData->inputFloatData,
-                                         myData->actualInputChannelCount,
-                                         numFrames);
+
             // Analyze the data.
-            myData->loopbackProcessor->process(myData->inputFloatData,
+            LoopbackProcessor::process_result procResult = myData->loopbackProcessor->process(myData->inputFloatData,
                                                myData->actualInputChannelCount,
                                                outputData,
                                                myData->actualOutputChannelCount,
                                                numFrames);
+
+            if (procResult == LoopbackProcessor::PROCESS_RESULT_GLITCH) {
+                if (myData->numGlitchEvents < kMaxGlitchEventsToSave) {
+                    myData->glitchFrames[myData->numGlitchEvents++] = myData->audioRecording.size();
+                }
+            }
+
+            // Save for later.
+            myData->audioRecording.write(myData->inputFloatData,
+                                         myData->actualInputChannelCount,
+                                         actualFramesRead);
+
             myData->isDone = myData->loopbackProcessor->isDone();
             if (myData->isDone) {
                 result = AAUDIO_CALLBACK_RESULT_STOP;
@@ -249,6 +294,7 @@
     printf("      -C{channels}      number of input channels\n");
     printf("      -F{0,1,2}         input format, 1=I16, 2=FLOAT\n");
     printf("      -g{gain}          recirculating loopback gain\n");
+    printf("      -h{hangMillis}    occasionally hang in the callback\n");
     printf("      -P{inPerf}        set input AAUDIO_PERFORMANCE_MODE*\n");
     printf("          n for _NONE\n");
     printf("          l for _LATENCY\n");
@@ -307,9 +353,7 @@
     return testMode;
 }
 
-void printAudioGraph(AudioRecording &recording, int numSamples) {
-    int32_t start = recording.size() / 2;
-    int32_t end = start + numSamples;
+void printAudioGraphRegion(AudioRecording &recording, int32_t start, int32_t end) {
     if (end >= recording.size()) {
         end = recording.size() - 1;
     }
@@ -360,6 +404,7 @@
 
     int                   testMode                   = TEST_ECHO_LATENCY;
     double                gain                       = 1.0;
+    int                   hangTimeMillis             = 0;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
@@ -389,6 +434,15 @@
                     case 'g':
                         gain = atof(&arg[2]);
                         break;
+                    case 'h':
+                        // Was there a number after the "-h"?
+                        if (arg[2]) {
+                            hangTimeMillis = atoi(&arg[2]);
+                        } else {
+                            // If no number then use the default.
+                            hangTimeMillis = kDefaultHangTimeMillis;
+                        }
+                        break;
                     case 'P':
                         inputPerformanceLevel = parsePerformanceMode(arg[2]);
                         break;
@@ -453,7 +507,7 @@
         fprintf(stderr, "ERROR -  player.open() returned %d\n", result);
         exit(1);
     }
-    outputStream = player.getStream();
+    outputStream = loopbackData.outputStream = player.getStream();
 
     actualOutputFormat = AAudioStream_getFormat(outputStream);
     if (actualOutputFormat != AAUDIO_FORMAT_PCM_FLOAT) {
@@ -487,20 +541,24 @@
     }
     inputStream = loopbackData.inputStream = recorder.getStream();
 
-    {
-        int32_t actualCapacity = AAudioStream_getBufferCapacityInFrames(inputStream);
-        result = AAudioStream_setBufferSizeInFrames(inputStream, actualCapacity);
-        if (result < 0) {
-            fprintf(stderr, "ERROR -  AAudioStream_setBufferSizeInFrames() returned %d\n", result);
-            goto finish;
-        } else {}
-    }
-
     argParser.compareWithStream(inputStream);
 
-    // If the input stream is too small then we cannot satisfy the output callback.
     {
         int32_t actualCapacity = AAudioStream_getBufferCapacityInFrames(inputStream);
+        (void) AAudioStream_setBufferSizeInFrames(inputStream, actualCapacity);
+
+        if (testMode == TEST_SINE_MAGNITUDE) {
+            result = AAudioStream_setBufferSizeInFrames(outputStream, actualCapacity);
+            if (result < 0) {
+                fprintf(stderr, "ERROR -  AAudioStream_setBufferSizeInFrames(output) returned %d\n",
+                        result);
+                goto finish;
+            } else {
+                printf("Output buffer size set to match input capacity = %d frames.\n", result);
+            }
+        }
+
+        // If the input stream is too small then we cannot satisfy the output callback.
         if (actualCapacity < 2 * outputFramesPerBurst) {
             fprintf(stderr, "ERROR - input capacity < 2 * outputFramesPerBurst\n");
             goto finish;
@@ -525,6 +583,8 @@
 
     loopbackData.loopbackProcessor->reset();
 
+    loopbackData.hangTimeMillis = hangTimeMillis;
+
     // Start OUTPUT first so INPUT does not overflow.
     result = player.start();
     if (result != AAUDIO_OK) {
@@ -611,7 +671,17 @@
 
     if (loopbackData.inputError == AAUDIO_OK) {
         if (testMode == TEST_SINE_MAGNITUDE) {
-            printAudioGraph(loopbackData.audioRecording, 200);
+            if (loopbackData.numGlitchEvents > 0) {
+                // Graph around the first glitch if there is one.
+                const int32_t start = loopbackData.glitchFrames[0] - 8;
+                const int32_t end = start + outputFramesPerBurst + 8 + 8;
+                printAudioGraphRegion(loopbackData.audioRecording, start, end);
+            } else {
+                // Or graph the middle of the signal.
+                const int32_t start = loopbackData.audioRecording.size() / 2;
+                const int32_t end = start + 200;
+                printAudioGraphRegion(loopbackData.audioRecording, start, end);
+            }
         }
 
         loopbackData.loopbackProcessor->report();
@@ -661,6 +731,11 @@
     delete[] loopbackData.inputShortData;
 
 report_result:
+
+    for (int i = 0; i < loopbackData.numGlitchEvents; i++) {
+        printf("  glitch at frame %d\n", loopbackData.glitchFrames[i]);
+    }
+
     written = loopbackData.loopbackProcessor->save(FILENAME_PROCESSED);
     if (written > 0) {
         printf("main() wrote %8d processed samples to \"%s\" on Android device\n",
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index a5dc55f..f5ed7aa 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -130,12 +130,10 @@
     }
 
     int32_t getBufferCapacity() const {
-        printf("%s() returns %d\n", __func__, mBufferCapacity);
         return mBufferCapacity;
     }
 
     void setBufferCapacity(int32_t frames) {
-        printf("%s(%d)\n", __func__, frames);
         mBufferCapacity = frames;
     }
 
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index b83a441e..41a7ff0 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -872,13 +872,14 @@
                                         const audio_config_t *config,
                                         audio_output_flags_t flags,
                                         audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId)
+                                        audio_port_handle_t *portId,
+                                        std::vector<audio_io_handle_t> *secondaryOutputs)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
     return aps->getOutputForAttr(attr, output, session, stream, pid, uid,
                                  config,
-                                 flags, selectedDeviceId, portId);
+                                 flags, selectedDeviceId, portId, secondaryOutputs);
 }
 
 status_t AudioSystem::startOutput(audio_port_handle_t portId)
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 0db56e8..d9f6e36 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -189,16 +189,17 @@
         return static_cast <audio_io_handle_t> (reply.readInt32());
     }
 
-    virtual status_t getOutputForAttr(const audio_attributes_t *attr,
-                                        audio_io_handle_t *output,
-                                        audio_session_t session,
-                                        audio_stream_type_t *stream,
-                                        pid_t pid,
-                                        uid_t uid,
-                                        const audio_config_t *config,
-                                        audio_output_flags_t flags,
-                                        audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId)
+    status_t getOutputForAttr(const audio_attributes_t *attr,
+                              audio_io_handle_t *output,
+                              audio_session_t session,
+                              audio_stream_type_t *stream,
+                              pid_t pid,
+                              uid_t uid,
+                              const audio_config_t *config,
+                              audio_output_flags_t flags,
+                              audio_port_handle_t *selectedDeviceId,
+                              audio_port_handle_t *portId,
+                              std::vector<audio_io_handle_t> *secondaryOutputs) override
         {
             Parcel data, reply;
             data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -224,6 +225,10 @@
                 ALOGE("getOutputForAttr NULL portId - shouldn't happen");
                 return BAD_VALUE;
             }
+            if (secondaryOutputs == NULL) {
+                ALOGE("getOutputForAttr NULL secondaryOutputs - shouldn't happen");
+                return BAD_VALUE;
+            }
             if (attr == NULL) {
                 data.writeInt32(0);
             } else {
@@ -258,7 +263,9 @@
             }
             *selectedDeviceId = (audio_port_handle_t)reply.readInt32();
             *portId = (audio_port_handle_t)reply.readInt32();
-            return status;
+            secondaryOutputs->resize(reply.readInt32());
+            return reply.read(secondaryOutputs->data(),
+                              secondaryOutputs->size() * sizeof(audio_io_handle_t));
         }
 
     virtual status_t startOutput(audio_port_handle_t portId)
@@ -1300,16 +1307,19 @@
             audio_port_handle_t selectedDeviceId = data.readInt32();
             audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
             audio_io_handle_t output = 0;
+            std::vector<audio_io_handle_t> secondaryOutputs;
             status_t status = getOutputForAttr(hasAttributes ? &attr : NULL,
                     &output, session, &stream, pid, uid,
                     &config,
-                    flags, &selectedDeviceId, &portId);
+                    flags, &selectedDeviceId, &portId, &secondaryOutputs);
             reply->writeInt32(status);
             reply->writeInt32(output);
             reply->writeInt32(stream);
             reply->writeInt32(selectedDeviceId);
             reply->writeInt32(portId);
-            return NO_ERROR;
+            reply->writeInt32(secondaryOutputs.size());
+            return reply->write(secondaryOutputs.data(),
+                                secondaryOutputs.size() * sizeof(audio_io_handle_t));
         } break;
 
         case START_OUTPUT: {
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 786fb9a..bf8d627 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -49,8 +49,12 @@
 #define MIX_STATE_IDLE 0
 #define MIX_STATE_MIXING 1
 
+/** Control to which device some audio is rendered */
 #define MIX_ROUTE_FLAG_RENDER 0x1
+/** Loop back some audio instead of rendering it */
 #define MIX_ROUTE_FLAG_LOOP_BACK (0x1 << 1)
+/** Loop back some audio while it is rendered */
+#define MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER (MIX_ROUTE_FLAG_RENDER | MIX_ROUTE_FLAG_LOOP_BACK)
 #define MIX_ROUTE_FLAG_ALL (MIX_ROUTE_FLAG_RENDER | MIX_ROUTE_FLAG_LOOP_BACK)
 
 #define MAX_MIXES_PER_POLICY 10
@@ -119,6 +123,11 @@
 #define RECORD_CONFIG_EVENT_START 1
 #define RECORD_CONFIG_EVENT_STOP  0
 
+static inline bool is_mix_loopback_render(uint32_t routeFlags) {
+    return (routeFlags & MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER)
+           == MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER;
+}
+
 }; // namespace android
 
 #endif  // ANDROID_AUDIO_POLICY_H
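
is_mix_loopback_render() is true only when both the RENDER and LOOP_BACK bits are set, i.e. for mixes created with the new MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER flag. A quick sketch of the distinction (assert() is used purely for illustration):

    #include <assert.h>
    #include <media/AudioPolicy.h>

    static void checkLoopbackRenderFlags() {
        // Renders to a device only: not a loopback-and-render mix.
        assert(!android::is_mix_loopback_render(MIX_ROUTE_FLAG_RENDER));
        // Loops back only: still not a match.
        assert(!android::is_mix_loopback_render(MIX_ROUTE_FLAG_LOOP_BACK));
        // Both bits set: the audio keeps rendering and is also looped back.
        assert(android::is_mix_loopback_render(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER));
    }
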
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 87a9919..6060894 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -231,7 +231,8 @@
                                      const audio_config_t *config,
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
-                                     audio_port_handle_t *portId);
+                                     audio_port_handle_t *portId,
+                                     std::vector<audio_io_handle_t> *secondaryOutputs);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index b2cda32..e89a55d 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -66,7 +66,8 @@
                                       const audio_config_t *config,
                                       audio_output_flags_t flags,
                                       audio_port_handle_t *selectedDeviceId,
-                                      audio_port_handle_t *portId) = 0;
+                                      audio_port_handle_t *portId,
+                                      std::vector<audio_io_handle_t> *secondaryOutputs) = 0;
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
     virtual status_t stopOutput(audio_port_handle_t portId) = 0;
     virtual void releaseOutput(audio_port_handle_t portId) = 0;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index c4015fb..6259b15 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3595,7 +3595,7 @@
 }
 
 int64_t MPEG4Writer::Track::getDurationUs() const {
-    return mTrackDurationUs + getStartTimeOffsetTimeUs();
+    return mTrackDurationUs + getStartTimeOffsetTimeUs() + mOwner->getStartTimeOffsetBFramesUs();
 }
 
 int64_t MPEG4Writer::Track::getEstimatedTrackSizeBytes() const {
@@ -4059,7 +4059,7 @@
     // Prepone video playback.
     if (mMinCttsOffsetTicks != mMaxCttsOffsetTicks) {
         int32_t mvhdTimeScale = mOwner->getTimeScale();
-        uint32_t tkhdDuration = (mTrackDurationUs * mvhdTimeScale + 5E5) / 1E6;
+        uint32_t tkhdDuration = (getDurationUs() * mvhdTimeScale + 5E5) / 1E6;
         int64_t mediaTime = ((kMaxCttsOffsetTimeUs - getMinCttsOffsetTimeUs())
             * mTimeScale + 5E5) / 1E6;
         if (tkhdDuration > 0 && mediaTime > 0) {
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index bc99099..7733071 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -292,13 +292,16 @@
         fullConfig.sample_rate = config->sample_rate;
         fullConfig.channel_mask = config->channel_mask;
         fullConfig.format = config->format;
+        std::vector<audio_io_handle_t> secondaryOutputs;
         ret = AudioSystem::getOutputForAttr(attr, &io,
                                             actualSessionId,
                                             &streamType, client.clientPid, client.clientUid,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
-                                            deviceId, &portId);
+                                            deviceId, &portId, &secondaryOutputs);
+        ALOGW_IF(!secondaryOutputs.empty(),
+                 "%s does not support secondary outputs, ignoring them", __func__);
     } else {
         ret = AudioSystem::getInputForAttr(attr, &io,
                                               actualSessionId,
@@ -678,6 +681,7 @@
     status_t lStatus;
     audio_stream_type_t streamType;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    std::vector<audio_io_handle_t> secondaryOutputs;
 
     bool updatePid = (input.clientInfo.clientPid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -712,7 +716,7 @@
 
     lStatus = AudioSystem::getOutputForAttr(&input.attr, &output.outputId, sessionId, &streamType,
                                             clientPid, clientUid, &input.config, input.flags,
-                                            &output.selectedDeviceId, &portId);
+                                            &output.selectedDeviceId, &portId, &secondaryOutputs);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -785,6 +789,59 @@
         output.afLatencyMs = thread->latency();
         output.portId = portId;
 
+        if (lStatus == NO_ERROR) {
+            // Connect secondary outputs. Failure on a secondary output must not impede the primary.
+            // Any secondary output setup failure will lead to a desync between the AP and AF until
+            // the track is destroyed.
+            TeePatches teePatches;
+            for (audio_io_handle_t secondaryOutput : secondaryOutputs) {
+                PlaybackThread *secondaryThread = checkPlaybackThread_l(secondaryOutput);
+                if (secondaryThread == NULL) {
+                    ALOGE("no playback thread found for secondary output %d", output.outputId);
+                    continue;
+                }
+
+                size_t frameCount = std::lcm(thread->frameCount(), secondaryThread->frameCount());
+
+                using namespace std::chrono_literals;
+                auto inChannelMask = audio_channel_mask_out_to_in(input.config.channel_mask);
+                sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
+                                                               output.sampleRate,
+                                                               inChannelMask,
+                                                               input.config.format,
+                                                               frameCount,
+                                                               NULL /* buffer */,
+                                                               (size_t)0 /* bufferSize */,
+                                                               AUDIO_INPUT_FLAG_DIRECT,
+                                                               0ns /* timeout */);
+                status_t status = patchRecord->initCheck();
+                if (status != NO_ERROR) {
+                    ALOGE("Secondary output patchRecord init failed: %d", status);
+                    continue;
+                }
+                sp patchTrack = new PlaybackThread::PatchTrack(secondaryThread,
+                                                               streamType,
+                                                               output.sampleRate,
+                                                               input.config.channel_mask,
+                                                               input.config.format,
+                                                               frameCount,
+                                                               patchRecord->buffer(),
+                                                               patchRecord->bufferSize(),
+                                                               output.flags,
+                                                               0ns /* timeout */);
+                status = patchTrack->initCheck();
+                if (status != NO_ERROR) {
+                    ALOGE("Secondary output patchTrack init failed: %d", status);
+                    continue;
+                }
+                teePatches.push_back({patchRecord, patchTrack});
+                secondaryThread->addPatchTrack(patchTrack);
+                patchTrack->setPeerProxy(patchRecord.get());
+                patchRecord->setPeerProxy(patchTrack.get());
+            }
+            track->setTeePatches(std::move(teePatches));
+        }
+
         // move effect chain to this output thread if an effect on same session was waiting
         // for a track to be created
         if (lStatus == NO_ERROR && effectThread != NULL) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index d8c0da5..1441e15 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -21,8 +21,11 @@
 #include "Configuration.h"
 #include <atomic>
 #include <mutex>
+#include <chrono>
 #include <deque>
 #include <map>
+#include <numeric>
+#include <optional>
 #include <set>
 #include <string>
 #include <vector>
@@ -526,6 +529,9 @@
     class EffectChain;
 
     struct AudioStreamIn;
+    struct TeePatch;
+    using TeePatches = std::vector<TeePatch>;
+
 
     struct  stream_type_t {
         stream_type_t()
@@ -725,6 +731,11 @@
             audioHwDev(dev), stream(in), flags(flags) {}
     };
 
+    struct TeePatch {
+        sp<RecordThread::PatchRecord> patchRecord;
+        sp<PlaybackThread::PatchTrack> patchTrack;
+    };
+
     // for mAudioSessionRefs only
     struct AudioSessionRef {
         AudioSessionRef(audio_session_t sessionid, pid_t pid) :
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index bad3ca8..3f62bc3 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -43,9 +43,8 @@
 
             void        appendDumpHeader(String8& result);
             void        appendDump(String8& result, bool active);
-    virtual status_t    start(AudioSystem::sync_event_t event =
-                                    AudioSystem::SYNC_EVENT_NONE,
-                             audio_session_t triggerSession = AUDIO_SESSION_NONE);
+    virtual status_t    start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
+                              audio_session_t triggerSession = AUDIO_SESSION_NONE);
     virtual void        stop();
             void        pause();
 
@@ -129,6 +128,8 @@
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
 
+            void    setTeePatches(TeePatches teePatches);
+
 protected:
     // for numerous
     friend class PlaybackThread;
@@ -139,8 +140,8 @@
     DISALLOW_COPY_AND_ASSIGN(Track);
 
     // AudioBufferProvider interface
-    virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer);
-    // releaseBuffer() not overridden
+    status_t getNextBuffer(AudioBufferProvider::Buffer* buffer) override;
+    void releaseBuffer(AudioBufferProvider::Buffer* buffer) override;
 
     // ExtendedAudioBufferProvider interface
     virtual size_t framesReady() const;
@@ -220,6 +221,12 @@
     sp<os::ExternalVibration>    mExternalVibration;
 
 private:
+    void                interceptBuffer(const AudioBufferProvider::Buffer& buffer);
+    template <class F>
+    void                forEachTeePatchTrack(F f) {
+        for (auto& tp : mTeePatches) { f(tp.patchTrack); }
+    };
+
     // The following fields are only for fast tracks, and should be in a subclass
     int                 mFastIndex; // index within FastMixerState::mFastTracks[];
                                     // either mFastIndex == -1 if not isFastTrack()
@@ -239,6 +246,7 @@
     audio_output_flags_t mFlags;
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
+    TeePatches  mTeePatches;
 };  // end of Track
 
 
@@ -318,7 +326,7 @@
 };  // end of OutputTrack
 
 // playback track, used by PatchPanel
-class PatchTrack : public Track, public PatchProxyBufferProvider {
+class PatchTrack : public Track, public PatchTrackBase {
 public:
 
                         PatchTrack(PlaybackThread *playbackThread,
@@ -329,7 +337,8 @@
                                    size_t frameCount,
                                    void *buffer,
                                    size_t bufferSize,
-                                   audio_output_flags_t flags);
+                                   audio_output_flags_t flags,
+                                   const Timeout& timeout = {});
     virtual             ~PatchTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event =
@@ -345,12 +354,7 @@
                                      const struct timespec *timeOut = NULL);
     virtual void        releaseBuffer(Proxy::Buffer* buffer);
 
-            void setPeerProxy(PatchProxyBufferProvider *proxy) { mPeerProxy = proxy; }
-
 private:
             void restartIfDisabled();
 
-    sp<ClientProxy>             mProxy;
-    PatchProxyBufferProvider*   mPeerProxy;
-    struct timespec             mPeerTimeout;
 };  // end of PatchTrack
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 32af7d5..ab4af33 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -113,7 +113,7 @@
 };
 
 // playback track, used by PatchPanel
-class PatchRecord : virtual public RecordTrack, public PatchProxyBufferProvider {
+class PatchRecord : public RecordTrack, public PatchTrackBase {
 public:
 
     PatchRecord(RecordThread *recordThread,
@@ -123,7 +123,8 @@
                 size_t frameCount,
                 void *buffer,
                 size_t bufferSize,
-                audio_input_flags_t flags);
+                audio_input_flags_t flags,
+                const Timeout& timeout = {});
     virtual             ~PatchRecord();
 
     // AudioBufferProvider interface
@@ -134,11 +135,4 @@
     virtual status_t    obtainBuffer(Proxy::Buffer *buffer,
                                      const struct timespec *timeOut = NULL);
     virtual void        releaseBuffer(Proxy::Buffer *buffer);
-
-    void setPeerProxy(PatchProxyBufferProvider *proxy) { mPeerProxy = proxy; }
-
-private:
-    sp<ClientProxy>             mProxy;
-    PatchProxyBufferProvider*   mPeerProxy;
-    struct timespec             mPeerTimeout;
 };  // end of PatchRecord
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 5a70864..dd1eabf 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8464,6 +8464,7 @@
         audio_output_flags_t flags =
                 (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
         audio_port_handle_t deviceId = mDeviceId;
+        std::vector<audio_io_handle_t> secondaryOutputs;
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
@@ -8472,7 +8473,10 @@
                                             &config,
                                             flags,
                                             &deviceId,
-                                            &portId);
+                                            &portId,
+                                            &secondaryOutputs);
+        ALOGD_IF(!secondaryOutputs.empty(),
+                 "MmapThread::start does not support secondary outputs, ignoring them");
     } else {
         audio_config_base_t config;
         config.sample_rate = mSampleRate;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index c94639b..0ba0ab4 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -329,3 +329,19 @@
                                      const struct timespec *requested = NULL) = 0;
     virtual void        releaseBuffer(Proxy::Buffer* buffer) = 0;
 };
+
+class PatchTrackBase : public PatchProxyBufferProvider
+{
+public:
+    using Timeout = std::optional<std::chrono::nanoseconds>;
+                        PatchTrackBase(sp<ClientProxy> proxy, const ThreadBase& thread,
+                                       const Timeout& timeout);
+            void        setPeerTimeout(std::chrono::nanoseconds timeout);
+            void        setPeerProxy(PatchProxyBufferProvider *proxy) { mPeerProxy = proxy; }
+
+protected:
+    const sp<ClientProxy>       mProxy;
+    PatchProxyBufferProvider*   mPeerProxy = nullptr;
+    struct timespec             mPeerTimeout{};
+
+};
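
When no explicit Timeout is passed, the PatchTrackBase constructor (in Tracks.cpp below) falls back to two mix buffers' worth of time on the attached thread, the "double buffer mixer" default, and setPeerTimeout() splits that into the timespec used for obtainBuffer(). A worked sketch of the conversion with an illustrative frame count and sample rate:

    #include <cstdint>

    // Illustrative only: a 480-frame mixer at 48 kHz gives a 20 ms peer timeout.
    constexpr uint64_t kFrameCount  = 480;
    constexpr uint32_t kSampleRate  = 48000;
    constexpr uint64_t kMixBufferNs =
            ((uint64_t)2 * kFrameCount * 1000000000) / kSampleRate;  // 20000000 ns
    // setPeerTimeout(std::chrono::nanoseconds{kMixBufferNs}) then stores
    //   tv_sec  = kMixBufferNs / std::nano::den = 0
    //   tv_nsec = kMixBufferNs % std::nano::den = 20000000
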
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index e4af656..57dd568 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -99,7 +99,7 @@
         mId(android_atomic_inc(&nextTrackId)),
         mTerminated(false),
         mType(type),
-        mThreadIoHandle(thread->id()),
+        mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
         mPortId(portId),
         mIsInvalid(false)
 {
@@ -277,6 +277,27 @@
     return NO_ERROR;
 }
 
+AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(sp<ClientProxy> proxy,
+                                                         const ThreadBase& thread,
+                                                         const Timeout& timeout)
+    : mProxy(proxy)
+{
+    if (timeout) {
+        setPeerTimeout(*timeout);
+    } else {
+        // Double buffer mixer
+        uint64_t mixBufferNs = ((uint64_t)2 * thread.frameCount() * 1000000000) /
+                                              thread.sampleRate();
+        setPeerTimeout(std::chrono::nanoseconds{mixBufferNs});
+    }
+}
+
+void AudioFlinger::ThreadBase::PatchTrackBase::setPeerTimeout(std::chrono::nanoseconds timeout) {
+    mPeerTimeout.tv_sec = timeout.count() / std::nano::den;
+    mPeerTimeout.tv_nsec = timeout.count() % std::nano::den;
+}
+
+
 // ----------------------------------------------------------------------------
 //      Playback
 // ----------------------------------------------------------------------------
@@ -504,6 +525,7 @@
             AudioSystem::releaseOutput(mPortId);
         }
     }
+    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
 }
 
 void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result)
@@ -649,8 +671,7 @@
 }
 
 // AudioBufferProvider interface
-status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(
-        AudioBufferProvider::Buffer* buffer)
+status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(AudioBufferProvider::Buffer* buffer)
 {
     ServerProxy::Buffer buf;
     size_t desiredFrames = buffer->frameCount;
@@ -665,10 +686,39 @@
     } else {
         mAudioTrackServerProxy->tallyUnderrunFrames(0);
     }
-
     return status;
 }
 
+void AudioFlinger::PlaybackThread::Track::releaseBuffer(AudioBufferProvider::Buffer* buffer)
+{
+    interceptBuffer(*buffer);
+    TrackBase::releaseBuffer(buffer);
+}
+
+// TODO: compensate for time shift between HW modules.
+void AudioFlinger::PlaybackThread::Track::interceptBuffer(
+        const AudioBufferProvider::Buffer& buffer) {
+    for (auto& sink : mTeePatches) {
+        RecordThread::PatchRecord& patchRecord = *sink.patchRecord;
+        AudioBufferProvider::Buffer patchBuffer;
+        patchBuffer.frameCount = buffer.frameCount;
+        auto status = patchRecord.getNextBuffer(&patchBuffer);
+        if (status != NO_ERROR) {
+           ALOGW("%s PathRecord getNextBuffer failed with error %d: %s",
+                 __func__, status, strerror(-status));
+           continue;
+        }
+        // FIXME: On buffer wrap, the frame count will be less than requested,
+        //        retry to write the rest. (unlikely due to lcm buffer sizing)
+        ALOGW_IF(patchBuffer.frameCount != buffer.frameCount,
+                 "%s PatchRecord can not provide big enough buffer %zu/%zu, dropping %zu frames",
+                 __func__, patchBuffer.frameCount, buffer.frameCount,
+                 buffer.frameCount - patchBuffer.frameCount);
+        memcpy(patchBuffer.raw, buffer.raw, patchBuffer.frameCount * mFrameSize);
+        patchRecord.releaseBuffer(&patchBuffer);
+    }
+}
+
 // releaseBuffer() is not overridden
 
 // ExtendedAudioBufferProvider interface
@@ -816,6 +866,9 @@
     } else {
         status = BAD_VALUE;
     }
+    if (status == NO_ERROR) {
+        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+    }
     return status;
 }
 
@@ -849,6 +902,7 @@
                     __func__, mId, (int)mThreadIoHandle);
         }
     }
+    forEachTeePatchTrack([](auto patchTrack) { patchTrack->stop(); });
 }
 
 void AudioFlinger::PlaybackThread::Track::pause()
@@ -881,6 +935,8 @@
             break;
         }
     }
+    // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
+    forEachTeePatchTrack([](auto patchTrack) { patchTrack->pause(); });
 }
 
 void AudioFlinger::PlaybackThread::Track::flush()
@@ -942,6 +998,8 @@
         // because the hardware buffer could hold a large amount of audio
         playbackThread->broadcast_l();
     }
+    // Flush the Tee to avoid playing old data on resume and glitching on the transition to new data.
+    forEachTeePatchTrack([](auto patchTrack) { patchTrack->flush(); });
 }
 
 // must be called with thread lock held
@@ -1060,6 +1118,11 @@
     };
 }
 
+void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
+    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+    mTeePatches = std::move(teePatches);
+}
+
 status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp)
 {
     if (!isOffloaded() && !isDirect()) {
@@ -1615,19 +1678,16 @@
                                                      size_t frameCount,
                                                      void *buffer,
                                                      size_t bufferSize,
-                                                     audio_output_flags_t flags)
+                                                     audio_output_flags_t flags,
+                                                     const Timeout& timeout)
     :   Track(playbackThread, NULL, streamType,
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, AID_AUDIOSERVER, flags, TYPE_PATCH),
-              mProxy(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true))
+        PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true),
+                       *playbackThread, timeout)
 {
-    uint64_t mixBufferNs = ((uint64_t)2 * playbackThread->frameCount() * 1000000000) /
-                                                                    playbackThread->sampleRate();
-    mPeerTimeout.tv_sec = mixBufferNs / 1000000000;
-    mPeerTimeout.tv_nsec = (int) (mixBufferNs % 1000000000);
-
     ALOGV("%s(%d): sampleRate %d mPeerTimeout %d.%03d sec",
                                       __func__, mId, sampleRate,
                                       (int)mPeerTimeout.tv_sec,
@@ -2088,19 +2148,16 @@
                                                      size_t frameCount,
                                                      void *buffer,
                                                      size_t bufferSize,
-                                                     audio_input_flags_t flags)
+                                                     audio_input_flags_t flags,
+                                                     const Timeout& timeout)
     :   RecordTrack(recordThread, NULL,
                 audio_attributes_t{} /* currently unused for patch track */,
                 sampleRate, format, channelMask, frameCount,
                 buffer, bufferSize, AUDIO_SESSION_NONE, AID_AUDIOSERVER,
                 flags, TYPE_PATCH),
-                mProxy(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true))
+        PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true),
+                       *recordThread, timeout)
 {
-    uint64_t mixBufferNs = ((uint64_t)2 * recordThread->frameCount() * 1000000000) /
-                                                                recordThread->sampleRate();
-    mPeerTimeout.tv_sec = mixBufferNs / 1000000000;
-    mPeerTimeout.tv_nsec = (int) (mixBufferNs % 1000000000);
-
     ALOGV("%s(%d): sampleRate %d mPeerTimeout %d.%03d sec",
                                       __func__, mId, sampleRate,
                                       (int)mPeerTimeout.tv_sec,
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index d7030f9..bb5441d 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -110,7 +110,8 @@
                                         const audio_config_t *config,
                                         audio_output_flags_t *flags,
                                         audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId) = 0;
+                                        audio_port_handle_t *portId,
+                                        std::vector<audio_io_handle_t> *secondaryOutputs) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
     // indicates to the audio policy manager that the output stops being used by corresponding stream.
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 7296c95..1abce6f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -68,13 +68,12 @@
      * Try to find an output descriptor for the given attributes.
      *
      * @param[in] attributes to consider for the search of an output descriptor.
-     * @param[out] desc to return if an output could be found.
-     *
-     * @return NO_ERROR if an output was found for the given attribute (in this case, the
-     *                  descriptor output param is initialized), error code otherwise.
+     * @param[out] primaryDesc to return the primary output descriptor if one could be found.
+     * @param[out] secondaryDescs other descriptors that the audio should also be routed to.
      */
     status_t getOutputForAttr(audio_attributes_t attributes, uid_t uid,
-            sp<SwAudioOutputDescriptor> &desc);
+                sp<SwAudioOutputDescriptor> &primaryDesc,
+                std::vector<sp<SwAudioOutputDescriptor>> *secondaryDescs);
 
     sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
                                                        const DeviceVector &availableDeviceTypes,
@@ -99,6 +98,11 @@
     status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
 
     void dump(String8 *dst) const;
+
+private:
+    enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
+    MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
+                            audio_attributes_t attributes, uid_t uid);
 };
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 4c069e4..2e44a60 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -86,10 +86,12 @@
                           audio_attributes_t attributes, audio_config_base_t config,
                           audio_port_handle_t preferredDeviceId, audio_stream_type_t stream,
                           product_strategy_t strategy, audio_output_flags_t flags,
-                          bool isPreferredDeviceForExclusiveUse) :
+                          bool isPreferredDeviceForExclusiveUse,
+                          std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs) :
         ClientDescriptor(portId, uid, sessionId, attributes, config, preferredDeviceId,
                          isPreferredDeviceForExclusiveUse),
-        mStream(stream), mStrategy(strategy), mFlags(flags) {}
+        mStream(stream), mStrategy(strategy), mFlags(flags),
+        mSecondaryOutputs(std::move(secondaryOutputs)) {}
     ~TrackClientDescriptor() override = default;
 
     using ClientDescriptor::dump;
@@ -99,11 +101,15 @@
     audio_output_flags_t flags() const { return mFlags; }
     audio_stream_type_t stream() const { return mStream; }
     product_strategy_t strategy() const { return mStrategy; }
+    const std::vector<wp<SwAudioOutputDescriptor>>& getSecondaryOutputs() const {
+        return mSecondaryOutputs;
+    };
 
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
     const audio_output_flags_t mFlags;
+    const std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 2489e76..6b6d9d2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -156,128 +156,163 @@
     }
 }
 
-status_t AudioPolicyMixCollection::getOutputForAttr(audio_attributes_t attributes, uid_t uid,
-                                                    sp<SwAudioOutputDescriptor> &desc)
+status_t AudioPolicyMixCollection::getOutputForAttr(
+        audio_attributes_t attributes, uid_t uid, sp<SwAudioOutputDescriptor> &primaryDesc,
+        std::vector<sp<SwAudioOutputDescriptor>> *secondaryDescs)
 {
     ALOGV("getOutputForAttr() querying %zu mixes:", size());
-    desc = 0;
+    primaryDesc = 0;
     for (size_t i = 0; i < size(); i++) {
         sp<AudioPolicyMix> policyMix = valueAt(i);
+        sp<SwAudioOutputDescriptor> policyDesc = policyMix->getOutput();
+        if (!policyDesc) {
+            ALOGV("%s: Skiping %zu: Mix has no output", __func__, i);
+            continue;
+        }
+
         AudioMix *mix = policyMix->getMix();
+        const bool primaryOutputMix = !is_mix_loopback_render(mix->mRouteFlags);
 
-        if (mix->mMixType == MIX_TYPE_PLAYERS) {
-            // TODO if adding more player rules (currently only 2), make rule handling "generic"
-            //      as there is no difference in the treatment of usage- or uid-based rules
-            bool hasUsageMatchRules = false;
-            bool hasUsageExcludeRules = false;
-            bool usageMatchFound = false;
-            bool usageExclusionFound = false;
+        if (primaryOutputMix && primaryDesc != 0) {
+            ALOGV("%s: Skiping %zu: Primary output already found", __func__, i);
+            continue; // Primary output already found
+        }
 
-            bool hasUidMatchRules = false;
-            bool hasUidExcludeRules = false;
-            bool uidMatchFound = false;
-            bool uidExclusionFound = false;
+        switch (mixMatch(mix, i, attributes, uid)) {
+            case MixMatchStatus::INVALID_MIX: return BAD_VALUE; // TODO: Do we really want to abort?
+            case MixMatchStatus::NO_MATCH:
+                ALOGV("%s: Mix %zu: does not match", __func__, i);
+                continue; // skip the mix
+            case MixMatchStatus::MATCH:;
+        }
 
-            bool hasAddrMatch = false;
-
-            // iterate over all mix criteria to list what rules this mix contains
-            for (size_t j = 0; j < mix->mCriteria.size(); j++) {
-                ALOGV(" getOutputForAttr: mix %zu: inspecting mix criteria %zu of %zu",
-                        i, j, mix->mCriteria.size());
-
-                // if there is an address match, prioritize that match
-                if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
-                        strncmp(attributes.tags + strlen("addr="),
-                                mix->mDeviceAddress.string(),
-                                AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-                    hasAddrMatch = true;
-                    break;
-                }
-
-                switch (mix->mCriteria[j].mRule) {
-                case RULE_MATCH_ATTRIBUTE_USAGE:
-                    ALOGV("\tmix has RULE_MATCH_ATTRIBUTE_USAGE for usage %d",
-                                                mix->mCriteria[j].mValue.mUsage);
-                    hasUsageMatchRules = true;
-                    if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
-                        // found one match against all allowed usages
-                        usageMatchFound = true;
-                    }
-                    break;
-                case RULE_EXCLUDE_ATTRIBUTE_USAGE:
-                    ALOGV("\tmix has RULE_EXCLUDE_ATTRIBUTE_USAGE for usage %d",
-                            mix->mCriteria[j].mValue.mUsage);
-                    hasUsageExcludeRules = true;
-                    if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
-                        // found this usage is to be excluded
-                        usageExclusionFound = true;
-                    }
-                    break;
-                case RULE_MATCH_UID:
-                    ALOGV("\tmix has RULE_MATCH_UID for uid %d", mix->mCriteria[j].mValue.mUid);
-                    hasUidMatchRules = true;
-                    if (mix->mCriteria[j].mValue.mUid == uid) {
-                        // found one UID match against all allowed UIDs
-                        uidMatchFound = true;
-                    }
-                    break;
-                case RULE_EXCLUDE_UID:
-                    ALOGV("\tmix has RULE_EXCLUDE_UID for uid %d", mix->mCriteria[j].mValue.mUid);
-                    hasUidExcludeRules = true;
-                    if (mix->mCriteria[j].mValue.mUid == uid) {
-                        // found this UID is to be excluded
-                        uidExclusionFound = true;
-                    }
-                    break;
-                default:
-                    break;
-                }
-
-                // consistency checks: for each "dimension" of rules (usage, uid...), we can
-                // only have MATCH rules, or EXCLUDE rules in each dimension, not a combination
-                if (hasUsageMatchRules && hasUsageExcludeRules) {
-                    ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_ATTRIBUTE_USAGE"
-                            " and RULE_EXCLUDE_ATTRIBUTE_USAGE in mix %zu", i);
-                    return BAD_VALUE;
-                }
-                if (hasUidMatchRules && hasUidExcludeRules) {
-                    ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_UID"
-                            " and RULE_EXCLUDE_UID in mix %zu", i);
-                    return BAD_VALUE;
-                }
-
-                if ((hasUsageExcludeRules && usageExclusionFound)
-                        || (hasUidExcludeRules && uidExclusionFound)) {
-                    break; // stop iterating on criteria because an exclusion was found (will fail)
-                }
-
-            }//iterate on mix criteria
-
-            // determine if exiting on success (or implicit failure as desc is 0)
-            if (hasAddrMatch ||
-                    !((hasUsageExcludeRules && usageExclusionFound) ||
-                      (hasUsageMatchRules && !usageMatchFound)  ||
-                      (hasUidExcludeRules && uidExclusionFound) ||
-                      (hasUidMatchRules && !uidMatchFound))) {
-                ALOGV("\tgetOutputForAttr will use mix %zu", i);
-                desc = policyMix->getOutput();
+        policyDesc->mPolicyMix = mix;
+        if (primaryOutputMix) {
+            primaryDesc = policyDesc;
+            ALOGV("%s: Mix %zu: set primary desc", __func__, i);
+        } else {
+            if (policyDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) {
+                ALOGV("%s: Mix %zu ignored as secondaryOutput because not opened yet", __func__, i);
+            } else {
+                ALOGV("%s: Add a secondary desc %zu", __func__, i);
+                secondaryDescs->push_back(policyDesc);
             }
+        }
+    }
+    return (primaryDesc == nullptr && secondaryDescs->empty()) ? BAD_VALUE : NO_ERROR;
+}
 
-        } else if (mix->mMixType == MIX_TYPE_RECORDERS) {
-            if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
-                    strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
+AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
+        const AudioMix* mix, size_t mixIndex, audio_attributes_t attributes, uid_t uid) {
+
+    if (mix->mMixType == MIX_TYPE_PLAYERS) {
+        // TODO if adding more player rules (currently only 2), make rule handling "generic"
+        //      as there is no difference in the treatment of usage- or uid-based rules
+        bool hasUsageMatchRules = false;
+        bool hasUsageExcludeRules = false;
+        bool usageMatchFound = false;
+        bool usageExclusionFound = false;
+
+        bool hasUidMatchRules = false;
+        bool hasUidExcludeRules = false;
+        bool uidMatchFound = false;
+        bool uidExclusionFound = false;
+
+        bool hasAddrMatch = false;
+
+        // iterate over all mix criteria to list what rules this mix contains
+        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+            ALOGV(" getOutputForAttr: mix %zu: inspecting mix criteria %zu of %zu",
+                    mixIndex, j, mix->mCriteria.size());
+
+            // if there is an address match, prioritize that match
+            if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
                     strncmp(attributes.tags + strlen("addr="),
                             mix->mDeviceAddress.string(),
                             AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-                desc = policyMix->getOutput();
+                hasAddrMatch = true;
+                break;
             }
+
+            switch (mix->mCriteria[j].mRule) {
+            case RULE_MATCH_ATTRIBUTE_USAGE:
+                ALOGV("\tmix has RULE_MATCH_ATTRIBUTE_USAGE for usage %d",
+                                            mix->mCriteria[j].mValue.mUsage);
+                hasUsageMatchRules = true;
+                if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
+                    // found one match against all allowed usages
+                    usageMatchFound = true;
+                }
+                break;
+            case RULE_EXCLUDE_ATTRIBUTE_USAGE:
+                ALOGV("\tmix has RULE_EXCLUDE_ATTRIBUTE_USAGE for usage %d",
+                        mix->mCriteria[j].mValue.mUsage);
+                hasUsageExcludeRules = true;
+                if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
+                    // found this usage is to be excluded
+                    usageExclusionFound = true;
+                }
+                break;
+            case RULE_MATCH_UID:
+                ALOGV("\tmix has RULE_MATCH_UID for uid %d", mix->mCriteria[j].mValue.mUid);
+                hasUidMatchRules = true;
+                if (mix->mCriteria[j].mValue.mUid == uid) {
+                    // found one UID match against all allowed UIDs
+                    uidMatchFound = true;
+                }
+                break;
+            case RULE_EXCLUDE_UID:
+                ALOGV("\tmix has RULE_EXCLUDE_UID for uid %d", mix->mCriteria[j].mValue.mUid);
+                hasUidExcludeRules = true;
+                if (mix->mCriteria[j].mValue.mUid == uid) {
+                    // found this UID is to be excluded
+                    uidExclusionFound = true;
+                }
+                break;
+            default:
+                break;
+            }
+
+            // consistency checks: for each "dimension" of rules (usage, uid...), we can
+            // only have MATCH rules, or EXCLUDE rules in each dimension, not a combination
+            if (hasUsageMatchRules && hasUsageExcludeRules) {
+                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_ATTRIBUTE_USAGE"
+                        " and RULE_EXCLUDE_ATTRIBUTE_USAGE in mix %zu", mixIndex);
+                return MixMatchStatus::INVALID_MIX;
+            }
+            if (hasUidMatchRules && hasUidExcludeRules) {
+                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_UID"
+                        " and RULE_EXCLUDE_UID in mix %zu", mixIndex);
+                return MixMatchStatus::INVALID_MIX;
+            }
+
+            if ((hasUsageExcludeRules && usageExclusionFound)
+                    || (hasUidExcludeRules && uidExclusionFound)) {
+                break; // stop iterating on criteria because an exclusion was found (will fail)
+            }
+
+        }//iterate on mix criteria
+
+        // determine whether this mix matches: either an address match, or no exclusion hit
+        if (hasAddrMatch ||
+                !((hasUsageExcludeRules && usageExclusionFound) ||
+                  (hasUsageMatchRules && !usageMatchFound)  ||
+                  (hasUidExcludeRules && uidExclusionFound) ||
+                  (hasUidMatchRules && !uidMatchFound))) {
+            ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
+            return MixMatchStatus::MATCH;
         }
-        if (desc != 0) {
-            desc->mPolicyMix = mix;
-            return NO_ERROR;
+
+    } else if (mix->mMixType == MIX_TYPE_RECORDERS) {
+        if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
+                strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
+                strncmp(attributes.tags + strlen("addr="),
+                        mix->mDeviceAddress.string(),
+                        AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
+            return MixMatchStatus::MATCH;
         }
     }
-    return BAD_VALUE;
+    return MixMatchStatus::NO_MATCH;
 }
 
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
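
The extracted mixMatch() helper keeps the original rule semantics: within one dimension
(usage or uid) a mix may carry MATCH rules or EXCLUDE rules but not both, an exclusion hit or
an unmet match dimension rejects the mix, and an address tag match short-circuits to a match.
A condensed standalone sketch of that decision over a single illustrative rule dimension
(types and names below are not the AOSP ones):

    #include <iostream>
    #include <vector>

    enum class Kind { MATCH, EXCLUDE };
    struct Rule { Kind kind; int value; };

    enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };

    MixMatchStatus evaluate(const std::vector<Rule>& rules, int value) {
        bool hasMatch = false, hasExclude = false, matched = false, excluded = false;
        for (const Rule& rule : rules) {
            if (rule.kind == Kind::MATCH) {
                hasMatch = true;
                matched = matched || (rule.value == value);
            } else {
                hasExclude = true;
                excluded = excluded || (rule.value == value);
            }
            // MATCH and EXCLUDE rules in the same dimension are inconsistent.
            if (hasMatch && hasExclude) return MixMatchStatus::INVALID_MIX;
        }
        if ((hasExclude && excluded) || (hasMatch && !matched)) return MixMatchStatus::NO_MATCH;
        return MixMatchStatus::MATCH;
    }

    int main() {
        std::vector<Rule> allowOnly{{Kind::MATCH, 1}, {Kind::MATCH, 2}};
        std::cout << (evaluate(allowOnly, 2) == MixMatchStatus::MATCH ? "match" : "no match")
                  << "\n";
        return 0;
    }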
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index a6f6c3b..633c40e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -85,7 +85,8 @@
          product_strategy_t strategy) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         AUDIO_CONFIG_BASE_INITIALIZER, AUDIO_PORT_HANDLE_NONE,
-        stream, strategy, AUDIO_OUTPUT_FLAG_NONE, false),
+        stream, strategy, AUDIO_OUTPUT_FLAG_NONE, false,
+        {} /* Sources do not support secondary outputs */),
         mPatchDesc(patchDesc), mSrcDevice(srcDevice)
 {
 }
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 6f48eae..7c76d8a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -45,6 +45,7 @@
 const RouteFlagTypeConverter::Table RouteFlagTypeConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_RENDER),
     MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_LOOP_BACK),
+    MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER),
     MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_ALL),
     TERMINATOR
 };
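
Adding MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER to the converter table teaches the string/enum
conversion about the new flag, because the table macro records the enumerator and its spelling
in one place. A toy version of that stringification pattern (enumerator values and names below
are illustrative):

    #include <iostream>

    enum RouteFlag { RENDER = 0x1, LOOP_BACK = 0x2, LOOP_BACK_AND_RENDER = 0x3 };

    struct Entry { const char* literal; RouteFlag value; };
    #define MAKE_STRING_FROM_ENUM(e) { #e, e }

    static const Entry kRouteFlagTable[] = {
        MAKE_STRING_FROM_ENUM(RENDER),
        MAKE_STRING_FROM_ENUM(LOOP_BACK),
        MAKE_STRING_FROM_ENUM(LOOP_BACK_AND_RENDER),  // one-line addition per new flag
    };

    const char* toString(RouteFlag flag) {
        for (const Entry& entry : kRouteFlagTable) {
            if (entry.value == flag) return entry.literal;
        }
        return "unknown";
    }

    int main() {
        std::cout << toString(LOOP_BACK_AND_RENDER) << "\n";  // prints LOOP_BACK_AND_RENDER
        return 0;
    }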
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 0eee3f2..ccec93f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -33,9 +33,12 @@
 #define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
 #define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
         "audio_policy_configuration_a2dp_offload_disabled.xml"
+#define AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME \
+        "audio_policy_configuration_bluetooth_hal_enabled.xml"
 
 #include <inttypes.h>
 #include <math.h>
+#include <set>
 #include <unordered_set>
 #include <vector>
 
@@ -907,16 +910,18 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getOutputForAttrInt(audio_attributes_t *resultAttr,
-                                                 audio_io_handle_t *output,
-                                                 audio_session_t session,
-                                                 const audio_attributes_t *attr,
-                                                 audio_stream_type_t *stream,
-                                                 uid_t uid,
-                                                 const audio_config_t *config,
-                                                 audio_output_flags_t *flags,
-                                                 audio_port_handle_t *selectedDeviceId,
-                                                 bool *isRequestedDeviceForExclusiveUse)
+status_t AudioPolicyManager::getOutputForAttrInt(
+        audio_attributes_t *resultAttr,
+        audio_io_handle_t *output,
+        audio_session_t session,
+        const audio_attributes_t *attr,
+        audio_stream_type_t *stream,
+        uid_t uid,
+        const audio_config_t *config,
+        audio_output_flags_t *flags,
+        audio_port_handle_t *selectedDeviceId,
+        bool *isRequestedDeviceForExclusiveUse,
+        std::vector<sp<SwAudioOutputDescriptor>> *secondaryDescs)
 {
     DeviceVector outputDevices;
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -935,19 +940,26 @@
     ALOGV("%s() attributes=%s stream=%s session %d selectedDeviceId %d", __func__,
           toString(*resultAttr).c_str(), toString(*stream).c_str(), session, requestedPortId);
 
-    // 1/ First check for explicit routing (eg. setPreferredDevice): NOTE: now handled by engine
-    // 2/ If no explict route, is there a matching dynamic policy that applies?
-    //    NOTE: new engine product strategy does not make use of dynamic routing, keep it for
-    //          remote-submix and legacy
-    sp<SwAudioOutputDescriptor> desc;
-    if (requestedDevice == nullptr &&
-            mPolicyMixes.getOutputForAttr(*resultAttr, uid, desc) == NO_ERROR) {
-        ALOG_ASSERT(desc != 0, "Invalid desc returned by getOutputForAttr");
-        if (!audio_has_proportional_frames(config->format)) {
-            return BAD_VALUE;
-        }
-        *output = desc->mIoHandle;
-        AudioMix *mix = desc->mPolicyMix;
+    // The primary output is the explicit routing (e.g. setPreferredDevice) if specified,
+    //       otherwise, fall back to the dynamic policies; if none match, query the engine.
+    // Secondary outputs are always found by dynamic policies, as the engine does not support them.
+    sp<SwAudioOutputDescriptor> policyDesc;
+    if (mPolicyMixes.getOutputForAttr(*resultAttr, uid, policyDesc, secondaryDescs) != NO_ERROR) {
+        policyDesc = nullptr; // reset the dynamic policy output in case of failure
+        secondaryDescs->clear();
+    }
+    // Explicit routing has higher priority than any dynamic policy primary output
+    bool usePrimaryOutputFromPolicyMixes = requestedDevice == nullptr && policyDesc != nullptr;
+
+    // FIXME: in case of RENDER policy, the output capabilities should be checked
+    if ((usePrimaryOutputFromPolicyMixes || !secondaryDescs->empty())
+        && !audio_has_proportional_frames(config->format)) {
+        ALOGW("%s: audio loopback only supports proportional frames", __func__);
+        return BAD_VALUE;
+    }
+    if (usePrimaryOutputFromPolicyMixes) {
+        *output = policyDesc->mIoHandle;
+        AudioMix *mix = policyDesc->mPolicyMix;
         sp<DeviceDescriptor> deviceDesc =
                 mAvailableOutputDevices.getDevice(mix->mDeviceType,
                                                   mix->mDeviceAddress,
@@ -1022,7 +1034,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t *flags,
                                               audio_port_handle_t *selectedDeviceId,
-                                              audio_port_handle_t *portId)
+                                              audio_port_handle_t *portId,
+                                              std::vector<audio_io_handle_t> *secondaryOutputs)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1031,11 +1044,18 @@
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
     audio_attributes_t resultAttr;
     bool isRequestedDeviceForExclusiveUse = false;
+    std::vector<sp<SwAudioOutputDescriptor>> secondaryOutputDescs;
     status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
-            config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse);
+            config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+            &secondaryOutputDescs);
     if (status != NO_ERROR) {
         return status;
     }
+    std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryOutputDescs;
+    for (auto& secondaryDesc : secondaryOutputDescs) {
+        secondaryOutputs->push_back(secondaryDesc->mIoHandle);
+        weakSecondaryOutputDescs.push_back(secondaryDesc);
+    }
 
     audio_config_base_t clientConfig = {.sample_rate = config->sample_rate,
         .format = config->format,
@@ -1046,7 +1066,8 @@
         new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
                                   requestedPortId, *stream,
                                   mEngine->getProductStrategyForAttributes(resultAttr),
-                                  *flags, isRequestedDeviceForExclusiveUse);
+                                  *flags, isRequestedDeviceForExclusiveUse,
+                                  std::move(weakSecondaryOutputDescs));
     sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
     outputDesc->addClient(clientDesc);
 
@@ -1562,13 +1583,15 @@
         policyMix = outputDesc->mPolicyMix;
         audio_devices_t newDeviceType;
         address = policyMix->mDeviceAddress.string();
-        if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
-            newDeviceType = policyMix->mDeviceType;
-        } else {
+        if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
             newDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+        } else {
+            newDeviceType = policyMix->mDeviceType;
         }
-        devices.add(mAvailableOutputDevices.getDevice(newDeviceType,
-                                                      String8(address), AUDIO_FORMAT_DEFAULT));
+        sp device = mAvailableOutputDevices.getDevice(newDeviceType, String8(address),
+                                                        AUDIO_FORMAT_DEFAULT);
+        ALOG_ASSERT(device, "%s: no device found t=%u, a=%s", __func__, newDeviceType, address);
+        devices.add(device);
     }
 
     // requiresMuteCheck is false when we can bypass mute strategy.
@@ -2609,18 +2632,24 @@
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
         AudioMix mix = mixes[i];
-        // we only support MIX_ROUTE_FLAG_LOOP_BACK or MIX_ROUTE_FLAG_RENDER, not the combination
-        if ((mix.mRouteFlags & MIX_ROUTE_FLAG_ALL) == MIX_ROUTE_FLAG_ALL) {
+        // Only capture of playback is allowed in LOOP_BACK & RENDER mode
+        if (is_mix_loopback_render(mix.mRouteFlags) && mix.mMixType != MIX_TYPE_PLAYERS) {
+            ALOGE("Unsupported Policy Mix %zu of %zu: "
+                  "Only capture of playback is allowed in LOOP_BACK & RENDER mode",
+                   i, mixes.size());
             res = INVALID_OPERATION;
             break;
         }
+        // LOOP_BACK and LOOP_BACK | RENDER have the same remote submix backend and are handled
+        // in the same way.
         if ((mix.mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
-            ALOGV("registerPolicyMixes() mix %zu of %zu is LOOP_BACK", i, mixes.size());
+            ALOGV("registerPolicyMixes() mix %zu of %zu is LOOP_BACK %d", i, mixes.size(),
+                  mix.mRouteFlags);
             if (rSubmixModule == 0) {
                 rSubmixModule = mHwModules.getModuleFromName(
                         AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
                 if (rSubmixModule == 0) {
-                    ALOGE(" Unable to find audio module for submix, aborting mix %zu registration",
+                    ALOGE("Unable to find audio module for submix, aborting mix %zu registration",
                             i);
                     res = INVALID_OPERATION;
                     break;
@@ -2635,7 +2664,7 @@
             }
 
             if (mPolicyMixes.registerMix(address, mix, 0 /*output desc*/) != NO_ERROR) {
-                ALOGE(" Error registering mix %zu for address %s", i, address.string());
+                ALOGE("Error registering mix %zu for address %s", i, address.string());
                 res = INVALID_OPERATION;
                 break;
             }
@@ -2679,6 +2708,8 @@
 
                 if (desc->supportedDevices().contains(device)) {
                     if (mPolicyMixes.registerMix(address, mix, desc) != NO_ERROR) {
+                        ALOGE("Could not register mix RENDER,  dev=0x%X addr=%s", type,
+                              address.string());
                         res = INVALID_OPERATION;
                     } else {
                         foundOutput = true;
@@ -2746,7 +2777,7 @@
             rSubmixModule->removeOutputProfile(address);
             rSubmixModule->removeInputProfile(address);
 
-        } if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
+        } else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
             if (mPolicyMixes.unregisterMix(mix.mDeviceAddress) != NO_ERROR) {
                 res = INVALID_OPERATION;
                 continue;
@@ -3635,9 +3666,11 @@
         audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
         audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         bool isRequestedDeviceForExclusiveUse = false;
+        std::vector<sp<SwAudioOutputDescriptor>> secondaryOutputs;
         getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE,
                 &attributes, &stream, sourceDesc->uid(), &config, &flags,
-                &selectedDeviceId, &isRequestedDeviceForExclusiveUse);
+                &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+                &secondaryOutputs);
         if (output == AUDIO_IO_HANDLE_NONE) {
             ALOGV("%s no output for device %08x", __FUNCTION__, sinkDevices.types());
             return INVALID_OPERATION;
@@ -4004,7 +4037,11 @@
     if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
         property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
         // A2DP offload supported but disabled: try to use special XML file
-        fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+        if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+            fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
+        } else {
+            fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+        }
     }
     fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
 
@@ -4782,6 +4819,7 @@
     // output is suspended before any tracks are moved to it
     checkA2dpSuspend();
     checkOutputForAllStrategies();
+    checkSecondaryOutputs();
     if (onOutputsChecked != nullptr && onOutputsChecked()) checkA2dpSuspend();
     updateDevicesAndOutputs();
     if (mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD) != 0) {
@@ -4870,6 +4908,29 @@
     }
 }
 
+void AudioPolicyManager::checkSecondaryOutputs() {
+    std::set<audio_stream_type_t> streamsToInvalidate;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        const sp<SwAudioOutputDescriptor>& outputDescriptor = mOutputs[i];
+        for (const sp<TrackClientDescriptor>& client : outputDescriptor->getClientIterable()) {
+            // FIXME code duplicated from getOutputForAttrInt
+            sp<SwAudioOutputDescriptor> desc;
+            std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
+            mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(), desc,
+                                          &secondaryDescs);
+            if (!std::equal(client->getSecondaryOutputs().begin(),
+                            client->getSecondaryOutputs().end(),
+                            secondaryDescs.begin(), secondaryDescs.end())) {
+                streamsToInvalidate.insert(client->stream());
+            }
+        }
+    }
+    for (audio_stream_type_t stream : streamsToInvalidate) {
+        ALOGD("%s Invalidate stream %d due to secondary output change", __func__, stream);
+        mpClientInterface->invalidateStream(stream);
+    }
+}
+
 void AudioPolicyManager::checkA2dpSuspend()
 {
     audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput();
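
checkSecondaryOutputs() recomputes the dynamic-policy routing for every active client and
invalidates each affected stream once if a client's secondary outputs changed; the
four-iterator std::equal overload used there also reports ranges of different lengths as
unequal. A simplified standalone sketch of the same pattern (the Client type and recompute
step below are stand-ins, not the policy manager's structures):

    #include <algorithm>
    #include <iostream>
    #include <set>
    #include <vector>

    struct Client {
        int stream;                          // stand-in for audio_stream_type_t
        std::vector<int> currentSecondaries; // io handles recorded when the track was created
    };

    std::vector<int> recomputeSecondaries(const Client&) {
        return {7};  // pretend the dynamic policies now route every client to output 7
    }

    int main() {
        std::vector<Client> clients{{/*stream=*/3, {5}}, {/*stream=*/3, {7}}};
        std::set<int> streamsToInvalidate;

        for (const Client& client : clients) {
            std::vector<int> fresh = recomputeSecondaries(client);
            if (!std::equal(client.currentSecondaries.begin(), client.currentSecondaries.end(),
                            fresh.begin(), fresh.end())) {
                streamsToInvalidate.insert(client.stream);  // set deduplicates per stream
            }
        }
        for (int stream : streamsToInvalidate) {
            std::cout << "invalidate stream " << stream << "\n";  // emitted once per stream
        }
        return 0;
    }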
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 73c3b56..70ad6ac 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -113,15 +113,16 @@
         virtual void setSystemProperty(const char* property, const char* value);
         virtual status_t initCheck();
         virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
-        virtual status_t getOutputForAttr(const audio_attributes_t *attr,
-                                          audio_io_handle_t *output,
-                                          audio_session_t session,
-                                          audio_stream_type_t *stream,
-                                          uid_t uid,
-                                          const audio_config_t *config,
-                                          audio_output_flags_t *flags,
-                                          audio_port_handle_t *selectedDeviceId,
-                                          audio_port_handle_t *portId);
+        status_t getOutputForAttr(const audio_attributes_t *attr,
+                                  audio_io_handle_t *output,
+                                  audio_session_t session,
+                                  audio_stream_type_t *stream,
+                                  uid_t uid,
+                                  const audio_config_t *config,
+                                  audio_output_flags_t *flags,
+                                  audio_port_handle_t *selectedDeviceId,
+                                  audio_port_handle_t *portId,
+                                  std::vector<audio_io_handle_t> *secondaryOutputs) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual void releaseOutput(audio_port_handle_t portId);
@@ -431,6 +432,10 @@
          */
         void checkOutputForAllStrategies();
 
+        // Same as checkOutputForStrategy() but for secondary outputs. Makes sure that if a
+        // secondary output's routing condition changes, the track is properly rerouted.
+        void checkSecondaryOutputs();
+
         // manages A2DP output suspend/restore according to phone state and BT SCO usage
         void checkA2dpSuspend();
 
@@ -711,7 +716,8 @@
                 const audio_config_t *config,
                 audio_output_flags_t *flags,
                 audio_port_handle_t *selectedDeviceId,
-                bool *isRequestedDeviceForExclusiveUse);
+                bool *isRequestedDeviceForExclusiveUse,
+                std::vector<sp<SwAudioOutputDescriptor>> *secondaryDescs);
         // internal method to return the output handle for the given device and format
         audio_io_handle_t getOutputForDevices(
                 const DeviceVector &devices,
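
The header now spells these overrides with 'override' instead of repeating 'virtual', which is
what turns a drifting signature (such as getOutputForAttr() growing the secondaryOutputs
parameter) into a compile error rather than a silent new overload. A minimal illustration
(types below are made up for the example):

    #include <vector>

    struct Interface {
        virtual ~Interface() = default;
        virtual int getOutputForAttr(std::vector<int>* secondaryOutputs) = 0;
    };

    struct Manager : public Interface {
        // Re-enabling the stale signature below would no longer compile, because
        // 'override' demands an exact match with a virtual function in the base:
        // int getOutputForAttr() override;
        int getOutputForAttr(std::vector<int>* secondaryOutputs) override {
            if (secondaryOutputs != nullptr) secondaryOutputs->push_back(1);
            return 0;
        }
    };

    int main() {
        Manager manager;
        std::vector<int> secondaries;
        return manager.getOutputForAttr(&secondaries);
    }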
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 7768ea3..8ddf824 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -175,7 +175,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t flags,
                                               audio_port_handle_t *selectedDeviceId,
-                                              audio_port_handle_t *portId)
+                                              audio_port_handle_t *portId,
+                                              std::vector<audio_io_handle_t> *secondaryOutputs)
 {
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
@@ -193,7 +194,8 @@
     AutoCallerClear acc;
     status_t result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
                                                  config,
-                                                 &flags, selectedDeviceId, portId);
+                                                 &flags, selectedDeviceId, portId,
+                                                 secondaryOutputs);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if ((result == NO_ERROR) &&
@@ -205,9 +207,10 @@
         flags = originalFlags;
         *selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         *portId = AUDIO_PORT_HANDLE_NONE;
-        result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
-                                                 config,
-                                                 &flags, selectedDeviceId, portId);
+        secondaryOutputs->clear();
+        result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid, config,
+                                                       &flags, selectedDeviceId, portId,
+                                                       secondaryOutputs);
     }
 
     if (result == NO_ERROR) {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index ee293a7..8cd6e81 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -74,16 +74,17 @@
     virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
     virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
-    virtual status_t getOutputForAttr(const audio_attributes_t *attr,
-                                      audio_io_handle_t *output,
-                                      audio_session_t session,
-                                      audio_stream_type_t *stream,
-                                      pid_t pid,
-                                      uid_t uid,
-                                      const audio_config_t *config,
-                                      audio_output_flags_t flags,
-                                      audio_port_handle_t *selectedDeviceId,
-                                      audio_port_handle_t *portId);
+    status_t getOutputForAttr(const audio_attributes_t *attr,
+                              audio_io_handle_t *output,
+                              audio_session_t session,
+                              audio_stream_type_t *stream,
+                              pid_t pid,
+                              uid_t uid,
+                              const audio_config_t *config,
+                              audio_output_flags_t flags,
+                              audio_port_handle_t *selectedDeviceId,
+                              audio_port_handle_t *portId,
+                              std::vector<audio_io_handle_t> *secondaryOutputs) override;
     virtual status_t startOutput(audio_port_handle_t portId);
     virtual status_t stopOutput(audio_port_handle_t portId);
     virtual void releaseOutput(audio_port_handle_t portId);
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e9f4657..de5670c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -214,7 +214,7 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, &output, AUDIO_SESSION_NONE, &stream, 0 /*uid*/, &config, &flags,
-                    selectedDeviceId, portId));
+                    selectedDeviceId, portId, {}));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
 
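
In the updated test call, passing '{}' for the new std::vector<audio_io_handle_t>* argument
value-initializes the pointer, so the manager receives a null secondaryOutputs pointer (the
test does not inspect secondary routing). A short reminder of that conversion rule (function
and types below are illustrative):

    #include <cassert>
    #include <vector>

    void getOutputForAttr(std::vector<int>* secondaryOutputs) {
        assert(secondaryOutputs == nullptr);  // '{}' arrives as a null pointer
    }

    int main() {
        getOutputForAttr({});  // brace-initializing a pointer parameter yields nullptr
        return 0;
    }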
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index 6a71d7d..f78c671 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -108,6 +108,9 @@
     libutils \
     libziparchive \
 
+LOCAL_HEADER_LIBRARIES := \
+    libnativeloader-dummy-headers \
+
 LOCAL_MODULE := mediaswcodec
 LOCAL_INIT_RC := mediaswcodec.rc
 LOCAL_SANITIZE := scudo
diff --git a/services/mediacodec/MediaCodecUpdateService.cpp b/services/mediacodec/MediaCodecUpdateService.cpp
index 0e6892d..50ccbce 100644
--- a/services/mediacodec/MediaCodecUpdateService.cpp
+++ b/services/mediacodec/MediaCodecUpdateService.cpp
@@ -20,28 +20,12 @@
 #include <android/dlext.h>
 #include <dlfcn.h>
 #include <media/CodecServiceRegistrant.h>
+#include <nativeloader/dlext_namespaces.h>
 #include <utils/Log.h>
 #include <utils/String8.h>
 
 #include "MediaCodecUpdateService.h"
 
-// Copied from GraphicsEnv.cpp
-// TODO(b/37049319) Get this from a header once one exists
-extern "C" {
-  android_namespace_t* android_create_namespace(const char* name,
-                                                const char* ld_library_path,
-                                                const char* default_library_path,
-                                                uint64_t type,
-                                                const char* permitted_when_isolated_path,
-                                                android_namespace_t* parent);
-  bool android_link_namespaces(android_namespace_t* from,
-                               android_namespace_t* to,
-                               const char* shared_libs_sonames);
-  enum {
-     ANDROID_NAMESPACE_TYPE_ISOLATED = 1,
-  };
-}
-
 namespace android {
 
 void loadFromApex(const char *libDirPath) {