Merge "Remove goto used in SurfaceUtils"
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 2c5fcd7..19d1abc 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -73,7 +73,6 @@
             }
         }
         ap->stop();
-        ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb);
     }
 }
 
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index ede62cc..b7a2d60 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -49,17 +49,11 @@
     void TearDown() override {
         if (mPlayback) {
             mPlayback->stop();
-            if (auto handle = mPlayback->getAudioTrackHandle(); handle) {
-                handle->removeAudioDeviceCallback(mCbPlayback);
-            }
             mCbPlayback.clear();
             mPlayback.clear();
         }
         if (mCapture) {
             mCapture->stop();
-            if (auto handle = mCapture->getAudioRecordHandle(); handle) {
-                handle->removeAudioDeviceCallback(mCbRecord);
-            }
             mCbRecord.clear();
             mCapture.clear();
         }
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index 84ac64c..f88915d 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -105,12 +105,12 @@
 
 bool hasAidlHalService(const InterfaceName& interface, const AudioHalVersionInfo& version) {
     const std::string name = interface.first + "." + interface.second + "/default";
-    AIBinder* binder = AServiceManager_checkService(name.c_str());
-    if (binder == nullptr) {
-        ALOGW("%s Service %s doesn't exist", __func__, name.c_str());
+    const bool isDeclared = AServiceManager_isDeclared(name.c_str());
+    if (!isDeclared) {
+        ALOGW("%s %s: false", __func__, name.c_str());
         return false;
     }
-    ALOGI("%s AIDL Service %s exist: %s", __func__, name.c_str(), version.toString().c_str());
+    ALOGI("%s %s: true, version %s", __func__, name.c_str(), version.toString().c_str());
     return true;
 }
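The hunk above swaps AServiceManager_checkService() for AServiceManager_isDeclared(). A brief sketch of the difference, assuming the usual NDK servicemanager semantics (this explanation is not part of the patch): isDeclared() only consults the device manifest, so it reports a HAL that is declared but not yet started, whereas checkService() returns a binder only if the service is currently registered.

    // Minimal sketch contrasting the two queries (illustrative only).
    #include <string>
    #include <android/binder_auto_utils.h>
    #include <android/binder_manager.h>

    // True if the instance is declared in the manifest, even if it has not
    // registered with servicemanager yet (e.g. during early boot).
    bool isHalDeclared(const std::string& name) {
        return AServiceManager_isDeclared(name.c_str());
    }

    // True only if the instance is currently registered; a declared but
    // not-yet-started HAL would be reported as missing.
    bool isHalRunning(const std::string& name) {
        ndk::SpAIBinder binder(AServiceManager_checkService(name.c_str()));
        return binder.get() != nullptr;
    }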
 
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 6c882a6..32ebe36 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -20,6 +20,8 @@
 #include <algorithm>
 #include <forward_list>
 
+#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
+#include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
@@ -37,6 +39,7 @@
 using aidl::android::media::audio::common::AudioDeviceType;
 using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
+using aidl::android::media::audio::common::AudioLatencyMode;
 using aidl::android::media::audio::common::AudioMode;
 using aidl::android::media::audio::common::AudioOutputFlags;
 using aidl::android::media::audio::common::AudioPort;
@@ -298,6 +301,123 @@
     return OK;
 }
 
+namespace {
+
+class StreamCallbackBase {
+  protected:
+    explicit StreamCallbackBase(const sp<CallbackBroker>& broker) : mBroker(broker) {}
+  public:
+    void* getCookie() const { return mCookie; }
+    void setCookie(void* cookie) { mCookie = cookie; }
+    sp<CallbackBroker> getBroker() const {
+        if (void* cookie = mCookie; cookie != nullptr) return mBroker.promote();
+        return nullptr;
+    }
+  private:
+    const wp<CallbackBroker> mBroker;
+    std::atomic<void*> mCookie;
+};
+
+template<class C>
+class StreamCallbackBaseHelper {
+  protected:
+    explicit StreamCallbackBaseHelper(const StreamCallbackBase& base) : mBase(base) {}
+    sp<C> getCb(const sp<CallbackBroker>& broker, void* cookie);
+    using CbRef = const sp<C>&;
+    ndk::ScopedAStatus runCb(const std::function<void(CbRef cb)>& f) {
+        if (auto cb = getCb(mBase.getBroker(), mBase.getCookie()); cb != nullptr) f(cb);
+        return ndk::ScopedAStatus::ok();
+    }
+  private:
+    const StreamCallbackBase& mBase;
+};
+
+template<>
+sp<StreamOutHalInterfaceCallback> StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutCallback(cookie);
+    return nullptr;
+}
+
+template<>
+sp<StreamOutHalInterfaceEventCallback>
+StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutEventCallback(cookie);
+    return nullptr;
+}
+
+template<>
+sp<StreamOutHalInterfaceLatencyModeCallback>
+StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutLatencyModeCallback(cookie);
+    return nullptr;
+}
+
+/*
+Note on the callback ownership.
+
+In the Binder ownership model, the server-side implementation is kept alive
+as long as any client (proxy object) is alive: the Binder framework
+increments the refcount of the server-side object when a proxy is created
+and decrements it again when the last client goes away.
+
+Thus, there is no need to keep any references to StreamCallback on our
+side (after we have sent an instance to the client) because we are
+the server side. The callback object will be kept alive for as long as the
+HAL server holds a strong ref to the IStreamCallback proxy.
+*/
+
+class OutputStreamCallbackAidl : public StreamCallbackBase,
+                             public StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>,
+                             public ::aidl::android::hardware::audio::core::BnStreamCallback {
+  public:
+    explicit OutputStreamCallbackAidl(const sp<CallbackBroker>& broker)
+            : StreamCallbackBase(broker),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>(
+                      *static_cast<StreamCallbackBase*>(this)) {}
+    ndk::ScopedAStatus onTransferReady() override {
+        return runCb([](CbRef cb) { cb->onWriteReady(); });
+    }
+    ndk::ScopedAStatus onError() override {
+        return runCb([](CbRef cb) { cb->onError(); });
+    }
+    ndk::ScopedAStatus onDrainReady() override {
+        return runCb([](CbRef cb) { cb->onDrainReady(); });
+    }
+};
+
+class OutputStreamEventCallbackAidl :
+            public StreamCallbackBase,
+            public StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>,
+            public StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>,
+            public ::aidl::android::hardware::audio::core::BnStreamOutEventCallback {
+  public:
+    explicit OutputStreamEventCallbackAidl(const sp<CallbackBroker>& broker)
+            : StreamCallbackBase(broker),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>(
+                      *static_cast<StreamCallbackBase*>(this)),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>(
+                      *static_cast<StreamCallbackBase*>(this)) {}
+    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& in_audioMetadata) override {
+        std::basic_string<uint8_t> halMetadata(in_audioMetadata.begin(), in_audioMetadata.end());
+        return StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::runCb(
+                [&halMetadata](auto cb) { cb->onCodecFormatChanged(halMetadata); });
+    }
+    ndk::ScopedAStatus onRecommendedLatencyModeChanged(
+            const std::vector<AudioLatencyMode>& in_modes) override {
+        auto halModes = VALUE_OR_FATAL(
+                ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
+                        in_modes,
+                        ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
+        return StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>::runCb(
+                [&halModes](auto cb) { cb->onRecommendedLatencyModeChanged(halModes); });
+    }
+};
+
+}  // namespace
+
 status_t DeviceHalAidl::openOutputStream(
         audio_io_handle_t handle, audio_devices_t devices,
         audio_output_flags_t flags, struct audio_config* config,
@@ -325,8 +445,19 @@
                     &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
-    args.offloadInfo = aidlConfig.offloadInfo;
+    const bool isOffload = isBitPositionFlagSet(
+            aidlOutputFlags, AudioOutputFlags::COMPRESS_OFFLOAD);
+    std::shared_ptr<OutputStreamCallbackAidl> streamCb;
+    if (isOffload) {
+        streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
+    }
+    auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
+    if (isOffload) {
+        args.offloadInfo = aidlConfig.offloadInfo;
+        args.callback = streamCb;
+    }
     args.bufferSizeFrames = aidlConfig.frameCount;
+    args.eventCallback = eventCb;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
     StreamContextAidl context(ret.desc);
@@ -336,7 +467,14 @@
         return NO_INIT;
     }
     *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), nominalLatency,
-            std::move(ret.stream));
+            std::move(ret.stream), this /*callbackBroker*/);
+    void* cbCookie = (*outStream).get();
+    {
+        std::lock_guard l(mLock);
+        mCallbacks.emplace(cbCookie, Callbacks{});
+    }
+    if (streamCb) streamCb->setCookie(cbCookie);
+    eventCb->setCookie(cbCookie);
     cleanups.disarmAll();
     return OK;
 }
@@ -862,4 +1000,55 @@
     ALOGE("%s: port config id %d not found", __func__, portConfigId);
 }
 
+void DeviceHalAidl::clearCallbacks(void* cookie) {
+    std::lock_guard l(mLock);
+    mCallbacks.erase(cookie);
+}
+
+sp<StreamOutHalInterfaceCallback> DeviceHalAidl::getStreamOutCallback(void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::out);
+}
+
+void DeviceHalAidl::setStreamOutCallback(
+        void* cookie, const sp<StreamOutHalInterfaceCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::out, cb);
+}
+
+sp<StreamOutHalInterfaceEventCallback> DeviceHalAidl::getStreamOutEventCallback(
+        void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::event);
+}
+
+void DeviceHalAidl::setStreamOutEventCallback(
+        void* cookie, const sp<StreamOutHalInterfaceEventCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::event, cb);
+}
+
+sp<StreamOutHalInterfaceLatencyModeCallback> DeviceHalAidl::getStreamOutLatencyModeCallback(
+        void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::latency);
+}
+
+void DeviceHalAidl::setStreamOutLatencyModeCallback(
+        void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::latency, cb);
+}
+
+template<class C>
+sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
+    std::lock_guard l(mLock);
+    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+        return ((it->second).*field).promote();
+    }
+    return nullptr;
+}
+template<class C>
+void DeviceHalAidl::setCallbackImpl(
+        void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
+    std::lock_guard l(mLock);
+    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+        (it->second).*field = cb;
+    }
+}
+
 } // namespace android
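To summarize the callback plumbing introduced above: the AIDL callback objects are created up front in openOutputStream() and handed to the HAL, while the framework registers its StreamOut callbacks later through setCallback()/setEventCallback(). The broker bridges the two by keying the late-registered callbacks on the stream pointer (the "cookie"). A condensed sketch of the pattern, with hypothetical names rather than the actual classes above:

    // Illustrative sketch of the cookie-keyed broker pattern (hypothetical types).
    #include <map>
    #include <mutex>
    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    struct FrameworkCallback : public android::RefBase {
        virtual void onDrainReady() = 0;
    };

    class Broker {
      public:
        // Called by the stream when the framework registers its callback (late).
        void set(void* cookie, const android::sp<FrameworkCallback>& cb) {
            std::lock_guard l(mLock);
            mCallbacks[cookie] = cb;
        }
        // Called by the HAL-side AIDL callback when an event arrives.
        android::sp<FrameworkCallback> get(void* cookie) {
            std::lock_guard l(mLock);
            auto it = mCallbacks.find(cookie);
            return it != mCallbacks.end() ? it->second.promote() : nullptr;
        }
        // Called by the stream destructor so a stale cookie is never reused.
        void clear(void* cookie) {
            std::lock_guard l(mLock);
            mCallbacks.erase(cookie);
        }
      private:
        std::mutex mLock;
        std::map<void*, android::wp<FrameworkCallback>> mCallbacks;  // cookie -> callback
    };

    // HAL-side callback handler: resolves the framework callback lazily via the
    // broker, so nothing breaks if the event fires before setCallback() was called.
    void onDrainReadyFromHal(Broker& broker, void* streamCookie) {
        if (auto cb = broker.get(streamCookie); cb != nullptr) cb->onDrainReady();
    }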
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 82299b5..6f16daf 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -21,6 +21,7 @@
 #include <vector>
 
 #include <aidl/android/hardware/audio/core/BpModule.h>
+#include <android-base/thread_annotations.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
 
@@ -28,7 +29,35 @@
 
 namespace android {
 
-class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl {
+class StreamOutHalInterfaceCallback;
+class StreamOutHalInterfaceEventCallback;
+class StreamOutHalInterfaceLatencyModeCallback;
+
+// The role of the broker is to connect AIDL callback interface implementations
+// with StreamOut callback implementations. Since AIDL requires all callbacks
+// to be provided upfront, while libaudiohal interfaces allow late registration,
+// there is a need to coordinate the matching process.
+class CallbackBroker : public virtual RefBase {
+  public:
+    virtual ~CallbackBroker() = default;
+    // The cookie is always the stream instance pointer. We don't use weak pointers here in order
+    // to avoid the extra cost of reference counting; instead, the stream cleans up its entries on
+    // destruction. Since access to the callbacks map is synchronized, pointer aliasing caused by
+    // a new stream being allocated at the address of a previously deleted stream is avoided.
+    virtual void clearCallbacks(void* cookie) = 0;
+    virtual sp<StreamOutHalInterfaceCallback> getStreamOutCallback(void* cookie) = 0;
+    virtual void setStreamOutCallback(void* cookie, const sp<StreamOutHalInterfaceCallback>&) = 0;
+    virtual sp<StreamOutHalInterfaceEventCallback> getStreamOutEventCallback(void* cookie) = 0;
+    virtual void setStreamOutEventCallback(void* cookie,
+            const sp<StreamOutHalInterfaceEventCallback>&) = 0;
+    virtual sp<StreamOutHalInterfaceLatencyModeCallback> getStreamOutLatencyModeCallback(
+            void* cookie) = 0;
+    virtual void setStreamOutLatencyModeCallback(
+            void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>&) = 0;
+};
+
+class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
+                      public CallbackBroker {
   public:
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     status_t getSupportedDevices(uint32_t *devices) override;
@@ -123,6 +152,12 @@
 
   private:
     friend class sp<DeviceHalAidl>;
+
+    struct Callbacks {  // No need to use `atomic_wp` because access is serialized.
+        wp<StreamOutHalInterfaceCallback> out;
+        wp<StreamOutHalInterfaceEventCallback> event;
+        wp<StreamOutHalInterfaceLatencyModeCallback> latency;
+    };
     using Patches = std::map<int32_t /*patch ID*/,
             ::aidl::android::hardware::audio::core::AudioPatch>;
     using PortConfigs = std::map<int32_t /*port config ID*/,
@@ -191,14 +226,31 @@
     void resetPatch(int32_t patchId);
     void resetPortConfig(int32_t portConfigId);
 
+    // CallbackBroker implementation
+    void clearCallbacks(void* cookie) override;
+    sp<StreamOutHalInterfaceCallback> getStreamOutCallback(void* cookie) override;
+    void setStreamOutCallback(void* cookie, const sp<StreamOutHalInterfaceCallback>& cb) override;
+    sp<StreamOutHalInterfaceEventCallback> getStreamOutEventCallback(void* cookie) override;
+    void setStreamOutEventCallback(void* cookie,
+            const sp<StreamOutHalInterfaceEventCallback>& cb) override;
+    sp<StreamOutHalInterfaceLatencyModeCallback> getStreamOutLatencyModeCallback(
+            void* cookie) override;
+    void setStreamOutLatencyModeCallback(
+            void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>& cb) override;
+    // Implementation helpers.
+    template<class C> sp<C> getCallbackImpl(void* cookie, wp<C> Callbacks::* field);
+    template<class C> void setCallbackImpl(void* cookie, wp<C> Callbacks::* field, const sp<C>& cb);
+
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
     Ports mPorts;
-    int32_t mDefaultInputPortId;
-    int32_t mDefaultOutputPortId;
+    int32_t mDefaultInputPortId = -1;
+    int32_t mDefaultOutputPortId = -1;
     PortConfigs mPortConfigs;
     Patches mPatches;
-    std::map<audio_patch_handle_t, int32_t /* patch ID */> mFwkHandles;
+    std::map<audio_patch_handle_t, int32_t /*patch ID*/> mFwkHandles;
+    std::mutex mLock;
+    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
 };
 
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 6190596..7e25b04 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -136,29 +136,15 @@
     return ret;
 }
 
-status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize, const void* pCmdData,
+status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize,
+                                                     const void* pCmdData __unused,
                                                      uint32_t* replySize, void* pReplyData) {
     if (!replySize || *replySize != sizeof(int) || !pReplyData ||
         cmdSize != sizeof(effect_config_t)) {
         return BAD_VALUE;
     }
 
-    return *static_cast<int32_t*>(pReplyData) = OK;
-    const auto& legacyConfig = static_cast<const effect_config_t*>(pCmdData);
-    // already open, apply latest settings
-    Parameter::Common common;
-    common.input.base =
-            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                    legacyConfig->inputCfg, true /* isInput */));
-    common.output.base =
-            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                    legacyConfig->outputCfg, false /* isInput */));
-    common.session = mSessionId;
-    common.ioHandle = mIoId;
-    // TODO: add access mode support
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            mEffect->setParameter(Parameter::make<Parameter::common>(common))));
-    mCommon = common;
+    // TODO: need to implement setConfig with setParameter(common)
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 2b85f97..a97bd02 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -20,7 +20,6 @@
 #include <algorithm>
 #include <cstdint>
 
-#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
 #include <audio_utils/clock.h>
 #include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
@@ -448,43 +447,18 @@
     return OK;
 }
 
-namespace {
-
-/* Notes on callback ownership.
-
-This is how Binder ownership model looks like. The server implementation
-is owned by Binder framework (via sp<>). Proxies are owned by clients.
-When the last proxy disappears, Binder framework releases the server impl.
-
-Thus, it is not needed to keep any references to StreamCallback (this is
-the server impl) -- it will live as long as HAL server holds a strong ref to
-IStreamCallback proxy.
-
-The callback only keeps a weak reference to the stream. The stream is owned
-by AudioFlinger.
-
-*/
-
-class StreamCallback : public ::aidl::android::hardware::audio::core::BnStreamCallback {
-    ndk::ScopedAStatus onTransferReady() override {
-        return ndk::ScopedAStatus::ok();
-    }
-    ndk::ScopedAStatus onError() override {
-        return ndk::ScopedAStatus::ok();
-    }
-    ndk::ScopedAStatus onDrainReady() override {
-        return ndk::ScopedAStatus::ok();
-    }
-};
-
-}  // namespace
-
 StreamOutHalAidl::StreamOutHalAidl(
         const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-        const std::shared_ptr<IStreamOut>& stream)
+        const std::shared_ptr<IStreamOut>& stream, const sp<CallbackBroker>& callbackBroker)
         : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
                 std::move(context), getStreamCommon(stream)),
-          mStream(stream) {}
+          mStream(stream), mCallbackBroker(callbackBroker) {}
+
+StreamOutHalAidl::~StreamOutHalAidl() {
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->clearCallbacks(this);
+    }
+}
 
 status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
     return StreamHalAidl::getLatency(latency);
@@ -529,10 +503,18 @@
     return INVALID_OPERATION;
 }
 
-status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback __unused) {
+status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        if (auto cb = callback.promote(); cb != nullptr) {
+            broker->setStreamOutCallback(this, cb);
+        } else {
+            // It is expected that the framework never passes a null pointer.
+            // In the AIDL model callbacks can't be "unregistered".
+            LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
+        }
+    }
     return OK;
 }
 
@@ -639,23 +621,15 @@
 }
 
 status_t StreamOutHalAidl::setEventCallback(
-        const sp<StreamOutHalInterfaceEventCallback>& callback __unused) {
+        const sp<StreamOutHalInterfaceEventCallback>& callback) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->setStreamOutEventCallback(this, callback);
+    }
     return OK;
 }
 
-namespace {
-
-struct StreamOutEventCallback {
-    StreamOutEventCallback(const wp<StreamOutHalAidl>& stream) : mStream(stream) {}
-  private:
-    wp<StreamOutHalAidl> mStream;
-};
-
-}  // namespace
-
 status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode __unused) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
@@ -672,48 +646,15 @@
 };
 
 status_t StreamOutHalAidl::setLatencyModeCallback(
-        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) {
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->setStreamOutLatencyModeCallback(this, callback);
+    }
     return OK;
 };
 
-void StreamOutHalAidl::onWriteReady() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
-    if (callback == 0) return;
-    ALOGV("asyncCallback onWriteReady");
-    callback->onWriteReady();
-}
-
-void StreamOutHalAidl::onDrainReady() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
-    if (callback == 0) return;
-    ALOGV("asyncCallback onDrainReady");
-    callback->onDrainReady();
-}
-
-void StreamOutHalAidl::onError() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
-    if (callback == 0) return;
-    ALOGV("asyncCallback onError");
-    callback->onError();
-}
-
-void StreamOutHalAidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs __unused) {
-    sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
-    if (callback == nullptr) return;
-    ALOGV("asyncCodecFormatCallback %s", __func__);
-    callback->onCodecFormatChanged(metadataBs);
-}
-
-void StreamOutHalAidl::onRecommendedLatencyModeChanged(
-        const std::vector<audio_latency_mode_t>& modes __unused) {
-    sp<StreamOutHalInterfaceLatencyModeCallback> callback = mLatencyModeCallback.load().promote();
-    if (callback == nullptr) return;
-    callback->onRecommendedLatencyModeChanged(modes);
-}
-
 status_t StreamOutHalAidl::exit() {
     return StreamHalAidl::exit();
 }
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index ce6c31c..162c7bc 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -27,7 +27,6 @@
 #include <fmq/AidlMessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
-#include <mediautils/Synchronization.h>
 
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
@@ -221,6 +220,8 @@
     std::atomic<pid_t> mWorkerTid = -1;
 };
 
+class CallbackBroker;
+
 class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -294,33 +295,21 @@
     status_t setLatencyModeCallback(
             const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
 
-    void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
-
     status_t exit() override;
 
-    void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
-
-    // Methods used by StreamOutCallback ().
-    // FIXME: Consider the required visibility.
-    void onWriteReady();
-    void onDrainReady();
-    void onError();
-
   private:
     friend class sp<StreamOutHalAidl>;
 
-    mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
-    mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-    mediautils::atomic_wp<StreamOutHalInterfaceLatencyModeCallback> mLatencyModeCallback;
-
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
+    const wp<CallbackBroker> mCallbackBroker;
 
     // Can not be constructed directly by clients.
     StreamOutHalAidl(
             const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut>& stream);
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut>& stream,
+            const sp<CallbackBroker>& callbackBroker);
 
-    ~StreamOutHalAidl() override = default;
+    ~StreamOutHalAidl() override;
 };
 
 class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl {
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index efa9e89..a10d271 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -69,25 +69,7 @@
             break;
         }
         case EQ_PARAM_PROPERTIES: {
-            if (value >= 0) {
-                aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)value);
-            } else {
-                std::vector<Equalizer::BandLevel> bandLevels;
-                uint16_t numBands;
-                if (OK != param.readFromValue(&numBands)) {
-                    ALOGE("%s invalid bandLevel param %s", __func__, param.toString().c_str());
-                    return BAD_VALUE;
-                }
-                for (int i = 0; i < numBands; i++) {
-                    uint16_t level;
-                    if (OK != param.readFromValue(&level)) {
-                        ALOGE("%s invalid property param %s", __func__, param.toString().c_str());
-                        return BAD_VALUE;
-                    }
-                    bandLevels.push_back({.index = i, .levelMb = level});
-                }
-                aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
-            }
+            // TODO: handle properties setting
             break;
         }
         default: {
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 094b415..c685345 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -31,7 +31,7 @@
 class StreamInHalInterface;
 class StreamOutHalInterface;
 
-class DeviceHalInterface : public RefBase
+class DeviceHalInterface : public virtual RefBase
 {
   public:
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 1d52b7d..a651d9b 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -110,8 +110,8 @@
     virtual void onError() {}
 
   protected:
-    StreamOutHalInterfaceCallback() {}
-    virtual ~StreamOutHalInterfaceCallback() {}
+    StreamOutHalInterfaceCallback() = default;
+    virtual ~StreamOutHalInterfaceCallback() = default;
 };
 
 class StreamOutHalInterfaceEventCallback : public virtual RefBase {
@@ -119,8 +119,8 @@
     virtual void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) = 0;
 
 protected:
-    StreamOutHalInterfaceEventCallback() {}
-    virtual ~StreamOutHalInterfaceEventCallback() {}
+    StreamOutHalInterfaceEventCallback() = default;
+    virtual ~StreamOutHalInterfaceEventCallback() = default;
 };
 
 class StreamOutHalInterfaceLatencyModeCallback : public virtual RefBase {
@@ -131,8 +131,8 @@
     virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
 
 protected:
-    StreamOutHalInterfaceLatencyModeCallback() {}
-    virtual ~StreamOutHalInterfaceLatencyModeCallback() {}
+    StreamOutHalInterfaceLatencyModeCallback() = default;
+    virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
 class StreamOutHalInterface : public virtual StreamHalInterface {
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index c601c38..e303efd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -24,8 +24,9 @@
 
 namespace aidl::android::hardware::audio::effect {
 
-using aidl::android::media::audio::common::AudioDeviceDescription;
-using aidl::android::media::audio::common::AudioDeviceType;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
 
 RetCode BundleContext::init() {
     std::lock_guard lg(mMutex);
@@ -317,6 +318,11 @@
     return true;
 }
 
+bool BundleContext::isConfigSupportedVirtualizer(size_t channelCount,
+                                                 const AudioDeviceDescription& device) {
+    return (channelCount >= 1 && channelCount <= FCC_2) && isDeviceSupportedVirtualizer({device});
+}
+
 RetCode BundleContext::setOutputDevice(
         const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
     mOutputDevice = devices;
@@ -469,6 +475,23 @@
     return bandLevels;
 }
 
+std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
+    std::vector<int32_t> freqs;
+
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        /* Get the current settings */
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
+                        " getControlParamFailed");
+        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+            freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
+        }
+    }
+
+    return freqs;
+}
+
 bool BundleContext::isBandLevelIndexInRange(
         const std::vector<Equalizer::BandLevel>& bandLevels) const {
     const auto [min, max] =
@@ -583,6 +606,15 @@
     return limitLevel();
 }
 
+
+RetCode BundleContext::setForcedDevice(
+        const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
+    RETURN_VALUE_IF(true != isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
+                    " deviceNotSupportVirtualizer");
+    mForceDevice = device;
+    return RetCode::SUCCESS;
+}
+
 void BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
     /* General parameters */
     params.OperatingMode = LVM_MODE_ON;
@@ -668,6 +700,28 @@
     return HeadroomBandDef;
 }
 
+std::vector<Virtualizer::ChannelAngle> BundleContext::getSpeakerAngles(
+        const Virtualizer::SpeakerAnglesPayload payload) {
+    std::vector<Virtualizer::ChannelAngle> angles;
+    auto chCount = ::android::hardware::audio::common::getChannelCount(payload.layout);
+    RETURN_VALUE_IF(!isConfigSupportedVirtualizer(chCount, payload.device), angles,
+                    "payloadNotSupported");
+
+    if (chCount == 1) {
+        angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT,
+                   .azimuthDegree = 0,
+                   .elevationDegree = 0}};
+    } else {
+        angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT,
+                   .azimuthDegree = -90,
+                   .elevationDegree = 0},
+                  {.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_RIGHT,
+                   .azimuthDegree = 90,
+                   .elevationDegree = 0}};
+    }
+    return angles;
+}
+
 IEffect::Status BundleContext::lvmProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 1f328fc..47d5e5a 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -62,6 +62,10 @@
     bool isDeviceSupportedVirtualizer(
             const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
                     devices);
+    bool isConfigSupportedVirtualizer(
+            size_t channelCount,
+            const aidl::android::media::audio::common::AudioDeviceDescription& device);
+
     RetCode setOutputDevice(
             const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices)
             override;
@@ -71,6 +75,8 @@
     RetCode setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels);
     std::vector<Equalizer::BandLevel> getEqualizerBandLevels() const;
 
+    std::vector<int32_t> getEqualizerCenterFreqs();
+
     RetCode setBassBoostStrength(int strength);
     int getBassBoostStrength() const { return mBassStrengthSaved; }
 
@@ -83,6 +89,14 @@
     RetCode setVirtualizerStrength(int strength);
     int getVirtualizerStrength() const { return mVirtStrengthSaved; }
 
+    RetCode setForcedDevice(
+            const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
+    aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
+        return mForceDevice;
+    }
+    std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
+            const Virtualizer::SpeakerAnglesPayload payload);
+
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
     Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
 
@@ -125,6 +139,7 @@
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
+    ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
     // Volume
     int mLevelSaved = 0; /* for when mute is set, level must be saved */
     int mVolume = 0;
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 81b8aca..fd9f3dc 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -218,10 +218,18 @@
                       EX_ILLEGAL_ARGUMENT, "setStrengthFailed");
             return ndk::ScopedAStatus::ok();
         }
-        default:
-            LOG(ERROR) << __func__ << " unsupported parameter " << specific.toString();
+        case Virtualizer::device: {
+            RETURN_IF(mContext->setForcedDevice(vr.get<Virtualizer::device>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Virtualizer::speakerAngles:
+            FALLTHROUGH_INTENDED;
+        case Virtualizer::vendor: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(vrTag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "vrTagNotSupported");
+                                                                    "VirtualizerTagNotSupported");
+        }
     }
 }
 
@@ -283,6 +291,10 @@
             eqParam.set<Equalizer::preset>(mContext->getEqualizerPreset());
             break;
         }
+        case Equalizer::centerFreqMh: {
+            eqParam.set<Equalizer::centerFreqMh>(mContext->getEqualizerCenterFreqs());
+            break;
+        }
         default: {
             LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
@@ -354,14 +366,27 @@
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
     Virtualizer vrParam;
 
+    if (id.getTag() == Virtualizer::Id::speakerAnglesPayload) {
+        auto angles = mContext->getSpeakerAngles(id.get<Virtualizer::Id::speakerAnglesPayload>());
+        Virtualizer param = Virtualizer::make<Virtualizer::speakerAngles>(angles);
+        specific->set<Parameter::Specific::virtualizer>(param);
+        return ndk::ScopedAStatus::ok();
+    }
+
     auto tag = id.get<Virtualizer::Id::commonTag>();
     switch (tag) {
         case Virtualizer::strengthPm: {
             vrParam.set<Virtualizer::strengthPm>(mContext->getVirtualizerStrength());
             break;
         }
-        default: {
-            LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
+        case Virtualizer::device: {
+            vrParam.set<Virtualizer::device>(mContext->getForcedDevice());
+            break;
+        }
+        case Virtualizer::speakerAngles:
+            FALLTHROUGH_INTENDED;
+        case Virtualizer::vendor: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                     "VirtualizerTagNotSupported");
         }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index cf4b849..ebbbb5f 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -863,6 +863,9 @@
             return NAME_NOT_FOUND;
         };
     }
+
+    // we want an empty metrics record for any early getMetrics() call
+    // this should be the *only* initMediametrics() call that's not on the Looper thread
     initMediametrics();
 }
 
@@ -871,8 +874,17 @@
     mResourceManagerProxy->removeClient();
 
     flushMediametrics();
+
+    // clean any saved metrics info we stored as part of configure()
+    if (mConfigureMsg != nullptr) {
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
 }
 
+// except for the constructor, this is called from the looper thread (and therefore mutexed)
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -902,11 +914,12 @@
 }
 
 void MediaCodec::updateMediametrics() {
-    ALOGV("MediaCodec::updateMediametrics");
     if (mMetricsHandle == 0) {
         return;
     }
 
+    Mutex::Autolock _lock(mMetricsLock);
+
     if (mLatencyHist.getCount() != 0 ) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1019,6 +1032,8 @@
 }
 
 
+// called to update the info being passed back via getMetrics(); that info is a
+// unique copy for that call, so there are no concurrent access worries.
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -1058,7 +1073,13 @@
 }
 
 void MediaCodec::flushMediametrics() {
+    ALOGD("flushMediametrics");
+
+    // update does its own mutex locking
     updateMediametrics();
+
+    // ensure mutex while we do our own work
+    Mutex::Autolock _lock(mMetricsLock);
     if (mMetricsHandle != 0) {
         if (mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
@@ -1572,6 +1593,8 @@
     }
     msg->setString("name", name);
 
+    // initial naming setup covers the period before the first call to ::configure().
+    // after that, we manage this through ::configure() and the setup message.
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
         mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
@@ -1644,23 +1667,28 @@
         const sp<IDescrambler> &descrambler,
         uint32_t flags) {
     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+    mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
 
     // TODO: validity check log-session-id: it should be a 32-hex-digit.
     format->findString("log-session-id", &mLogSessionId);
 
-    if (mMetricsHandle != 0) {
+    if (nextMetricsHandle != 0) {
         int32_t profile = 0;
         if (format->findInt32("profile", &profile)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
+            mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
         }
         int32_t level = 0;
         if (format->findInt32("level", &level)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
+            mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
         }
-        mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
+        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
                               (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
 
-        mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+
+        // moved here from ::init()
+        mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
@@ -1670,38 +1698,38 @@
             mRotationDegrees = 0;
         }
 
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
-            mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
             }
             int32_t maxHeight = 0;
             if (format->findInt32("max-height", &maxHeight)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
             }
             int32_t colorFormat = -1;
             if (format->findInt32("color-format", &colorFormat)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
+                mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
             }
             if (mDomain == DOMAIN_VIDEO) {
                 float frameRate = -1.0;
                 if (format->findFloat("frame-rate", &frameRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
                 }
                 float captureRate = -1.0;
                 if (format->findFloat("capture-rate", &captureRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
                 }
                 float operatingRate = -1.0;
                 if (format->findFloat("operating-rate", &operatingRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
                 }
                 int32_t priority = -1;
                 if (format->findInt32("priority", &priority)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                    mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                 }
             }
             int32_t colorStandard = -1;
@@ -1732,14 +1760,14 @@
         }
 
     } else {
-        if (mMetricsHandle != 0) {
+        if (nextMetricsHandle != 0) {
             int32_t channelCount;
             if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+                mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
             }
             int32_t sampleRate;
             if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+                mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
             }
         }
     }
@@ -1749,41 +1777,41 @@
                                                  enableMediaFormatShapingDefault);
         if (!enableShaping) {
             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
-            if (mMetricsHandle != 0) {
-                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
+            if (nextMetricsHandle != 0) {
+                mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
             }
         } else {
-            (void) shapeMediaFormat(format, flags);
+            (void) shapeMediaFormat(format, flags, nextMetricsHandle);
             // XXX: do we want to do this regardless of shaping enablement?
             mapFormat(mComponentName, format, nullptr, false);
         }
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
         }
         int32_t qpIMax = -1;
         if (format->findInt32("video-qp-i-max", &qpIMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
         }
         int32_t qpPMin = -1;
         if (format->findInt32("video-qp-p-min", &qpPMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
         }
         int32_t qpPMax = -1;
         if (format->findInt32("video-qp-p-max", &qpPMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
         }
         int32_t qpBMin = -1;
         if (format->findInt32("video-qp-b-min", &qpBMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
         }
         int32_t qpBMax = -1;
         if (format->findInt32("video-qp-b-max", &qpBMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
         }
     }
 
@@ -1799,13 +1827,23 @@
         } else {
             msg->setPointer("descrambler", descrambler.get());
         }
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
         ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
+    if (mConfigureMsg != nullptr) {
+        // if re-configuring, we have one of these from before.
+        // Recover the space before we discard the old mConfigureMsg
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
+    msg->setInt64("metrics", nextMetricsHandle);
+
     // save msg for reset
     mConfigureMsg = msg;
 
@@ -2132,7 +2170,8 @@
 
 status_t MediaCodec::shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags) {
+            uint32_t flags,
+            mediametrics_handle_t metricsHandle) {
     ALOGV("shapeMediaFormat entry");
 
     if (!(flags & CONFIGURE_FLAG_ENCODE)) {
@@ -2188,39 +2227,39 @@
         sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
         size_t changeCount = deltas->countEntries();
         ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+        if (metricsHandle != 0) {
+            mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
         }
         if (changeCount > 0) {
-            if (mMetricsHandle != 0) {
+            if (metricsHandle != 0) {
                 // save some old properties before we fold in the new ones
                 int32_t bitrate;
                 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
                 }
                 int32_t qpIMin = -1;
                 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
                 }
                 int32_t qpIMax = -1;
                 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
                 }
                 int32_t qpPMin = -1;
                 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
                 }
                 int32_t qpPMax = -1;
                 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
                 }
                  int32_t qpBMin = -1;
                 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
                 }
                 int32_t qpBMax = -1;
                 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
                 }
             }
             // NB: for any field in both format and deltas, the deltas copy wins
@@ -2806,26 +2845,44 @@
     return OK;
 }
 
+// this is the user-callable entry point
 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
 
     reply = 0;
 
-    // shouldn't happen, but be safe
-    if (mMetricsHandle == 0) {
-        return UNKNOWN_ERROR;
+    sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
     }
 
-    // update any in-flight data that's not carried within the record
-    updateMediametrics();
-
-    // send it back to the caller.
-    reply = mediametrics_dup(mMetricsHandle);
-
-    updateEphemeralMediametrics(reply);
+    CHECK(response->findInt64("metrics", &reply));
 
     return OK;
 }
 
+// runs on the looper thread (for mutex purposes)
+void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
+
+    mediametrics_handle_t results = 0;
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+
+    if (mMetricsHandle != 0) {
+        updateMediametrics();
+        results = mediametrics_dup(mMetricsHandle);
+        updateEphemeralMediametrics(results);
+    } else {
+        results = mediametrics_dup(mMetricsHandle);
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setInt64("metrics", results);
+    response->postReply(replyID);
+}
+
 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
     msg->setInt32("portIndex", kPortIndexInput);
@@ -3887,6 +3944,13 @@
             break;
         }
 
+        case kWhatGetMetrics:
+        {
+            onGetMetrics(msg);
+            break;
+        }
+
+
         case kWhatConfigure:
         {
             if (mState != INITIALIZED) {
@@ -3907,6 +3971,18 @@
             sp<AMessage> format;
             CHECK(msg->findMessage("format", &format));
 
+            // start with a copy of the passed metrics info for use in this run
+            mediametrics_handle_t handle;
+            CHECK(msg->findInt64("metrics", &handle));
+            if (handle != 0) {
+                if (mMetricsHandle != 0) {
+                    flushMediametrics();
+                }
+                mMetricsHandle = mediametrics_dup(handle);
+                // and set some additional metrics values
+                initMediametrics();
+            }
+
             int32_t push;
             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 6f6a4e6..703f7ad 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -357,6 +357,7 @@
         kWhatSetNotification                = 'setN',
         kWhatDrmReleaseCrypto               = 'rDrm',
         kWhatCheckBatteryStats              = 'chkB',
+        kWhatGetMetrics                     = 'getM',
     };
 
     enum {
@@ -427,6 +428,7 @@
     sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
 
+    Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
@@ -434,6 +436,7 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
     void updatePlaybackDuration(const sp<AMessage> &msg);
@@ -472,7 +475,8 @@
     // the (possibly) updated format is returned in place.
     status_t shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags);
+            uint32_t flags,
+            mediametrics_handle_t handle);
 
     // populate the format shaper library with information for this codec encoding
     // for the indicated media type
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ddf797c..88f7be7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -332,6 +332,11 @@
 }
 
 bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        ALOGE("b/230630526 buffer->size() == 0");
+        android_errorWriteLog(0x534e4554, "230630526");
+        return false;
+    }
     const uint8_t *data = buffer->data();
     unsigned nalType = data[0] & 0x1f;
     if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -624,8 +629,7 @@
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    int32_t nalType = buffer->size() >= 1 ? buffer->data()[0] & 0x1f : -1;
-    if (nalType != 28 || (buffer->size() >= 2 && buffer->data()[1] & 0x80)) {
+    if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
         return firstSeqNo;
     }
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..72dd981 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -629,13 +629,13 @@
 
 int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
+    CHECK(!queue->empty());
     // pick the first sequence number has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    unsigned nalType = buffer->data()[0] & 0x1f;
-    if (nalType != 28 || buffer->data()[2] & 0x80) {
+    if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
         return firstSeqNo;
     }
 
@@ -645,7 +645,7 @@
         if (rtpTime + jit >= play) {
             break;
         }
-        if ((data[2] & 0x80)) {
+        if (it->size() >= 3 && (data[2] & 0x80)) {
             const int32_t seqNo = it->int32Data();
             ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
             firstSeqNo = seqNo;
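
Both assembler changes above have the same shape: before reading NAL header bytes out of an RTP payload, check that the buffer actually contains them (size() == 0 in dropFramesUntilIframe(), size() < 2 for the H.264 FU-A header, size() < 3 for the HEVC case), and return early instead of indexing past the end. A minimal sketch of that defensive parse for the H.264 FU-A case, in plain C++; parseFuAHeader and FuAHeader are illustrative helpers, not part of AAVCAssembler.

#include <cstddef>
#include <cstdint>
#include <optional>

struct FuAHeader {
    uint8_t nalType;   // low 5 bits of the FU indicator (first byte)
    bool    startBit;  // S bit (0x80) of the FU header (second byte)
};

// Parse only when the buffer is large enough to hold both header bytes,
// mirroring the size() checks added to pickStartSeq() above.
std::optional<FuAHeader> parseFuAHeader(const uint8_t* data, size_t size) {
    if (data == nullptr || size < 2) {
        return std::nullopt;          // too short: never touch data[0] or data[1]
    }
    FuAHeader header;
    header.nalType  = data[0] & 0x1f;
    header.startBit = (data[1] & 0x80) != 0;
    if (header.nalType != 28) {
        return std::nullopt;          // only FU-A (type 28) carries a FU header
    }
    return header;
}
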
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 07f4529..83b84e3 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -103,11 +103,10 @@
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
     myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
-    if (callerAttributionSource.token != nullptr) {
-        myAttributionSource.token = callerAttributionSource.token;
-    } else {
-        myAttributionSource.token = sp<BBinder>::make();
-    }
+    // Create a static token for audioserver requests, which identifies the
+    // audioserver to the app ops system
+    static sp<BBinder> appOpsToken = sp<BBinder>::make();
+    myAttributionSource.token = appOpsToken;
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
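
The ServiceUtilities change above stops forwarding the caller's token (or minting a fresh one when none was supplied) and instead attaches a single process-wide static token, so the app ops system sees one stable identity for everything audioserver sends on behalf of clients. The underlying idiom is a function-local static, which C++ initializes exactly once and thread-safely on first use; a minimal sketch with illustrative names (Token, processToken):

#include <memory>

struct Token {};  // stand-in for the binder object used purely as an identity

// Every caller in the process gets the same token; the local static is
// constructed on first call and lives until process exit.
std::shared_ptr<Token> processToken() {
    static std::shared_ptr<Token> token = std::make_shared<Token>();
    return token;
}
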
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e4c4107..f1f8a8f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -288,7 +288,6 @@
                 return opPackageLegacy == package; }) == packages.end()) {
             ALOGW("The package name(%s) provided does not correspond to the uid %d",
                     attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
-            checkedAttributionSource.packageName = std::optional<std::string>();
         }
     }
     return checkedAttributionSource;
@@ -588,6 +587,33 @@
     audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     audio_attributes_t localAttr = *attr;
+
+    // TODO b/182392553: refactor or make clearer
+    pid_t clientPid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+    bool updatePid = (clientPid == (pid_t)-1);
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+
+    AttributionSourceState adjAttributionSource = client.attributionSource;
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        uid_t clientUid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        updatePid = true;
+    }
+    if (updatePid) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, clientPid);
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+    }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
+
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
         fullConfig.sample_rate = config->sample_rate;
@@ -597,7 +623,7 @@
         bool isSpatialized;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
-                                            &streamType, client.attributionSource,
+                                            &streamType, adjAttributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
@@ -608,7 +634,7 @@
         ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
-                                              client.attributionSource,
+                                              adjAttributionSource,
                                               config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
     }
@@ -1057,7 +1083,7 @@
     audio_attributes_t localAttr = input.attr;
 
     AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, clientUid);
@@ -1073,6 +1099,8 @@
         clientPid = callingPid;
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -2317,7 +2345,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
            adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(currentUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, currentUid);
@@ -2333,7 +2361,8 @@
                  __func__, callingUid, callingPid, currentPid);
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
-
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
     // we don't yet support anything other than linear PCM
     if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3967,7 +3996,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
     pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW_IF(currentPid != -1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
@@ -3975,6 +4004,7 @@
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
         currentPid = callingPid;
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
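
The blocks added to openMmapStream(), createTrack(), createRecord() and createEffect() above all follow one pattern: read the uid/pid the client claims, compare them with the binder calling identity, log when they disagree, and overwrite them unless the caller is a trusted system process, before handing the attribution source to checkAttributionSourcePackage(). A minimal sketch of that adjustment in plain C++; Identity, adjustIdentity and callerIsTrusted are illustrative stand-ins for AttributionSourceState and isAudioServerOrMediaServerOrSystemServerOrRootUid(), not the real types.

#include <sys/types.h>
#include <cstdio>

struct Identity {
    uid_t uid;
    pid_t pid;   // -1 means the caller did not supply a pid
};

// Never trust a uid/pid supplied over binder unless the caller is a trusted
// system uid; otherwise substitute the calling identity and log the attempt.
Identity adjustIdentity(Identity provided, uid_t callingUid, pid_t callingPid,
                        bool callerIsTrusted) {
    Identity adjusted = provided;
    bool updatePid = (provided.pid == (pid_t)-1);
    if (!callerIsTrusted) {
        if (provided.uid != callingUid) {
            std::fprintf(stderr, "uid %d tried to pass itself off as %d\n",
                         static_cast<int>(callingUid), static_cast<int>(provided.uid));
        }
        adjusted.uid = callingUid;   // clients may only speak for themselves
        updatePid = true;
    }
    if (updatePid) {
        if (provided.pid != (pid_t)-1 && provided.pid != callingPid) {
            std::fprintf(stderr, "uid %d pid %d tried to pass itself off as pid %d\n",
                         static_cast<int>(callingUid), static_cast<int>(callingPid),
                         static_cast<int>(provided.pid));
        }
        adjusted.pid = callingPid;   // report the pid actually behind the call
    }
    return adjusted;
}
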
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 9837574..355f411 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8369,8 +8369,6 @@
     audio_input_flags_t inputFlags = mInput->flags;
     audio_input_flags_t requestedFlags = *flags;
     uint32_t sampleRate;
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
@@ -8385,7 +8383,7 @@
     }
 
     if (maxSharedAudioHistoryMs != 0) {
-        if (!captureHotwordAllowed(checkedAttributionSource)) {
+        if (!captureHotwordAllowed(attributionSource)) {
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
@@ -8506,16 +8504,16 @@
         Mutex::Autolock _l(mLock);
         int32_t startFrames = -1;
         if (!mSharedAudioPackageName.empty()
-                && mSharedAudioPackageName == checkedAttributionSource.packageName
+                && mSharedAudioPackageName == attributionSource.packageName
                 && mSharedAudioSessionId == sessionId
-                && captureHotwordAllowed(checkedAttributionSource)) {
+                && captureHotwordAllowed(attributionSource)) {
             startFrames = mSharedAudioStartFrames;
         }
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
                       nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
-                      checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+                      attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
                       startFrames);
 
         lStatus = track->initCheck();
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 02c4aaf..58f99e8 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -531,10 +531,7 @@
             id, attr.flags);
         return nullptr;
     }
-
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
-    return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
+    return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 4212c1c..fdf5787 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -352,31 +352,20 @@
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
-    // TODO b/182392553: refactor or remove
-    AttributionSourceState adjAttributionSource = attributionSource;
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
-        int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
-            legacy2aidl_uid_t_int32_t(callingUid));
-        ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
-                "%s uid %d tried to pass itself off as %d", __func__,
-                callingUidAidl, attributionSource.uid);
-        adjAttributionSource.uid = callingUidAidl;
-    }
     if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
-        aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+        aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+            && !bypassInterruptionPolicyAllowed(attributionSource)) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -386,7 +375,7 @@
     bool isSpatialized = false;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
-                                                            adjAttributionSource,
+                                                            attributionSource,
                                                             &config,
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
@@ -401,20 +390,20 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(adjAttributionSource)) {
+                && !callAudioInterceptionAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+            } else if (!modifyPhoneStateAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
+            if (!modifyAudioRoutingAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
@@ -427,7 +416,7 @@
 
     if (result == NO_ERROR) {
         sp<AudioPlaybackClient> client =
-                new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+                new AudioPlaybackClient(attr, output, attributionSource, session,
                     portId, selectedDeviceId, stream, isSpatialized);
         mAudioPlaybackClients.add(portId, client);
 
@@ -613,33 +602,8 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    // Make sure attribution source represents the current caller
-    AttributionSourceState adjAttributionSource = attributionSource;
-    // TODO b/182392553: refactor or remove
-    bool updatePid = (attributionSource.pid == -1);
-    const uid_t callingUid =IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
-            attributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
-                currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
-                callingUid));
-        updatePid = true;
-    }
-
-    if (updatePid) {
-        const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
-            IPCThreadState::self()->getCallingPid()));
-        ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
-        adjAttributionSource.pid = callingPid;
-    }
-
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            adjAttributionSource)));
+            attributionSource)));
 
     // check calling permissions.
     // Capturing from the following sources does not require permission RECORD_AUDIO
@@ -650,17 +614,17 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(adjAttributionSource, inputSource)
+    if (!(recordingAllowed(attributionSource, inputSource)
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
         ALOGE("%s permission denied: recording not allowed for %s",
-                __func__, adjAttributionSource.toString().c_str());
+                __func__, attributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -674,11 +638,11 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(adjAttributionSource)) {
+        && !captureTunerAudioInputAllowed(attributionSource)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
+    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -686,14 +650,14 @@
     if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
             && !canCaptureHotword) {
         ALOGE("%s: permission denied: hotword mode not allowed"
-              " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+              " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -708,7 +672,7 @@
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
             status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
-                                                          adjAttributionSource, &config,
+                                                          attributionSource, &config,
                                                           flags, &selectedDeviceId,
                                                           &inputType, &portId);
 
@@ -737,7 +701,7 @@
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+                if (!(modifyAudioRoutingAllowed(attributionSource)
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
@@ -760,7 +724,7 @@
         }
 
         sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
-                                                             selectedDeviceId, adjAttributionSource,
+                                                             selectedDeviceId, attributionSource,
                                                              canCaptureOutput, canCaptureHotword,
                                                              mOutputCommandThread);
         mAudioRecordClients.add(portId, client);