Merge "audio: optimize variable assignment method to reduce resource consumption" into main
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
new file mode 100644
index 0000000..82f3d85
--- /dev/null
+++ b/media/audio/aconfig/Android.bp
@@ -0,0 +1,37 @@
+// media_audio namespace flags
+
+cc_defaults {
+    name: "audio-aconfig-cc-defaults",
+    host_supported: true,
+}
+
+// Framework-available flags follow.
+// Care must be taken to avoid namespace conflicts.
+// These flags are accessible outside of the platform! Limit usage to @FlaggedApi wherever possible.
+
+aconfig_declarations {
+    name: "android.media.audio-aconfig",
+    package: "android.media.audio",
+    container: "system",
+    srcs: ["audio_framework.aconfig"],
+    visibility: ["//frameworks/base/api"],
+}
+
+java_aconfig_library {
+    name: "android.media.audio-aconfig-java",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+cc_aconfig_library {
+    name: "android.media.audio-aconfig-cc",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
+aconfig_declarations_group {
+    name: "audio-framework-aconfig",
+    java_aconfig_libraries: [
+        "android.media.audio-aconfig-java",
+    ],
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
new file mode 100644
index 0000000..2cafe58
--- /dev/null
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -0,0 +1,15 @@
+# Top level framework (android.media) flags
+# Only add flags here which must be included in framework.jar
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.audio"
+
+flag {
+    name: "sco_managed_by_audio"
+    namespace: "media_audio"
+    description: "\
+Enable new implementation of headset profile device connection and\
+SCO audio activation."
+    bug: "265057196"
+}
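
The flag declared above is exposed to Java through "android.media.audio-aconfig-java" and to native code through "android.media.audio-aconfig-cc". As a rough illustration only — the generated header name and namespace are assumptions derived from the aconfig package "android.media.audio", following the pattern used elsewhere in this change — native code could gate the new SCO behavior like this:

    // Sketch only: aconfig codegen is assumed to emit a header and namespace
    // named after the package, as it does for the codec flags further below.
    #include <android_media_audio.h>

    void applyScoRoutingPolicy() {
        if (android::media::audio::sco_managed_by_audio()) {
            // New path: the audio framework drives HFP device connection and SCO activation.
        } else {
            // Legacy path: the Bluetooth stack starts and stops SCO itself.
        }
    }
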
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 3a71520..bcd5c37 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -96,18 +96,23 @@
     return (C2Component::kind_t)(mKind.value);
 }
 
-void MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
-        uint32_t &sampleRate_, uint32_t &channelCount_) const {
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+        uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+    if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+        return false;
+    }
     if (mC2ComponentIntf) {
         C2StreamSampleRateInfo::output sampleRate;
         C2StreamChannelCountInfo::output channelCount;
         c2_status_t res = mC2ComponentIntf->query_vb(
                 {&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
-        if (res == C2_OK) {
-            sampleRate_ = sampleRate.value;
-            channelCount_ = channelCount.value;
+        if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+            *sampleRate_ = sampleRate.value;
+            *channelCount_ = channelCount.value;
+            return true;
         }
     }
+    return false;
 }
 
 //C2MultiAccessUnitBuffer
@@ -164,6 +169,7 @@
         std::list<std::unique_ptr<C2Work>> * const worklist) {
     if (worklist == nullptr) {
         LOG(ERROR) << "Provided null worklist for error()";
+        mFrameHolder.clear();
         return C2_OK;
     }
     std::unique_lock<std::mutex> l(mLock);
@@ -272,6 +278,7 @@
                 LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
                 return C2_CORRUPTED;
             }
+            frameInfo.mInputC2Ref = inBuffers;
             const std::vector<C2ConstLinearBlock>& multiAU =
                     inBuffers.front()->data().linearBlocks();
             std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
@@ -320,26 +327,10 @@
             }
         }
         if (!processedWork->empty()) {
-            {
-                C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
-                if (mInterface->kind() == C2Component::KIND_DECODER) {
-                    uint32_t sampleRate = 0;
-                    uint32_t channelCount = 0;
-                    uint32_t frameSize = 0;
-                    mInterface->getDecoderSampleRateAndChannelCount(
-                            sampleRate, channelCount);
-                    if (sampleRate > 0 && channelCount > 0) {
-                        frameSize = channelCount * 2;
-                        multiAccessParams.maxSize =
-                                (multiAccessParams.maxSize / frameSize) * frameSize;
-                        multiAccessParams.thresholdSize =
-                                (multiAccessParams.thresholdSize / frameSize) * frameSize;
-                    }
-                }
-                frameInfo.mLargeFrameTuning = multiAccessParams;
-                std::lock_guard<std::mutex> l(mLock);
-                mFrameHolder.push_back(std::move(frameInfo));
-            }
+            C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+            frameInfo.mLargeFrameTuning = multiAccessParams;
+            std::lock_guard<std::mutex> l(mLock);
+            mFrameHolder.push_back(std::move(frameInfo));
         }
     }
     return C2_OK;
@@ -506,6 +497,20 @@
             frame.reset();
             return C2_OK;
         }
+        int64_t sampleTimeUs = 0;
+        uint32_t frameSize = 0;
+        uint32_t sampleRate = 0;
+        uint32_t channelCount = 0;
+        if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+            frameSize = channelCount * 2;
+            if (mInterface->kind() == C2Component::KIND_DECODER) {
+                frame.mLargeFrameTuning.maxSize =
+                        (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+                frame.mLargeFrameTuning.thresholdSize =
+                        (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+            }
+        }
         c2_status_t c2ret = allocateWork(frame, true);
         if (c2ret != C2_OK) {
             return c2ret;
@@ -520,15 +525,7 @@
         outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
                 (*worklet)->output.infoBuffers.begin(),
                 (*worklet)->output.infoBuffers.end());
-        int64_t sampleTimeUs = 0;
-        uint32_t frameSize = 0;
-        uint32_t sampleRate = 0;
-        uint32_t channelCount = 0;
-        mInterface->getDecoderSampleRateAndChannelCount(sampleRate, channelCount);
-        if (sampleRate > 0 && channelCount > 0) {
-            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
-            frameSize = channelCount * 2;
-        }
+
         LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
                 << " threshold " << frame.mLargeFrameTuning.thresholdSize;
         if ((*worklet)->output.buffers.size() > 0) {
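
getDecoderSampleRateAndChannelCount() now reports success through its return value and writes through out-pointers, so callers no longer have to pre-validate the results themselves. A condensed recap of the new calling convention as used in the hunk above (the interface pointer and frame come from the surrounding helper; only the shape of the call is shown):

    uint32_t sampleRate = 0;
    uint32_t channelCount = 0;
    // Returns false for null out-pointers, a failed query, or zero-valued results,
    // so success implies both values are usable.
    if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
        const uint32_t frameSize = channelCount * 2;  // 16-bit PCM, one sample per channel
        // Round the large-frame sizes down to whole PCM frames so no frame is split.
        frame.mLargeFrameTuning.maxSize =
                (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
        frame.mLargeFrameTuning.thresholdSize =
                (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
    }
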
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index a90ae56..e321111 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -44,8 +44,8 @@
     bool isValidField(const C2ParamField &field) const;
 
 protected:
-    void getDecoderSampleRateAndChannelCount(
-            uint32_t &sampleRate_, uint32_t &channelCount_) const;
+    bool getDecoderSampleRateAndChannelCount(
+            uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
     const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
     std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
     C2ComponentKindSetting mKind;
@@ -153,6 +153,11 @@
          */
         std::unique_ptr<C2Work> mLargeWork;
 
+        /*
+         * Holds references to the incoming input buffers so they stay alive
+         * while this large frame is being assembled.
+         */
+        std::vector<std::shared_ptr<C2Buffer>> mInputC2Ref;
+
         MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal):inOrdinal(ordinal) {
 
         }
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 18c2468..4de2347 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -45,6 +45,7 @@
 
     static_libs: [
         "libSurfaceFlingerProperties",
+        "aconfig_mediacodec_flags_c_lib",
         "android.media.codec-aconfig-cc",
     ],
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 40656ff..f58dc65 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -552,8 +552,7 @@
     }
 
     ssize_t result = -1;
-    ssize_t codecDataOffset = 0;
-    size_t inBufferOffset = 0;
+    size_t srcOffset = offset;
     size_t outBufferSize = 0;
     uint32_t cryptoInfoIdx = 0;
     int32_t heapSeqNum = getHeapSeqNum(memory);
@@ -565,18 +564,20 @@
     for (int i = 0; i < bufferInfos->value.size(); i++) {
         if (bufferInfos->value[i].mSize > 0) {
             std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+            src.offset = srcOffset;
+            src.size = bufferInfos->value[i].mSize;
             result = mCrypto->decrypt(
                     (uint8_t*)info->mKey,
                     (uint8_t*)info->mIv,
                     info->mMode,
                     info->mPattern,
                     src,
-                    inBufferOffset,
+                    0,
                     info->mSubSamples,
                     info->mNumSubSamples,
                     dst,
                     errorDetailMsg);
-            inBufferOffset += bufferInfos->value[i].mSize;
+            srcOffset += bufferInfos->value[i].mSize;
             if (result < 0) {
                 ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
                         mName, result);
@@ -599,7 +600,7 @@
         wView.setOffset(0);
     }
     std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
-            block->share(codecDataOffset, outBufferSize - codecDataOffset, C2Fence{}))};
+            block->share(0, outBufferSize, C2Fence{}))};
     if (!buffer->copy(c2Buffer)) {
         ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
         return -ENOSYS;
@@ -980,8 +981,7 @@
     }
     // size of cryptoInfo and accessUnitInfo should be the same?
     ssize_t result = -1;
-    ssize_t codecDataOffset = 0;
-    size_t inBufferOffset = 0;
+    size_t srcOffset = 0;
     size_t outBufferSize = 0;
     uint32_t cryptoInfoIdx = 0;
     {
@@ -994,6 +994,7 @@
         encryptedBuffer->getMappedBlock(&mappedBlock);
         hardware::drm::V1_0::SharedBuffer source;
         encryptedBuffer->fillSourceBuffer(&source);
+        srcOffset = source.offset;
         for (int i = 0 ; i < bufferInfos->value.size(); i++) {
             if (bufferInfos->value[i].mSize > 0) {
                 std::unique_ptr<CodecCryptoInfo> info =
@@ -1004,18 +1005,20 @@
                     // no data so we only populate the bufferInfo
                     result = 0;
                 } else {
+                    source.offset = srcOffset;
+                    source.size = bufferInfos->value[i].mSize;
                     result = mCrypto->decrypt(
                             (uint8_t*)info->mKey,
                             (uint8_t*)info->mIv,
                             info->mMode,
                             info->mPattern,
                             source,
-                            inBufferOffset,
+                            buffer->offset(),
                             info->mSubSamples,
                             info->mNumSubSamples,
                             destination,
                             errorDetailMsg);
-                    inBufferOffset += bufferInfos->value[i].mSize;
+                    srcOffset += bufferInfos->value[i].mSize;
                     if (result < 0) {
                         ALOGI("[%s] decrypt failed: result=%zd", mName, result);
                         return result;
@@ -1028,7 +1031,7 @@
                 }
             }
         }
-        buffer->setRange(codecDataOffset, outBufferSize - codecDataOffset);
+        buffer->setRange(0, outBufferSize);
     }
     return queueInputBufferInternal(buffer, block, bufferSize);
 }
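
Both decrypt loops now follow the same pattern: the shared source descriptor's own offset and size are rewritten to cover exactly one access unit, and the running source offset advances by that access unit's size instead of accumulating a separate in-buffer offset. A condensed sketch of the iteration (declarations and error strings elided; extraOffset is 0 in attachEncryptedBuffers and buffer->offset() in the secure queue path, as in the hunks above):

    size_t srcOffset = source.offset;              // start of the first access unit
    for (size_t i = 0; i < bufferInfos->value.size(); ++i) {
        const size_t auSize = bufferInfos->value[i].mSize;
        if (auSize == 0) continue;
        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
        source.offset = srcOffset;                 // decrypt exactly this access unit
        source.size = auSize;
        ssize_t result = mCrypto->decrypt(
                (uint8_t*)info->mKey, (uint8_t*)info->mIv, info->mMode, info->mPattern,
                source, extraOffset,
                info->mSubSamples, info->mNumSubSamples,
                destination, errorDetailMsg);
        if (result < 0) return result;
        srcOffset += auSize;                       // next access unit in shared memory
    }
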
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 8dce789..37a7a4f 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,7 @@
 
 #include <strings.h>
 
+#include <com_android_media_codec_flags.h>
 #include <android_media_codec.h>
 
 #include <C2Component.h>
@@ -755,7 +756,8 @@
                 addSupportedColorFormats(
                         intf, caps.get(), trait, mediaType, it->second);
 
-                if (android::media::codec::provider_->large_audio_frame_finish()) {
+                if (com::android::media::codec::flags::provider_->large_audio_frame()
+                        && android::media::codec::provider_->large_audio_frame_finish()) {
                     // Adding feature-multiple-frames when C2LargeFrame param is present
                     if (trait.domain == C2Component::DOMAIN_AUDIO) {
                         std::vector<std::shared_ptr<C2ParamDescriptor>> params;
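
Each aconfig package generates its own C++ header and namespace (dots in the package name become underscores in the header, and the namespace mirrors the package), so gating on flags from two packages requires including both headers, as this hunk does. In isolation:

    #include <com_android_media_codec_flags.h>  // package com.android.media.codec.flags
    #include <android_media_codec.h>            // package android.media.codec

    // feature-multiple-frames is only advertised when both flags read true.
    const bool exposeMultipleFrames =
            com::android::media::codec::flags::provider_->large_audio_frame()
            && android::media::codec::provider_->large_audio_frame_finish();
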
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index fa651a6..9b1bac6 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -59,7 +59,8 @@
 const std::string VisualizerImpl::kEffectName = "Visualizer";
 const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
         MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
-        MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+        MAKE_RANGE(Visualizer, captureSamples, VisualizerContext::kMinCaptureBufSize,
+                   VisualizerContext::kMaxCaptureBufSize),
         /* get only parameters, set invalid range (min > max) to indicate not support set */
         MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.rms = 1, .peak = 1}),
                    Visualizer::Measurement({.rms = 0, .peak = 0})),
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d2bb3a..c763b1a 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -93,7 +93,7 @@
     mCaptureSamples = samples;
     return RetCode::SUCCESS;
 }
-int VisualizerContext::getCaptureSamples() {
+int32_t VisualizerContext::getCaptureSamples() {
     std::lock_guard lg(mMutex);
     return mCaptureSamples;
 }
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 958035f..b03e038 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -18,6 +18,7 @@
 
 #include <android-base/thread_annotations.h>
 #include <audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_visualizer.h>
 
 #include "effect-impl/EffectContext.h"
 
@@ -25,8 +26,11 @@
 
 class VisualizerContext final : public EffectContext {
   public:
-    static const uint32_t kMaxCaptureBufSize = 65536;
-    static const uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
+    // Need to align the min/max capture size to VISUALIZER_CAPTURE_SIZE_MIN and
+    // VISUALIZER_CAPTURE_SIZE_MAX because of a limitation in audio_utils fixedfft.
+    static constexpr int32_t kMinCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MIN;
+    static constexpr int32_t kMaxCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MAX;
+    static constexpr uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
 
     VisualizerContext(int statusDepth, const Parameter::Common& common);
     ~VisualizerContext();
@@ -38,8 +42,8 @@
     // keep all parameters and reset buffer.
     void reset();
 
-    RetCode setCaptureSamples(int captureSize);
-    int getCaptureSamples();
+    RetCode setCaptureSamples(int32_t captureSize);
+    int32_t getCaptureSamples();
     RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
     Visualizer::MeasurementMode getMeasurementMode();
     RetCode setScalingMode(Visualizer::ScalingMode mode);
@@ -86,7 +90,7 @@
     // capture buf with 8 bits mono PCM samples
     std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
     uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
-    uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+    int32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
 
     // to avoid recomputing it every time a buffer is processed
     uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
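
Tying the lower bound to VISUALIZER_CAPTURE_SIZE_MIN means a requested capture size is only accepted inside the window the fixed-point FFT in audio_utils supports. The MAKE_RANGE entry in Visualizer.cpp enforces this declaratively; written as a plain check it would read roughly as follows (a minimal sketch — the effect additionally expects a power-of-two size, which a min/max pair alone cannot express):

    #include <system/audio_effects/effect_visualizer.h>

    // Sketch of the bound implied by the updated captureSamples range.
    static bool isCaptureSizeInRange(int32_t samples) {
        return samples >= VISUALIZER_CAPTURE_SIZE_MIN &&
               samples <= VISUALIZER_CAPTURE_SIZE_MAX;
    }
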
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 305d42f..aab8e66 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3397,9 +3397,6 @@
     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
         return BAD_VALUE;
     }
-    if (cryptoInfos == nullptr || cryptoInfos->value.empty()) {
-        return BAD_VALUE;
-    }
     status_t err = OK;
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
     msg->setSize("index", index);
@@ -3407,8 +3404,12 @@
         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
     msg->setObject("memory", memory);
     msg->setSize("offset", offset);
-    msg->setSize("ssize", size);
-    msg->setObject("cryptoInfos", cryptoInfos);
+    if (cryptoInfos != nullptr) {
+        msg->setSize("ssize", size);
+        msg->setObject("cryptoInfos", cryptoInfos);
+    } else {
+        msg->setSize("size", size);
+    }
     msg->setObject("accessUnitInfo", bufferInfos);
     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
         return err;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c82a303..72045af 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -611,6 +611,10 @@
             // ACodec is waiting for all buffers to be returned, do NOT
             // submit any more buffers to the codec.
             bufferSource->onOmxIdle();
+        } else if (param == OMX_StateExecuting) {
+            // Initiating transition from Idle -> Executing
+            // Start submitting buffers to codec.
+            bufferSource->onOmxExecuting();
         } else if (param == OMX_StateLoaded) {
             // Initiating transition from Idle/Executing -> Loaded
             // Buffers are about to be freed.
@@ -2367,13 +2371,6 @@
             asString(event), event, arg1String, arg1, arg2String, arg2);
     const sp<IOMXBufferSource> bufferSource(getBufferSource());
 
-    if (bufferSource != NULL
-            && event == OMX_EventCmdComplete
-            && arg1 == OMX_CommandStateSet
-            && arg2 == OMX_StateExecuting) {
-        bufferSource->onOmxExecuting();
-    }
-
     // allow configuration if we return to the loaded state
     if (event == OMX_EventCmdComplete
             && arg1 == OMX_CommandStateSet
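
The net effect is that onOmxExecuting() is delivered when the Idle -> Executing command is issued, instead of waiting for the codec's OMX_EventCmdComplete, so the buffer source can begin submitting buffers as soon as the transition starts. Condensed from the two hunks above (Loaded handling elided):

    // Inside the state-set command handling, simplified.
    if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
        if (param == OMX_StateIdle) {
            bufferSource->onOmxIdle();        // stop submitting; wait for buffers back
        } else if (param == OMX_StateExecuting) {
            bufferSource->onOmxExecuting();   // start submitting buffers right away
        }
        // OMX_StateLoaded handling unchanged (buffers are about to be freed).
    }
    // OMX_EventCmdComplete(OMX_CommandStateSet, OMX_StateExecuting) no longer
    // triggers onOmxExecuting().
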
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 4eed4b5..294a8e9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1037,7 +1037,7 @@
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
-        static const nsecs_t kRequestSubmitTimeout = 200e6; // 200 ms
+        static const nsecs_t kRequestSubmitTimeout = 500e6; // 500 ms
 
         // Used to prepare a batch of requests.
         struct NextRequest {