/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
|  | 16 |  | 
|  | 17 | #include "VisualizerContext.h" | 
|  | 18 |  | 
|  | 19 | #include <algorithm> | 
| Mikhail Naganov | 6352e82 | 2023-03-09 18:22:36 -0800 | [diff] [blame] | 20 | #include <math.h> | 
|  | 21 | #include <time.h> | 
|  | 22 |  | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 23 | #include <android/binder_status.h> | 
|  | 24 | #include <audio_utils/primitives.h> | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 25 | #include <system/audio.h> | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 26 | #include <Utils.h> | 
|  | 27 |  | 
|  | 28 | #ifndef BUILD_FLOAT | 
|  | 29 | #error AIDL Visualizer only support float 32bits, make sure add cflags -DBUILD_FLOAT, | 
|  | 30 | #endif | 
|  | 31 |  | 
| Mikhail Naganov | 6352e82 | 2023-03-09 18:22:36 -0800 | [diff] [blame] | 32 | using aidl::android::hardware::audio::common::getChannelCount; | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 33 |  | 
|  | 34 | namespace aidl::android::hardware::audio::effect { | 
|  | 35 |  | 
|  | 36 | VisualizerContext::VisualizerContext(int statusDepth, const Parameter::Common& common) | 
|  | 37 | : EffectContext(statusDepth, common) { | 
|  | 38 | } | 
|  | 39 |  | 
|  | 40 | VisualizerContext::~VisualizerContext() { | 
|  | 41 | std::lock_guard lg(mMutex); | 
|  | 42 | LOG(DEBUG) << __func__; | 
|  | 43 | mState = State::UNINITIALIZED; | 
|  | 44 | } | 
|  | 45 |  | 
|  | 46 | RetCode VisualizerContext::initParams(const Parameter::Common& common) { | 
|  | 47 | std::lock_guard lg(mMutex); | 
|  | 48 | LOG(DEBUG) << __func__; | 
|  | 49 | if (common.input != common.output) { | 
|  | 50 | LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString() | 
|  | 51 | << " and output: " << common.output.toString(); | 
|  | 52 | return RetCode::ERROR_ILLEGAL_PARAMETER; | 
|  | 53 | } | 
|  | 54 |  | 
|  | 55 | mState = State::INITIALIZED; | 
|  | 56 | auto channelCount = getChannelCount(common.input.base.channelMask); | 
|  | 57 | #ifdef SUPPORT_MC | 
|  | 58 | if (channelCount < 1 || channelCount > FCC_LIMIT) return RetCode::ERROR_ILLEGAL_PARAMETER; | 
|  | 59 | #else | 
|  | 60 | if (channelCount != FCC_2) return RetCode::ERROR_ILLEGAL_PARAMETER; | 
|  | 61 | #endif | 
|  | 62 | mChannelCount = channelCount; | 
|  | 63 | mCommon = common; | 
|  | 64 | return RetCode::SUCCESS; | 
|  | 65 | } | 
|  | 66 |  | 
|  | 67 | RetCode VisualizerContext::enable() { | 
|  | 68 | std::lock_guard lg(mMutex); | 
|  | 69 | if (mState != State::INITIALIZED) { | 
|  | 70 | return RetCode::ERROR_EFFECT_LIB_ERROR; | 
|  | 71 | } | 
|  | 72 | mState = State::ACTIVE; | 
|  | 73 | return RetCode::SUCCESS; | 
|  | 74 | } | 
|  | 75 |  | 
|  | 76 | RetCode VisualizerContext::disable() { | 
|  | 77 | std::lock_guard lg(mMutex); | 
|  | 78 | if (mState != State::ACTIVE) { | 
|  | 79 | return RetCode::ERROR_EFFECT_LIB_ERROR; | 
|  | 80 | } | 
|  | 81 | mState = State::INITIALIZED; | 
|  | 82 | return RetCode::SUCCESS; | 
|  | 83 | } | 
|  | 84 |  | 
|  | 85 | void VisualizerContext::reset() { | 
|  | 86 | std::lock_guard lg(mMutex); | 
|  | 87 | std::fill_n(mCaptureBuf.begin(), kMaxCaptureBufSize, 0x80); | 
|  | 88 | } | 
|  | 89 |  | 
|  | 90 | RetCode VisualizerContext::setCaptureSamples(int samples) { | 
|  | 91 | std::lock_guard lg(mMutex); | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 92 | mCaptureSamples = samples; | 
|  | 93 | return RetCode::SUCCESS; | 
|  | 94 | } | 
|  | 95 | int VisualizerContext::getCaptureSamples() { | 
|  | 96 | std::lock_guard lg(mMutex); | 
|  | 97 | return mCaptureSamples; | 
|  | 98 | } | 
|  | 99 |  | 
|  | 100 | RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) { | 
|  | 101 | std::lock_guard lg(mMutex); | 
|  | 102 | mMeasurementMode = mode; | 
|  | 103 | return RetCode::SUCCESS; | 
|  | 104 | } | 
|  | 105 | Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() { | 
|  | 106 | std::lock_guard lg(mMutex); | 
|  | 107 | return mMeasurementMode; | 
|  | 108 | } | 
|  | 109 |  | 
|  | 110 | RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) { | 
|  | 111 | std::lock_guard lg(mMutex); | 
|  | 112 | mScalingMode = mode; | 
|  | 113 | return RetCode::SUCCESS; | 
|  | 114 | } | 
|  | 115 | Visualizer::ScalingMode VisualizerContext::getScalingMode() { | 
|  | 116 | std::lock_guard lg(mMutex); | 
|  | 117 | return mScalingMode; | 
|  | 118 | } | 
|  | 119 |  | 
|  | 120 | RetCode VisualizerContext::setDownstreamLatency(int latency) { | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 121 | std::lock_guard lg(mMutex); | 
|  | 122 | mDownstreamLatency = latency; | 
|  | 123 | return RetCode::SUCCESS; | 
|  | 124 | } | 
|  | 125 |  | 
| Shunkai Yao | 6b857c9 | 2023-02-13 17:44:52 +0000 | [diff] [blame] | 126 | int VisualizerContext::getDownstreamLatency() { | 
|  | 127 | std::lock_guard lg(mMutex); | 
|  | 128 | return mDownstreamLatency; | 
|  | 129 | } | 
|  | 130 |  | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 131 | uint32_t VisualizerContext::getDeltaTimeMsFromUpdatedTime_l() { | 
|  | 132 | uint32_t deltaMs = 0; | 
|  | 133 | if (mBufferUpdateTime.tv_sec != 0) { | 
|  | 134 | struct timespec ts; | 
|  | 135 | if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) { | 
|  | 136 | time_t secs = ts.tv_sec - mBufferUpdateTime.tv_sec; | 
|  | 137 | long nsec = ts.tv_nsec - mBufferUpdateTime.tv_nsec; | 
|  | 138 | if (nsec < 0) { | 
|  | 139 | --secs; | 
|  | 140 | nsec += 1000000000; | 
|  | 141 | } | 
|  | 142 | deltaMs = secs * 1000 + nsec / 1000000; | 
|  | 143 | } | 
|  | 144 | } | 
|  | 145 | return deltaMs; | 
|  | 146 | } | 
|  | 147 |  | 
// Aggregates the sliding window of per-buffer measurements recorded by
// process() into a single {peak, rms} result expressed in millibels (mB).
// Stale measurements (older than kDiscardMeasurementsTimeMs) are discarded
// first so they do not bias the result. Thread-safe: the window is read and
// reset under mMutex; the mB conversion happens outside the lock.
Visualizer::Measurement VisualizerContext::getMeasure() {
    uint16_t peakU16 = 0;
    float sumRmsSquared = 0.0f;
    uint8_t nbValidMeasurements = 0;

    {
        std::lock_guard lg(mMutex);
        // reset measurements if last measurement was too long ago (which implies stored
        // measurements aren't relevant anymore and shouldn't bias the new one)
        const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
        if (delayMs > kDiscardMeasurementsTimeMs) {
            LOG(INFO) << __func__ << " Discarding " << delayMs << " ms old measurements";
            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
                mPastMeasurements[i].mIsValid = false;
                mPastMeasurements[i].mPeakU16 = 0;
                mPastMeasurements[i].mRmsSquared = 0;
            }
            mMeasurementBufferIdx = 0;
        } else {
            // only use actual measurements, otherwise the first RMS measure happening before
            // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS have been played will always be artificially
            // low
            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
                if (mPastMeasurements[i].mIsValid) {
                    // Peak is the max over the window; RMS is averaged below.
                    if (mPastMeasurements[i].mPeakU16 > peakU16) {
                        peakU16 = mPastMeasurements[i].mPeakU16;
                    }
                    sumRmsSquared += mPastMeasurements[i].mRmsSquared;
                    nbValidMeasurements++;
                }
            }
        }
    }

    // RMS of the window: sqrt of the mean of the per-buffer squared RMS values
    // (0 when no valid measurements exist, avoiding a divide by zero).
    float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements);
    Visualizer::Measurement measure;
    // convert from I16 sample values to mB and write results
    // (2000 * log10(x / 32767) maps full scale to 0 mB; values at or below the
    // threshold are clamped to -9600 mB, i.e. treated as silence)
    measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
    measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
    LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
              << " (" << measure.rms << "mB)";
    return measure;
}
|  | 191 |  | 
|  | 192 | std::vector<uint8_t> VisualizerContext::capture() { | 
|  | 193 | std::vector<uint8_t> result; | 
|  | 194 | std::lock_guard lg(mMutex); | 
| Shunkai Yao | b851b3c | 2023-02-27 22:50:00 +0000 | [diff] [blame] | 195 | // cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running | 
|  | 196 | // RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState"); | 
|  | 197 | if (mState != State::ACTIVE) { | 
|  | 198 | result.resize(mCaptureSamples); | 
|  | 199 | memset(result.data(), 0x80, mCaptureSamples); | 
|  | 200 | return result; | 
|  | 201 | } | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 202 |  | 
| Shunkai Yao | b851b3c | 2023-02-27 22:50:00 +0000 | [diff] [blame] | 203 | const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l(); | 
| Shunkai Yao | 05b190a | 2022-12-22 00:21:31 +0000 | [diff] [blame] | 204 | // if audio framework has stopped playing audio although the effect is still active we must | 
|  | 205 | // clear the capture buffer to return silence | 
|  | 206 | if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) && | 
|  | 207 | (deltaMs > kMaxStallTimeMs)) { | 
|  | 208 | LOG(INFO) << __func__ << " capture going to idle"; | 
|  | 209 | mBufferUpdateTime.tv_sec = 0; | 
|  | 210 | return result; | 
|  | 211 | } | 
|  | 212 | int32_t latencyMs = mDownstreamLatency; | 
|  | 213 | latencyMs -= deltaMs; | 
|  | 214 | if (latencyMs < 0) { | 
|  | 215 | latencyMs = 0; | 
|  | 216 | } | 
|  | 217 | uint32_t deltaSamples = mCaptureSamples + mCommon.input.base.sampleRate * latencyMs / 1000; | 
|  | 218 |  | 
|  | 219 | // large sample rate, latency, or capture size, could cause overflow. | 
|  | 220 | // do not offset more than the size of buffer. | 
|  | 221 | if (deltaSamples > kMaxCaptureBufSize) { | 
|  | 222 | android_errorWriteLog(0x534e4554, "31781965"); | 
|  | 223 | deltaSamples = kMaxCaptureBufSize; | 
|  | 224 | } | 
|  | 225 |  | 
|  | 226 | int32_t capturePoint; | 
|  | 227 | //capturePoint = (int32_t)mCaptureIdx - deltaSamples; | 
|  | 228 | __builtin_sub_overflow((int32_t) mCaptureIdx, deltaSamples, &capturePoint); | 
|  | 229 | // a negative capturePoint means we wrap the buffer. | 
|  | 230 | if (capturePoint < 0) { | 
|  | 231 | uint32_t size = -capturePoint; | 
|  | 232 | if (size > mCaptureSamples) { | 
|  | 233 | size = mCaptureSamples; | 
|  | 234 | } | 
|  | 235 | result.insert(result.end(), &mCaptureBuf[kMaxCaptureBufSize + capturePoint], | 
|  | 236 | &mCaptureBuf[kMaxCaptureBufSize + capturePoint + size]); | 
|  | 237 | mCaptureSamples -= size; | 
|  | 238 | capturePoint = 0; | 
|  | 239 | } | 
|  | 240 | result.insert(result.end(), &mCaptureBuf[capturePoint], | 
|  | 241 | &mCaptureBuf[capturePoint + mCaptureSamples]); | 
|  | 242 | mLastCaptureIdx = mCaptureIdx; | 
|  | 243 | return result; | 
|  | 244 | } | 
|  | 245 |  | 
// Processes one buffer of interleaved float audio:
//   1. if PEAK_RMS mode is on, records peak and mean-square for this buffer
//      into the sliding measurement window consumed by getMeasure();
//   2. downmixes each frame (sum of channels), scales it per mScalingMode,
//      and writes it as u8 into the circular capture buffer for capture();
//   3. copies input to output unmodified (pass-through effect).
// `samples` is the total float count, i.e. frames * channels — the inner
// loops consume mChannelCount entries of `in` per frame.
IEffect::Status VisualizerContext::process(float* in, float* out, int samples) {
    IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
    RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");

    std::lock_guard lg(mMutex);
    result.status = STATUS_INVALID_OPERATION;
    RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
    // perform measurements if needed
    if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
        // find the peak and RMS squared for the new buffer
        // (computed across all interleaved samples, not per channel)
        float rmsSqAcc = 0;
        float maxSample = 0.f;
        for (size_t inIdx = 0; inIdx < (unsigned)samples; ++inIdx) {
            maxSample = fmax(maxSample, fabs(in[inIdx]));
            rmsSqAcc += in[inIdx] * in[inIdx];
        }
        maxSample *= 1 << 15; // scale to int16_t, with exactly 1 << 15 representing positive num.
        rmsSqAcc *= 1 << 30; // scale to int16_t * 2
        // overwrite the oldest slot in the circular measurement window
        mPastMeasurements[mMeasurementBufferIdx] = {
                .mPeakU16 = (uint16_t)maxSample,
                .mRmsSquared = rmsSqAcc / samples,
                .mIsValid = true };
        if (++mMeasurementBufferIdx >= mMeasurementWindowSizeInBuffers) {
            mMeasurementBufferIdx = 0;
        }
    }

    float fscale;  // multiplicative scale
    if (mScalingMode == Visualizer::ScalingMode::NORMALIZED) {
        // derive capture scaling factor from peak value in current buffer
        // this gives more interesting captures for display.
        float maxSample = 0.f;
        for (size_t inIdx = 0; inIdx < (unsigned)samples; ) {
            // we reconstruct the actual summed value to ensure proper normalization
            // for multichannel outputs (channels > 2 may often be 0).
            float smp = 0.f;
            for (int i = 0; i < mChannelCount; ++i) {
                smp += in[inIdx++];
            }
            maxSample = fmax(maxSample, fabs(smp));
        }
        if (maxSample > 0.f) {
            fscale = 0.99f / maxSample;
            // if fscale is an exact power of two, nudge it down so the capture
            // is never a bit-exact copy of the PCM signal
            int exp; // unused
            const float significand = frexp(fscale, &exp);
            if (significand == 0.5f) {
                fscale *= 255.f / 256.f; // avoid returning unaltered PCM signal
            }
        } else {
            // scale doesn't matter, the values are all 0.
            fscale = 1.f;
        }
    } else {
        assert(mScalingMode == Visualizer::ScalingMode::AS_PLAYED);
        // Note: if channels are uncorrelated, 1/sqrt(N) could be used at the risk of clipping.
        fscale = 1.f / mChannelCount;  // account for summing all the channels together.
    }

    // downmix each frame and append it to the circular capture buffer as u8
    uint32_t captIdx;
    uint32_t inIdx;
    for (inIdx = 0, captIdx = mCaptureIdx; inIdx < (unsigned)samples; captIdx++) {
        // wrap
        if (captIdx >= kMaxCaptureBufSize) {
            captIdx = 0;
        }

        float smp = 0.f;
        for (uint32_t i = 0; i < mChannelCount; ++i) {
            smp += in[inIdx++];
        }
        mCaptureBuf[captIdx] = clamp8_from_float(smp * fscale);
    }

    // the following two should really be atomic, though it probably doesn't
    // matter much for visualization purposes
    mCaptureIdx = captIdx;
    // update last buffer update time stamp
    // (tv_sec == 0 is the sentinel meaning "no valid timestamp")
    if (clock_gettime(CLOCK_MONOTONIC, &mBufferUpdateTime) < 0) {
        mBufferUpdateTime.tv_sec = 0;
    }

    // TODO: handle access_mode
    memcpy(out, in, samples * sizeof(float));
    return {STATUS_OK, samples, samples};
}
|  | 332 |  | 
|  | 333 | }  // namespace aidl::android::hardware::audio::effect |