/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VisualizerContext.h"

#include <algorithm>
#include <android/binder_status.h>
#include <audio_utils/primitives.h>
#include <math.h>
#include <system/audio.h>
#include <time.h>
#include <Utils.h>

#ifndef BUILD_FLOAT
#error AIDL Visualizer only supports 32-bit float samples; make sure to build with cflags -DBUILD_FLOAT
#endif

using android::hardware::audio::common::getChannelCount;

namespace aidl::android::hardware::audio::effect {

VisualizerContext::VisualizerContext(int statusDepth, const Parameter::Common& common)
    : EffectContext(statusDepth, common) {
}

VisualizerContext::~VisualizerContext() {
    std::lock_guard lg(mMutex);
    LOG(DEBUG) << __func__;
    mState = State::UNINITIALIZED;
}

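// Validate the common parameters and cache the channel configuration. Input and output
// configurations must match, and the channel count must be within the supported range.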
RetCode VisualizerContext::initParams(const Parameter::Common& common) {
    std::lock_guard lg(mMutex);
    LOG(DEBUG) << __func__;
    if (common.input != common.output) {
        LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString()
                   << " and output: " << common.output.toString();
        return RetCode::ERROR_ILLEGAL_PARAMETER;
    }

    mState = State::INITIALIZED;
    auto channelCount = getChannelCount(common.input.base.channelMask);
#ifdef SUPPORT_MC
    if (channelCount < 1 || channelCount > FCC_LIMIT) return RetCode::ERROR_ILLEGAL_PARAMETER;
#else
    if (channelCount != FCC_2) return RetCode::ERROR_ILLEGAL_PARAMETER;
#endif
    mChannelCount = channelCount;
    mCommon = common;
    return RetCode::SUCCESS;
}

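// Move the effect from INITIALIZED to ACTIVE so process() starts capturing data.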
RetCode VisualizerContext::enable() {
    std::lock_guard lg(mMutex);
    if (mState != State::INITIALIZED) {
        return RetCode::ERROR_EFFECT_LIB_ERROR;
    }
    mState = State::ACTIVE;
    return RetCode::SUCCESS;
}

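// Return the effect to INITIALIZED; capture() and process() report an error until it is
// re-enabled.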
RetCode VisualizerContext::disable() {
    std::lock_guard lg(mMutex);
    if (mState != State::ACTIVE) {
        return RetCode::ERROR_EFFECT_LIB_ERROR;
    }
    mState = State::INITIALIZED;
    return RetCode::SUCCESS;
}

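// Clear the capture buffer. 0x80 is the midpoint of the unsigned 8-bit sample range, i.e.
// silence in the u8 format used by the capture buffer.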
void VisualizerContext::reset() {
    std::lock_guard lg(mMutex);
    std::fill_n(mCaptureBuf.begin(), kMaxCaptureBufSize, 0x80);
}

RetCode VisualizerContext::setCaptureSamples(int samples) {
    std::lock_guard lg(mMutex);
    mCaptureSamples = samples;
    return RetCode::SUCCESS;
}

int VisualizerContext::getCaptureSamples() {
    std::lock_guard lg(mMutex);
    return mCaptureSamples;
}

RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) {
    std::lock_guard lg(mMutex);
    mMeasurementMode = mode;
    return RetCode::SUCCESS;
}

Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() {
    std::lock_guard lg(mMutex);
    return mMeasurementMode;
}

RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) {
    std::lock_guard lg(mMutex);
    mScalingMode = mode;
    return RetCode::SUCCESS;
}

Visualizer::ScalingMode VisualizerContext::getScalingMode() {
    std::lock_guard lg(mMutex);
    return mScalingMode;
}

RetCode VisualizerContext::setDownstreamLatency(int latency) {
    std::lock_guard lg(mMutex);
    mDownstreamLatency = latency;
    return RetCode::SUCCESS;
}

int VisualizerContext::getDownstreamLatency() {
    std::lock_guard lg(mMutex);
    return mDownstreamLatency;
}

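// Compute the elapsed time in milliseconds since the capture buffer was last updated.
// The _l suffix indicates the caller must already hold mMutex.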
uint32_t VisualizerContext::getDeltaTimeMsFromUpdatedTime_l() {
    uint32_t deltaMs = 0;
    if (mBufferUpdateTime.tv_sec != 0) {
        struct timespec ts;
        if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
            time_t secs = ts.tv_sec - mBufferUpdateTime.tv_sec;
            long nsec = ts.tv_nsec - mBufferUpdateTime.tv_nsec;
            if (nsec < 0) {
                --secs;
                nsec += 1000000000;
            }
            deltaMs = secs * 1000 + nsec / 1000000;
        }
    }
    return deltaMs;
}

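// Aggregate the peak and RMS values recorded by process() over the measurement window and
// convert them from 16-bit sample scale to millibels (mB).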
Visualizer::Measurement VisualizerContext::getMeasure() {
    uint16_t peakU16 = 0;
    float sumRmsSquared = 0.0f;
    uint8_t nbValidMeasurements = 0;

    {
        std::lock_guard lg(mMutex);
        // reset measurements if last measurement was too long ago (which implies stored
        // measurements aren't relevant anymore and shouldn't bias the new one)
        const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
        if (delayMs > kDiscardMeasurementsTimeMs) {
            LOG(INFO) << __func__ << " Discarding " << delayMs << " ms old measurements";
            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
                mPastMeasurements[i].mIsValid = false;
                mPastMeasurements[i].mPeakU16 = 0;
                mPastMeasurements[i].mRmsSquared = 0;
            }
            mMeasurementBufferIdx = 0;
        } else {
            // only use actual measurements, otherwise the first RMS measure happening before
            // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS buffers have been played will always be
            // artificially low
            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
                if (mPastMeasurements[i].mIsValid) {
                    if (mPastMeasurements[i].mPeakU16 > peakU16) {
                        peakU16 = mPastMeasurements[i].mPeakU16;
                    }
                    sumRmsSquared += mPastMeasurements[i].mRmsSquared;
                    nbValidMeasurements++;
                }
            }
        }
    }

    float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements);
    Visualizer::Measurement measure;
    // convert from I16 sample values to mB and write results
    measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
    measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
    LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
              << " (" << measure.rms << "mB)";
    return measure;
}

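// Return the most recent mCaptureSamples of 8-bit capture data, compensating for downstream
// latency, or an empty vector if the effect is not active or playback has stalled.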
std::vector<uint8_t> VisualizerContext::capture() {
    std::vector<uint8_t> result;
    std::lock_guard lg(mMutex);
    RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
    const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();

    // if the audio framework has stopped playing audio although the effect is still active,
    // we must clear the capture buffer to return silence
    if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
        (deltaMs > kMaxStallTimeMs)) {
        LOG(INFO) << __func__ << " capture going to idle";
        mBufferUpdateTime.tv_sec = 0;
        return result;
    }
    int32_t latencyMs = mDownstreamLatency;
    latencyMs -= deltaMs;
    if (latencyMs < 0) {
        latencyMs = 0;
    }
    uint32_t deltaSamples = mCaptureSamples + mCommon.input.base.sampleRate * latencyMs / 1000;

    // a large sample rate, latency, or capture size could cause overflow;
    // do not offset by more than the size of the buffer.
    if (deltaSamples > kMaxCaptureBufSize) {
        android_errorWriteLog(0x534e4554, "31781965");
        deltaSamples = kMaxCaptureBufSize;
    }

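    // Compute the read position with an overflow-safe subtraction (__builtin_sub_overflow has
    // well-defined wrap-around semantics; the overflow flag it returns is ignored here).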
    int32_t capturePoint;
    //capturePoint = (int32_t)mCaptureIdx - deltaSamples;
    __builtin_sub_overflow((int32_t) mCaptureIdx, deltaSamples, &capturePoint);
    // a negative capturePoint means we wrap the buffer.
    if (capturePoint < 0) {
        uint32_t size = -capturePoint;
        if (size > mCaptureSamples) {
            size = mCaptureSamples;
        }
        result.insert(result.end(), &mCaptureBuf[kMaxCaptureBufSize + capturePoint],
                      &mCaptureBuf[kMaxCaptureBufSize + capturePoint + size]);
        mCaptureSamples -= size;
        capturePoint = 0;
    }
    result.insert(result.end(), &mCaptureBuf[capturePoint],
                  &mCaptureBuf[capturePoint + mCaptureSamples]);
    mLastCaptureIdx = mCaptureIdx;
    return result;
}

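// Main processing entry point: optionally record peak/RMS measurements, downmix each frame to
// mono, scale it according to the scaling mode, store it as unsigned 8-bit data in the circular
// capture buffer, and pass the input through to the output unchanged.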
IEffect::Status VisualizerContext::process(float* in, float* out, int samples) {
    IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
    RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");

    std::lock_guard lg(mMutex);
    result.status = STATUS_INVALID_OPERATION;
    RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
    // perform measurements if needed
    if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
        // find the peak and RMS squared for the new buffer
        float rmsSqAcc = 0;
        float maxSample = 0.f;
        for (size_t inIdx = 0; inIdx < (unsigned)samples; ++inIdx) {
            maxSample = fmax(maxSample, fabs(in[inIdx]));
            rmsSqAcc += in[inIdx] * in[inIdx];
        }
        maxSample *= 1 << 15; // scale to int16_t range, with 1 << 15 representing full scale.
        rmsSqAcc *= 1 << 30; // scale to the square of the int16_t range ((1 << 15)^2).
        mPastMeasurements[mMeasurementBufferIdx] = {
                .mPeakU16 = (uint16_t)maxSample,
                .mRmsSquared = rmsSqAcc / samples,
                .mIsValid = true };
        if (++mMeasurementBufferIdx >= mMeasurementWindowSizeInBuffers) {
            mMeasurementBufferIdx = 0;
        }
    }

    float fscale; // multiplicative scale
    if (mScalingMode == Visualizer::ScalingMode::NORMALIZED) {
        // derive capture scaling factor from peak value in current buffer
        // this gives more interesting captures for display.
        float maxSample = 0.f;
        for (size_t inIdx = 0; inIdx < (unsigned)samples; ) {
            // we reconstruct the actual summed value to ensure proper normalization
            // for multichannel outputs (channels > 2 may often be 0).
            float smp = 0.f;
            for (int i = 0; i < mChannelCount; ++i) {
                smp += in[inIdx++];
            }
            maxSample = fmax(maxSample, fabs(smp));
        }
        if (maxSample > 0.f) {
            fscale = 0.99f / maxSample;
            int exp; // unused
            const float significand = frexp(fscale, &exp);
            if (significand == 0.5f) {
                fscale *= 255.f / 256.f; // avoid returning unaltered PCM signal
            }
        } else {
            // scale doesn't matter, the values are all 0.
            fscale = 1.f;
        }
    } else {
        assert(mScalingMode == Visualizer::ScalingMode::AS_PLAYED);
        // Note: if channels are uncorrelated, 1/sqrt(N) could be used at the risk of clipping.
        fscale = 1.f / mChannelCount; // account for summing all the channels together.
    }

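    // Downmix each frame to mono, apply fscale, and store the result as an unsigned 8-bit
    // sample in the circular capture buffer.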
    uint32_t captIdx;
    uint32_t inIdx;
    for (inIdx = 0, captIdx = mCaptureIdx; inIdx < (unsigned)samples; captIdx++) {
        // wrap
        if (captIdx >= kMaxCaptureBufSize) {
            captIdx = 0;
        }

        float smp = 0.f;
        for (uint32_t i = 0; i < mChannelCount; ++i) {
            smp += in[inIdx++];
        }
        mCaptureBuf[captIdx] = clamp8_from_float(smp * fscale);
    }

    // the following two should really be atomic, though it probably doesn't
    // matter much for visualization purposes
    mCaptureIdx = captIdx;
    // update last buffer update time stamp
    if (clock_gettime(CLOCK_MONOTONIC, &mBufferUpdateTime) < 0) {
        mBufferUpdateTime.tv_sec = 0;
    }

    // TODO: handle access_mode
    memcpy(out, in, samples * sizeof(float));
    return {STATUS_OK, samples, samples};
}

} // namespace aidl::android::hardware::audio::effect