Merge "Fix failure to check HIDL return status" into oc-mr1-dev
diff --git a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
index edf644a..2dfd0a7 100644
--- a/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
+++ b/media/libaaudio/examples/input_monitor/src/input_monitor.cpp
@@ -27,9 +27,11 @@
#include "AAudioSimpleRecorder.h"
// TODO support FLOAT
-#define REQUIRED_FORMAT AAUDIO_FORMAT_PCM_I16
+#define REQUIRED_FORMAT AAUDIO_FORMAT_PCM_I16
#define MIN_FRAMES_TO_READ 48 /* arbitrary, 1 msec at 48000 Hz */
+static const int FRAMES_PER_LINE = 20000;
+
int main(int argc, const char **argv)
{
AAudioArgsParser argParser;
@@ -46,7 +48,10 @@
int32_t framesPerRead = 0;
int32_t framesToRecord = 0;
int32_t framesLeft = 0;
+ int32_t nextFrameCount = 0;
+ int32_t frameCount = 0;
int32_t xRunCount = 0;
+ int64_t previousFramePosition = -1;
int16_t *data = nullptr;
float peakLevel = 0.0;
int loopCounter = 0;
@@ -56,7 +61,7 @@
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("%s - Monitor input level using AAudio\n", argv[0]);
+ printf("%s - Monitor input level using AAudio V0.1.1\n", argv[0]);
argParser.setFormat(REQUIRED_FORMAT);
if (argParser.parseArgs(argc, argv)) {
@@ -133,6 +138,7 @@
goto finish;
}
framesLeft -= actual;
+ frameCount += actual;
// Peak finder.
for (int frameIndex = 0; frameIndex < actual; frameIndex++) {
@@ -143,9 +149,36 @@
}
// Display level as stars, eg. "******".
- if ((loopCounter++ % 10) == 0) {
+ if (frameCount > nextFrameCount) {
displayPeakLevel(peakLevel);
peakLevel = 0.0;
+ nextFrameCount += FRAMES_PER_LINE;
+ }
+
+ // Print timestamps.
+ int64_t framePosition = 0;
+ int64_t frameTime = 0;
+ aaudio_result_t timeResult;
+ timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+ &framePosition, &frameTime);
+
+ if (timeResult == AAUDIO_OK) {
+ if (framePosition > (previousFramePosition + FRAMES_PER_LINE)) {
+ int64_t realTime = getNanoseconds();
+ int64_t framesRead = AAudioStream_getFramesRead(aaudioStream);
+
+ double latencyMillis = calculateLatencyMillis(framesRead, realTime,
+ framePosition, frameTime,
+ actualSampleRate);
+
+ printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+ ", latency = %7.2f msec\n",
+ timeResult,
+ (long long) framePosition,
+ (long long) frameTime,
+ latencyMillis);
+ previousFramePosition = framePosition;
+ }
}
}
diff --git a/media/libaaudio/examples/loopback/jni/Android.mk b/media/libaaudio/examples/loopback/jni/Android.mk
index dc933e3..d78f286 100644
--- a/media/libaaudio/examples/loopback/jni/Android.mk
+++ b/media/libaaudio/examples/loopback/jni/Android.mk
@@ -4,7 +4,8 @@
LOCAL_MODULE_TAGS := tests
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
- frameworks/av/media/libaaudio/include
+ frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/examples/utils
# NDK recommends using this kind of relative path instead of an absolute path.
LOCAL_SRC_FILES:= ../src/loopback.cpp
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
new file mode 100644
index 0000000..21cf341
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -0,0 +1,794 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Tools for measuring latency and for detecting glitches.
+ * These classes are pure math and can be used with any audio system.
+ */
+
+#ifndef AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
+#define AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
+
#include <algorithm>
#include <assert.h>
#include <cctype>
#include <errno.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
+
+// Tag for machine readable results as property = value pairs
+#define LOOPBACK_RESULT_TAG "RESULT: "
+#define LOOPBACK_SAMPLE_RATE 48000
+
+#define MILLIS_PER_SECOND 1000
+
+#define MAX_ZEROTH_PARTIAL_BINS 40
+
// Reference impulse that is played and then searched for in the recording.
// A short lead-in/lead-out ramp (+/-0.2) surrounds a bipolar pulse, with
// zeros on each side so the correlator sees silence around the impulse.
static const float s_Impulse[] = {
    0.0f, 0.0f, 0.0f, 0.0f, 0.2f, // silence, then lead-in ramp
    0.5f, 0.9999f, 0.0f, -0.9999f, -0.5f, // bipolar pulse (FIX: was -0.9999, a double literal)
    -0.2f, 0.0f, 0.0f, 0.0f, 0.0f // lead-out ramp, then silence
};
+
/**
 * Deterministic pseudo-random generator built on a 64-bit
 * linear-congruential sequence (constants from Knuth's MMIX).
 */
class PseudoRandom {
public:
    PseudoRandom() {}
    PseudoRandom(int64_t seed)
            : mSeed(seed)
    {}

    /**
     * Returns the next random double from -1.0 to 1.0
     *
     * @return value from -1.0 to 1.0
     */
    double nextRandomDouble() {
        const double scaler = 0.5 / (((int32_t) 1) << 30); // == 2^-31
        return scaler * nextRandomInteger();
    }

    /** Calculate random 32 bit number using linear-congruential method. */
    int32_t nextRandomInteger() {
        // Use values for 64-bit sequence from MMIX by Donald Knuth.
        const int64_t multiplier = (int64_t) 6364136223846793005;
        const int64_t increment  = (int64_t) 1442695040888963407;
        mSeed = (mSeed * multiplier) + increment;
        // The higher bits have a longer sequence.
        return (int32_t) (mSeed >> 32);
    }

private:
    int64_t mSeed = 99887766;
};
+
/**
 * Normalized cross-correlation of two equal-length windows.
 *
 * @param a first window
 * @param b second window
 * @param windowSize number of samples in each window
 * @return value near 1.0 for identical signals, near 0.0 for uncorrelated
 *         or silent input
 */
static double calculateCorrelation(const float *a,
                                   const float *b,
                                   int windowSize)
{
    double correlation = 0.0;
    double sumProducts = 0.0;
    double sumSquares = 0.0;

    // Correlate a against b.
    for (int i = 0; i < windowSize; i++) {
        float s1 = a[i];
        float s2 = b[i];
        // Use a normalized cross-correlation.
        sumProducts += s1 * s2;
        sumSquares += ((s1 * s1) + (s2 * s2));
    }

    // Guard against dividing by (near) zero when both windows are silent.
    if (sumSquares >= 0.00000001) {
        // FIX: removed a spurious cast to float that discarded precision
        // before the implicit widening back to double.
        correlation = 2.0 * sumProducts / sumSquares;
    }
    return correlation;
}

/**
 * Correlate the needle against every position in the haystack.
 *
 * @param haystack signal to search in
 * @param haystackSize number of samples in haystack
 * @param needle pattern to search for
 * @param needleSize number of samples in needle
 * @param results receives one correlation per starting position
 * @param resultSize capacity of results
 * @return number of correlations written, never negative
 */
static int calculateCorrelations(const float *haystack, int haystackSize,
                                 const float *needle, int needleSize,
                                 float *results, int resultSize)
{
    // FIX: clamp to zero so a needle longer than the haystack cannot
    // produce a negative count (the original could return < 0).
    int maxCorrelations = std::max(0, haystackSize - needleSize);
    int numCorrelations = std::min(maxCorrelations, resultSize);

    for (int ic = 0; ic < numCorrelations; ic++) {
        results[ic] = (float) calculateCorrelation(&haystack[ic], needle, needleSize);
    }

    return numCorrelations;
}
+
+/*==========================================================================================*/
+/**
+ * Scan until we get a correlation of a single scan that goes over the tolerance level,
+ * peaks then drops back down.
+ */
+static double findFirstMatch(const float *haystack, int haystackSize,
+ const float *needle, int needleSize, double threshold )
+{
+ int ic;
+ // How many correlations can we calculate?
+ int numCorrelations = haystackSize - needleSize;
+ double maxCorrelation = 0.0;
+ int peakIndex = -1;
+ double location = -1.0;
+ const double backThresholdScaler = 0.5;
+
+ for (ic = 0; ic < numCorrelations; ic++) {
+ double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
+
+ if( (correlation > maxCorrelation) ) {
+ maxCorrelation = correlation;
+ peakIndex = ic;
+ }
+
+ //printf("PaQa_FindFirstMatch: ic = %4d, correlation = %8f, maxSum = %8f\n",
+ // ic, correlation, maxSum );
+ // Are we past what we were looking for?
+ if((maxCorrelation > threshold) && (correlation < backThresholdScaler * maxCorrelation)) {
+ location = peakIndex;
+ break;
+ }
+ }
+
+ return location;
+}
+
// Result of an echo-based latency measurement (filled in by measureLatencyFromEchos).
typedef struct LatencyReport_s {
    double latencyInFrames; // estimated round-trip latency, in frames at the stream rate
    double confidence;      // 0.0 = no usable echo found .. ~1.0 = single dominant peak
} LatencyReport;
+
+// Apply a technique similar to Harmonic Product Spectrum Analysis to find echo fundamental.
+// Using first echo instead of the original impulse for a better match.
+static int measureLatencyFromEchos(const float *haystack, int haystackSize,
+ const float *needle, int needleSize,
+ LatencyReport *report) {
+ const double threshold = 0.1;
+
+ // Find first peak
+ int first = (int) (findFirstMatch(haystack,
+ haystackSize,
+ needle,
+ needleSize,
+ threshold) + 0.5);
+
+ // Use first echo as the needle for the other echos because
+ // it will be more similar.
+ needle = &haystack[first];
+ int again = (int) (findFirstMatch(haystack,
+ haystackSize,
+ needle,
+ needleSize,
+ threshold) + 0.5);
+
+ printf("first = %d, again at %d\n", first, again);
+ first = again;
+
+ // Allocate results array
+ int remaining = haystackSize - first;
+ const int maxReasonableLatencyFrames = 48000 * 2; // arbitrary but generous value
+ int numCorrelations = std::min(remaining, maxReasonableLatencyFrames);
+ float *correlations = new float[numCorrelations];
+ float *harmonicSums = new float[numCorrelations](); // set to zero
+
+ // Generate correlation for every position.
+ numCorrelations = calculateCorrelations(&haystack[first], remaining,
+ needle, needleSize,
+ correlations, numCorrelations);
+
+ // Add higher harmonics mapped onto lower harmonics.
+ // This reinforces the "fundamental" echo.
+ const int numEchoes = 10;
+ for (int partial = 1; partial < numEchoes; partial++) {
+ for (int i = 0; i < numCorrelations; i++) {
+ harmonicSums[i / partial] += correlations[i] / partial;
+ }
+ }
+
+ // Find highest peak in correlation array.
+ float maxCorrelation = 0.0;
+ float sumOfPeaks = 0.0;
+ int peakIndex = 0;
+ const int skip = MAX_ZEROTH_PARTIAL_BINS; // skip low bins
+ for (int i = skip; i < numCorrelations; i++) {
+ if (harmonicSums[i] > maxCorrelation) {
+ maxCorrelation = harmonicSums[i];
+ sumOfPeaks += maxCorrelation;
+ peakIndex = i;
+ printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
+ }
+ }
+
+ report->latencyInFrames = peakIndex;
+ if (sumOfPeaks < 0.0001) {
+ report->confidence = 0.0;
+ } else {
+ report->confidence = maxCorrelation / sumOfPeaks;
+ }
+
+ delete[] correlations;
+ delete[] harmonicSums;
+ return 0;
+}
+
/**
 * Fixed-capacity recording of the first channel of an interleaved stream,
 * stored as mono float samples. Input past the capacity is silently dropped.
 */
class AudioRecording
{
public:
    AudioRecording() {
    }
    ~AudioRecording() {
        delete[] mData;
    }

    // Allocate (or re-allocate) the capture buffer and restart the recording.
    void allocate(int maxFrames) {
        delete[] mData;
        mData = new float[maxFrames];
        mMaxFrames = maxFrames;
        // FIX: restart the recording; previously a re-allocate kept the old
        // write position, which could even exceed the new capacity.
        mFrameCounter = 0;
    }

    // Write SHORT data from the first channel, converted to float in [-1, 1).
    // @return number of frames actually stored (less than numFrames at end of buffer)
    int write(int16_t *inputData, int inputChannelCount, int numFrames) {
        // stop at end of buffer
        if ((mFrameCounter + numFrames) > mMaxFrames) {
            numFrames = mMaxFrames - mFrameCounter;
        }
        for (int i = 0; i < numFrames; i++) {
            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
        }
        return numFrames;
    }

    // Write FLOAT data from the first channel.
    // @return number of frames actually stored (less than numFrames at end of buffer)
    int write(float *inputData, int inputChannelCount, int numFrames) {
        // stop at end of buffer
        if ((mFrameCounter + numFrames) > mMaxFrames) {
            numFrames = mMaxFrames - mFrameCounter;
        }
        for (int i = 0; i < numFrames; i++) {
            mData[mFrameCounter++] = inputData[i * inputChannelCount];
        }
        return numFrames;
    }

    // @return number of frames recorded so far
    int size() {
        return mFrameCounter;
    }

    float *getData() {
        return mData;
    }

    // Save the recording as raw PCM: 16-bit shorts by default, else raw floats.
    // @return number of samples written, or -errno if the file could not be opened
    int save(const char *fileName, bool writeShorts = true) {
        int written = 0;
        const int chunkSize = 64;
        FILE *fid = fopen(fileName, "wb");
        if (fid == NULL) {
            return -errno;
        }

        if (writeShorts) {
            int16_t buffer[chunkSize];
            int32_t framesLeft = mFrameCounter;
            int32_t cursor = 0;
            while (framesLeft) {
                int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
                for (int i = 0; i < framesToWrite; i++) {
                    buffer[i] = (int16_t) (mData[cursor++] * 32767);
                }
                written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
                framesLeft -= framesToWrite;
            }
        } else {
            written = (int) fwrite(mData, sizeof(float), mFrameCounter, fid);
        }
        fclose(fid);
        return written;
    }

private:
    float   *mData = nullptr;
    int32_t  mFrameCounter = 0; // next write position == frames recorded
    int32_t  mMaxFrames = 0;    // capacity of mData in frames
};
+
// ====================================================================================
/**
 * Base class for loopback analyzers.
 * Subclasses consume recorded input and produce the next output buffer in
 * process(), and print their results in report().
 */
class LoopbackProcessor {
public:
    virtual ~LoopbackProcessor() = default;


    // Return the analyzer to its initial state before a new measurement.
    virtual void reset() {}

    // Process one buffer of numFrames interleaved input frames and fill
    // numFrames interleaved output frames.
    virtual void process(float *inputData, int inputChannelCount,
                         float *outputData, int outputChannelCount,
                         int numFrames) = 0;


    // Print the measurement results.
    virtual void report() = 0;

    // Print a short progress line; default does nothing.
    virtual void printStatus() {};

    // @return true when the measurement has gathered enough data.
    virtual bool isDone() {
        return false;
    }

    void setSampleRate(int32_t sampleRate) {
        mSampleRate = sampleRate;
    }

    int32_t getSampleRate() {
        return mSampleRate;
    }

    // Measure peak amplitude of buffer.
    // Scans only the first channel of the interleaved buffer.
    static float measurePeakAmplitude(float *inputData, int inputChannelCount, int numFrames) {
        float peak = 0.0f;
        for (int i = 0; i < numFrames; i++) {
            float pos = fabs(*inputData);
            if (pos > peak) {
                peak = pos;
            }
            inputData += inputChannelCount;
        }
        return peak;
    }


private:
    int32_t mSampleRate = LOOPBACK_SAMPLE_RATE;
};
+
/**
 * Simple envelope follower: the output jumps up to any new input peak
 * and otherwise decays exponentially (by mDecay per call).
 */
class PeakDetector {
public:
    float process(float input) {
        // Decay the held peak, then let a larger input take over.
        const float decayed = mDecay * mPrevious;
        mPrevious = (input > decayed) ? input : decayed;
        return mPrevious;
    }

private:
    float mDecay = 0.99f;
    float mPrevious = 0.0f;
};
+
+
/**
 * Print one line of an ASCII oscilloscope: a '*' whose column maps the
 * sample from [-1.0, +1.0] across the line. Out-of-range samples are
 * clamped and marked with '$'.
 */
static void printAudioScope(float sample) {
    // FIX: the declaration was split across two lines with a stray
    // semicolon on its own line.
    const int maxStars = 80; // arbitrary, fits on one line
    char c = '*';
    if (sample < -1.0) {
        sample = -1.0;
        c = '$'; // clipped low
    } else if (sample > 1.0) {
        sample = 1.0;
        c = '$'; // clipped high
    }
    int numSpaces = (int) (((sample + 1.0) * 0.5) * maxStars);
    for (int i = 0; i < numSpaces; i++) {
        putchar(' ');
    }
    printf("%c\n", c);
}
+
+// ====================================================================================
+/**
+ * Measure latency given a loopback stream data.
+ * Uses a state machine to cycle through various stages including:
+ *
+ */
+class EchoAnalyzer : public LoopbackProcessor {
+public:
+
+ EchoAnalyzer() : LoopbackProcessor() {
+ audioRecorder.allocate(2 * LOOPBACK_SAMPLE_RATE);
+ }
+
+ void reset() override {
+ mDownCounter = 200;
+ mLoopCounter = 0;
+ mMeasuredLoopGain = 0.0f;
+ mEchoGain = 1.0f;
+ mState = STATE_INITIAL_SILENCE;
+ }
+
+ virtual bool isDone() {
+ return mState == STATE_DONE;
+ }
+
+ void setGain(float gain) {
+ mEchoGain = gain;
+ }
+
+ float getGain() {
+ return mEchoGain;
+ }
+
+ void report() override {
+
+ printf("EchoAnalyzer ---------------\n");
+ printf(LOOPBACK_RESULT_TAG "measured.gain = %f\n", mMeasuredLoopGain);
+ printf(LOOPBACK_RESULT_TAG "echo.gain = %f\n", mEchoGain);
+ printf(LOOPBACK_RESULT_TAG "frame.count = %d\n", mFrameCounter);
+ printf(LOOPBACK_RESULT_TAG "test.state = %d\n", mState);
+ if (mMeasuredLoopGain >= 0.9999) {
+ printf(" ERROR - clipping, turn down volume slightly\n");
+ } else {
+ const float *needle = s_Impulse;
+ int needleSize = (int) (sizeof(s_Impulse) / sizeof(float));
+ float *haystack = audioRecorder.getData();
+ int haystackSize = audioRecorder.size();
+ int result = measureLatencyFromEchos(haystack, haystackSize,
+ needle, needleSize,
+ &latencyReport);
+ if (latencyReport.confidence < 0.01) {
+ printf(" ERROR - confidence too low = %f\n", latencyReport.confidence);
+ } else {
+ double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
+ printf(LOOPBACK_RESULT_TAG "latency.frames = %8.2f\n", latencyReport.latencyInFrames);
+ printf(LOOPBACK_RESULT_TAG "latency.msec = %8.2f\n", latencyMillis);
+ printf(LOOPBACK_RESULT_TAG "latency.confidence = %8.6f\n", latencyReport.confidence);
+ }
+ }
+
+ {
+#define ECHO_FILENAME "/data/oboe_echo.raw"
+ int written = audioRecorder.save(ECHO_FILENAME);
+ printf("Echo wrote %d mono samples to %s on Android device\n", written, ECHO_FILENAME);
+ }
+ }
+
+ void printStatus() override {
+ printf("state = %d, echo gain = %f ", mState, mEchoGain);
+ }
+
+ static void sendImpulse(float *outputData, int outputChannelCount) {
+ for (float sample : s_Impulse) {
+ *outputData = sample;
+ outputData += outputChannelCount;
+ }
+ }
+
+ void process(float *inputData, int inputChannelCount,
+ float *outputData, int outputChannelCount,
+ int numFrames) override {
+ int channelsValid = std::min(inputChannelCount, outputChannelCount);
+ float peak = 0.0f;
+ int numWritten;
+ int numSamples;
+
+ echo_state_t nextState = mState;
+
+ switch (mState) {
+ case STATE_INITIAL_SILENCE:
+ // Output silence at the beginning.
+ numSamples = numFrames * outputChannelCount;
+ for (int i = 0; i < numSamples; i++) {
+ outputData[i] = 0;
+ }
+ if (mDownCounter-- <= 0) {
+ nextState = STATE_MEASURING_GAIN;
+ //printf("%5d: switch to STATE_MEASURING_GAIN\n", mLoopCounter);
+ mDownCounter = 8;
+ }
+ break;
+
+ case STATE_MEASURING_GAIN:
+ sendImpulse(outputData, outputChannelCount);
+ peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+ // If we get several in a row then go to next state.
+ if (peak > mPulseThreshold) {
+ if (mDownCounter-- <= 0) {
+ nextState = STATE_WAITING_FOR_SILENCE;
+ //printf("%5d: switch to STATE_WAITING_FOR_SILENCE, measured peak = %f\n",
+ // mLoopCounter, peak);
+ mDownCounter = 8;
+ mMeasuredLoopGain = peak; // assumes original pulse amplitude is one
+ // Calculate gain that will give us a nice decaying echo.
+ mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+ }
+ } else {
+ mDownCounter = 8;
+ }
+ break;
+
+ case STATE_WAITING_FOR_SILENCE:
+ // Output silence.
+ numSamples = numFrames * outputChannelCount;
+ for (int i = 0; i < numSamples; i++) {
+ outputData[i] = 0;
+ }
+ peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+ // If we get several in a row then go to next state.
+ if (peak < mSilenceThreshold) {
+ if (mDownCounter-- <= 0) {
+ nextState = STATE_SENDING_PULSE;
+ //printf("%5d: switch to STATE_SENDING_PULSE\n", mLoopCounter);
+ mDownCounter = 8;
+ }
+ } else {
+ mDownCounter = 8;
+ }
+ break;
+
+ case STATE_SENDING_PULSE:
+ audioRecorder.write(inputData, inputChannelCount, numFrames);
+ sendImpulse(outputData, outputChannelCount);
+ nextState = STATE_GATHERING_ECHOS;
+ //printf("%5d: switch to STATE_GATHERING_ECHOS\n", mLoopCounter);
+ break;
+
+ case STATE_GATHERING_ECHOS:
+ numWritten = audioRecorder.write(inputData, inputChannelCount, numFrames);
+ peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+ if (peak > mMeasuredLoopGain) {
+ mMeasuredLoopGain = peak; // AGC might be raising gain so adjust it on the fly.
+ // Recalculate gain that will give us a nice decaying echo.
+ mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+ }
+ // Echo input to output.
+ for (int i = 0; i < numFrames; i++) {
+ int ic;
+ for (ic = 0; ic < channelsValid; ic++) {
+ outputData[ic] = inputData[ic] * mEchoGain;
+ }
+ for (; ic < outputChannelCount; ic++) {
+ outputData[ic] = 0;
+ }
+ inputData += inputChannelCount;
+ outputData += outputChannelCount;
+ }
+ if (numWritten < numFrames) {
+ nextState = STATE_DONE;
+ //printf("%5d: switch to STATE_DONE\n", mLoopCounter);
+ }
+ break;
+
+ case STATE_DONE:
+ default:
+ break;
+ }
+
+ mState = nextState;
+ mLoopCounter++;
+ }
+
+private:
+
+ enum echo_state_t {
+ STATE_INITIAL_SILENCE,
+ STATE_MEASURING_GAIN,
+ STATE_WAITING_FOR_SILENCE,
+ STATE_SENDING_PULSE,
+ STATE_GATHERING_ECHOS,
+ STATE_DONE
+ };
+
+ int mDownCounter = 500;
+ int mLoopCounter = 0;
+ int mLoopStart = 1000;
+ float mPulseThreshold = 0.02f;
+ float mSilenceThreshold = 0.002f;
+ float mMeasuredLoopGain = 0.0f;
+ float mDesiredEchoGain = 0.95f;
+ float mEchoGain = 1.0f;
+ echo_state_t mState = STATE_INITIAL_SILENCE;
+ int32_t mFrameCounter = 0;
+
+ AudioRecording audioRecorder;
+ LatencyReport latencyReport;
+ PeakDetector mPeakDetector;
+};
+
+
+// ====================================================================================
+/**
+ * Output a steady sinewave and analyze the return signal.
+ *
+ * Use a cosine transform to measure the predicted magnitude and relative phase of the
+ * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
+ */
+class SineAnalyzer : public LoopbackProcessor {
+public:
+
+ void report() override {
+ printf("SineAnalyzer ------------------\n");
+ printf(LOOPBACK_RESULT_TAG "peak.amplitude = %7.5f\n", mPeakAmplitude);
+ printf(LOOPBACK_RESULT_TAG "sine.magnitude = %7.5f\n", mMagnitude);
+ printf(LOOPBACK_RESULT_TAG "phase.offset = %7.5f\n", mPhaseOffset);
+ printf(LOOPBACK_RESULT_TAG "ref.phase = %7.5f\n", mPhase);
+ printf(LOOPBACK_RESULT_TAG "frames.accumulated = %6d\n", mFramesAccumulated);
+ printf(LOOPBACK_RESULT_TAG "sine.period = %6d\n", mPeriod);
+ printf(LOOPBACK_RESULT_TAG "test.state = %6d\n", mState);
+ printf(LOOPBACK_RESULT_TAG "frame.count = %6d\n", mFrameCounter);
+ // Did we ever get a lock?
+ bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
+ if (!gotLock) {
+ printf("ERROR - failed to lock on reference sine tone\n");
+ } else {
+ // Only print if meaningful.
+ printf(LOOPBACK_RESULT_TAG "glitch.count = %6d\n", mGlitchCount);
+ }
+ }
+
+ void printStatus() override {
+ printf(" state = %d, glitches = %d,", mState, mGlitchCount);
+ }
+
+ double calculateMagnitude(double *phasePtr = NULL) {
+ if (mFramesAccumulated == 0) {
+ return 0.0;
+ }
+ double sinMean = mSinAccumulator / mFramesAccumulated;
+ double cosMean = mCosAccumulator / mFramesAccumulated;
+ double magnitude = 2.0 * sqrt( (sinMean * sinMean) + (cosMean * cosMean ));
+ if( phasePtr != NULL )
+ {
+ double phase = M_PI_2 - atan2( sinMean, cosMean );
+ *phasePtr = phase;
+ }
+ return magnitude;
+ }
+
+ /**
+ * @param inputData contains microphone data with sine signal feedback
+ * @param outputData contains the reference sine wave
+ */
+ void process(float *inputData, int inputChannelCount,
+ float *outputData, int outputChannelCount,
+ int numFrames) override {
+ float sample;
+ float peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
+ if (peak > mPeakAmplitude) {
+ mPeakAmplitude = peak;
+ }
+
+ for (int i = 0; i < numFrames; i++) {
+ float sample = inputData[i * inputChannelCount];
+
+ float sinOut = sinf(mPhase);
+
+ switch (mState) {
+ case STATE_IMMUNE:
+ case STATE_WAITING_FOR_SIGNAL:
+ break;
+ case STATE_WAITING_FOR_LOCK:
+ mSinAccumulator += sample * sinOut;
+ mCosAccumulator += sample * cosf(mPhase);
+ mFramesAccumulated++;
+ // Must be a multiple of the period or the calculation will not be accurate.
+ if (mFramesAccumulated == mPeriod * 4) {
+ mPhaseOffset = 0.0;
+ mMagnitude = calculateMagnitude(&mPhaseOffset);
+ if (mMagnitude > mThreshold) {
+ if (fabs(mPreviousPhaseOffset - mPhaseOffset) < 0.001) {
+ mState = STATE_LOCKED;
+ //printf("%5d: switch to STATE_LOCKED\n", mFrameCounter);
+ }
+ mPreviousPhaseOffset = mPhaseOffset;
+ }
+ resetAccumulator();
+ }
+ break;
+
+ case STATE_LOCKED: {
+ // Predict next sine value
+ float predicted = sinf(mPhase + mPhaseOffset) * mMagnitude;
+ // printf(" predicted = %f, actual = %f\n", predicted, sample);
+
+ float diff = predicted - sample;
+ if (fabs(diff) > mTolerance) {
+ mGlitchCount++;
+ //printf("%5d: Got a glitch # %d, predicted = %f, actual = %f\n",
+ // mFrameCounter, mGlitchCount, predicted, sample);
+ mState = STATE_IMMUNE;
+ //printf("%5d: switch to STATE_IMMUNE\n", mFrameCounter);
+ mDownCounter = mPeriod; // Set duration of IMMUNE state.
+ }
+ } break;
+ }
+
+ // Output sine wave so we can measure it.
+ outputData[i * outputChannelCount] = (sinOut * mOutputAmplitude)
+ + (mWhiteNoise.nextRandomDouble() * mNoiseAmplitude);
+ // printf("%5d: sin(%f) = %f, %f\n", i, mPhase, sinOut, mPhaseIncrement);
+
+ // advance and wrap phase
+ mPhase += mPhaseIncrement;
+ if (mPhase > M_PI) {
+ mPhase -= (2.0 * M_PI);
+ }
+
+ mFrameCounter++;
+ }
+
+ // Do these once per buffer.
+ switch (mState) {
+ case STATE_IMMUNE:
+ mDownCounter -= numFrames;
+ if (mDownCounter <= 0) {
+ mState = STATE_WAITING_FOR_SIGNAL;
+ //printf("%5d: switch to STATE_WAITING_FOR_SIGNAL\n", mFrameCounter);
+ }
+ break;
+ case STATE_WAITING_FOR_SIGNAL:
+ if (peak > mThreshold) {
+ mState = STATE_WAITING_FOR_LOCK;
+ //printf("%5d: switch to STATE_WAITING_FOR_LOCK\n", mFrameCounter);
+ resetAccumulator();
+ }
+ break;
+ case STATE_WAITING_FOR_LOCK:
+ case STATE_LOCKED:
+ break;
+ }
+
+ }
+
+ void resetAccumulator() {
+ mFramesAccumulated = 0;
+ mSinAccumulator = 0.0;
+ mCosAccumulator = 0.0;
+ }
+
+ void reset() override {
+ mGlitchCount = 0;
+ mState = STATE_IMMUNE;
+ mPhaseIncrement = 2.0 * M_PI / mPeriod;
+ printf("phaseInc = %f for period %d\n", mPhaseIncrement, mPeriod);
+ resetAccumulator();
+ }
+
+private:
+
+ enum sine_state_t {
+ STATE_IMMUNE,
+ STATE_WAITING_FOR_SIGNAL,
+ STATE_WAITING_FOR_LOCK,
+ STATE_LOCKED
+ };
+
+ int mPeriod = 79;
+ double mPhaseIncrement = 0.0;
+ double mPhase = 0.0;
+ double mPhaseOffset = 0.0;
+ double mPreviousPhaseOffset = 0.0;
+ double mMagnitude = 0.0;
+ double mThreshold = 0.005;
+ double mTolerance = 0.01;
+ int32_t mFramesAccumulated = 0;
+ double mSinAccumulator = 0.0;
+ double mCosAccumulator = 0.0;
+ int32_t mGlitchCount = 0;
+ double mPeakAmplitude = 0.0;
+ int mDownCounter = 4000;
+ int32_t mFrameCounter = 0;
+ float mOutputAmplitude = 0.75;
+
+ int32_t mZeroCrossings = 0;
+
+ PseudoRandom mWhiteNoise;
+ float mNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
+
+ sine_state_t mState = STATE_IMMUNE;
+};
+
+
+#undef LOOPBACK_SAMPLE_RATE
+#undef LOOPBACK_RESULT_TAG
+
+#endif /* AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H */
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 57d45cd..df0df04 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -14,8 +14,7 @@
* limitations under the License.
*/
-// Play an impulse and then record it.
-// Measure the round trip latency.
+// Audio loopback tests to measure the round trip latency and glitches.
#include <algorithm>
#include <assert.h>
@@ -28,460 +27,38 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include "AAudioSimplePlayer.h"
+#include "AAudioSimpleRecorder.h"
+#include "AAudioExampleUtils.h"
+#include "LoopbackAnalyzer.h"
+
// Tag for machine readable results as property = value pairs
#define RESULT_TAG "RESULT: "
#define SAMPLE_RATE 48000
#define NUM_SECONDS 5
#define NUM_INPUT_CHANNELS 1
#define FILENAME "/data/oboe_input.raw"
-
-#define NANOS_PER_MICROSECOND ((int64_t)1000)
-#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
-#define MILLIS_PER_SECOND 1000
-#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * MILLIS_PER_SECOND)
-
-#define MAX_ZEROTH_PARTIAL_BINS 40
-
-static const float s_Impulse[] = {
- 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, // silence on each side of the impulse
- 0.5f, 0.9f, 0.0f, -0.9f, -0.5f, // bipolar
- 0.0f, 0.0f, 0.0f, 0.0f, 0.0f};
-
-
-static double calculateCorrelation(const float *a,
- const float *b,
- int windowSize)
-{
- double correlation = 0.0;
- double sumProducts = 0.0;
- double sumSquares = 0.0;
-
- // Correlate a against b.
- for (int i = 0; i < windowSize; i++) {
- float s1 = a[i];
- float s2 = b[i];
- // Use a normalized cross-correlation.
- sumProducts += s1 * s2;
- sumSquares += ((s1 * s1) + (s2 * s2));
- }
-
- if (sumSquares >= 0.00000001) {
- correlation = (float) (2.0 * sumProducts / sumSquares);
- }
- return correlation;
-}
-
-static int calculateCorrelations(const float *haystack, int haystackSize,
- const float *needle, int needleSize,
- float *results, int resultSize)
-{
- int ic;
- int maxCorrelations = haystackSize - needleSize;
- int numCorrelations = std::min(maxCorrelations, resultSize);
-
- for (ic = 0; ic < numCorrelations; ic++) {
- double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
- results[ic] = correlation;
- }
-
- return numCorrelations;
-}
-
-/*==========================================================================================*/
-/**
- * Scan until we get a correlation of a single scan that goes over the tolerance level,
- * peaks then drops back down.
- */
-static double findFirstMatch(const float *haystack, int haystackSize,
- const float *needle, int needleSize, double threshold )
-{
- int ic;
- // How many correlations can we calculate?
- int numCorrelations = haystackSize - needleSize;
- double maxCorrelation = 0.0;
- int peakIndex = -1;
- double location = -1.0;
-
- for (ic = 0; ic < numCorrelations; ic++) {
- double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
-
- if( (correlation > maxCorrelation) ) {
- maxCorrelation = correlation;
- peakIndex = ic;
- }
-
- //printf("PaQa_FindFirstMatch: ic = %4d, correlation = %8f, maxSum = %8f\n",
- // ic, correlation, maxSum );
- // Are we past what we were looking for?
- if((maxCorrelation > threshold) && (correlation < 0.5 * maxCorrelation)) {
- location = peakIndex;
- break;
- }
- }
-
- return location;
-}
-
-typedef struct LatencyReport_s {
- double latencyInFrames;
- double confidence;
-} LatencyReport;
-
-// Apply a technique similar to Harmonic Product Spectrum Analysis to find echo fundamental.
-// Using first echo instead of the original impulse for a better match.
-int measureLatencyFromEchos(const float *haystack, int haystackSize,
- const float *needle, int needleSize,
- LatencyReport *report) {
- double threshold = 0.1;
-
- // Find first peak
- int first = (int) (findFirstMatch(haystack,
- haystackSize,
- needle,
- needleSize,
- threshold) + 0.5);
-
- // Use first echo as the needle for the other echos because
- // it will be more similar.
- needle = &haystack[first];
- int again = (int) (findFirstMatch(haystack,
- haystackSize,
- needle,
- needleSize,
- threshold) + 0.5);
-
- printf("first = %d, again at %d\n", first, again);
- first = again;
-
- // Allocate results array
- int remaining = haystackSize - first;
- int generous = 48000 * 2;
- int numCorrelations = std::min(remaining, generous);
- float *correlations = new float[numCorrelations];
- float *harmonicSums = new float[numCorrelations](); // cleared to zero
-
- // Generate correlation for every position.
- numCorrelations = calculateCorrelations(&haystack[first], remaining,
- needle, needleSize,
- correlations, numCorrelations);
-
- // Add higher harmonics mapped onto lower harmonics.
- // This reinforces the "fundamental" echo.
- const int numEchoes = 10;
- for (int partial = 1; partial < numEchoes; partial++) {
- for (int i = 0; i < numCorrelations; i++) {
- harmonicSums[i / partial] += correlations[i] / partial;
- }
- }
-
- // Find highest peak in correlation array.
- float maxCorrelation = 0.0;
- float sumOfPeaks = 0.0;
- int peakIndex = 0;
- const int skip = MAX_ZEROTH_PARTIAL_BINS; // skip low bins
- for (int i = skip; i < numCorrelations; i++) {
- if (harmonicSums[i] > maxCorrelation) {
- maxCorrelation = harmonicSums[i];
- sumOfPeaks += maxCorrelation;
- peakIndex = i;
- printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
- }
- }
-
- report->latencyInFrames = peakIndex;
- if (sumOfPeaks < 0.0001) {
- report->confidence = 0.0;
- } else {
- report->confidence = maxCorrelation / sumOfPeaks;
- }
-
- delete[] correlations;
- delete[] harmonicSums;
- return 0;
-}
-
-class AudioRecording
-{
-public:
- AudioRecording() {
- }
- ~AudioRecording() {
- delete[] mData;
- }
-
- void allocate(int maxFrames) {
- delete[] mData;
- mData = new float[maxFrames];
- mMaxFrames = maxFrames;
- }
-
- // Write SHORT data from the first channel.
- int write(int16_t *inputData, int inputChannelCount, int numFrames) {
- // stop at end of buffer
- if ((mFrameCounter + numFrames) > mMaxFrames) {
- numFrames = mMaxFrames - mFrameCounter;
- }
- for (int i = 0; i < numFrames; i++) {
- mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
- }
- return numFrames;
- }
-
- // Write FLOAT data from the first channel.
- int write(float *inputData, int inputChannelCount, int numFrames) {
- // stop at end of buffer
- if ((mFrameCounter + numFrames) > mMaxFrames) {
- numFrames = mMaxFrames - mFrameCounter;
- }
- for (int i = 0; i < numFrames; i++) {
- mData[mFrameCounter++] = inputData[i * inputChannelCount];
- }
- return numFrames;
- }
-
- int size() {
- return mFrameCounter;
- }
-
- float *getData() {
- return mData;
- }
-
- int save(const char *fileName, bool writeShorts = true) {
- int written = 0;
- const int chunkSize = 64;
- FILE *fid = fopen(fileName, "wb");
- if (fid == NULL) {
- return -errno;
- }
-
- if (writeShorts) {
- int16_t buffer[chunkSize];
- int32_t framesLeft = mFrameCounter;
- int32_t cursor = 0;
- while (framesLeft) {
- int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
- for (int i = 0; i < framesToWrite; i++) {
- buffer[i] = (int16_t) (mData[cursor++] * 32767);
- }
- written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
- framesLeft -= framesToWrite;
- }
- } else {
- written = fwrite(mData, sizeof(float), mFrameCounter, fid);
- }
- fclose(fid);
- return written;
- }
-
-private:
- float *mData = nullptr;
- int32_t mFrameCounter = 0;
- int32_t mMaxFrames = 0;
-};
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
- virtual ~LoopbackProcessor() = default;
-
- virtual void process(float *inputData, int inputChannelCount,
- float *outputData, int outputChannelCount,
- int numFrames) = 0;
-
-
- virtual void report() = 0;
-
- void setSampleRate(int32_t sampleRate) {
- mSampleRate = sampleRate;
- }
-
- int32_t getSampleRate() {
- return mSampleRate;
- }
-
-private:
- int32_t mSampleRate = SAMPLE_RATE;
-};
-
-
-// ====================================================================================
-class EchoAnalyzer : public LoopbackProcessor {
-public:
-
- EchoAnalyzer() : LoopbackProcessor() {
- audioRecorder.allocate(NUM_SECONDS * SAMPLE_RATE);
- }
-
- void setGain(float gain) {
- mGain = gain;
- }
-
- float getGain() {
- return mGain;
- }
-
- void report() override {
-
- const float *needle = s_Impulse;
- int needleSize = (int)(sizeof(s_Impulse) / sizeof(float));
- float *haystack = audioRecorder.getData();
- int haystackSize = audioRecorder.size();
- int result = measureLatencyFromEchos(haystack, haystackSize,
- needle, needleSize,
- &latencyReport);
- if (latencyReport.confidence < 0.01) {
- printf(" ERROR - confidence too low = %f\n", latencyReport.confidence);
- } else {
- double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
- printf(RESULT_TAG "latency.frames = %8.2f\n", latencyReport.latencyInFrames);
- printf(RESULT_TAG "latency.msec = %8.2f\n", latencyMillis);
- printf(RESULT_TAG "latency.confidence = %8.6f\n", latencyReport.confidence);
- }
- }
-
- void process(float *inputData, int inputChannelCount,
- float *outputData, int outputChannelCount,
- int numFrames) override {
- int channelsValid = std::min(inputChannelCount, outputChannelCount);
-
- audioRecorder.write(inputData, inputChannelCount, numFrames);
-
- if (mLoopCounter < mLoopStart) {
- // Output silence at the beginning.
- for (int i = 0; i < numFrames; i++) {
- int ic;
- for (ic = 0; ic < outputChannelCount; ic++) {
- outputData[ic] = 0;
- }
- inputData += inputChannelCount;
- outputData += outputChannelCount;
- }
- } else if (mLoopCounter == mLoopStart) {
- // Send a bipolar impulse that we can easily detect.
- for (float sample : s_Impulse) {
- *outputData = sample;
- outputData += outputChannelCount;
- }
- } else {
- // Echo input to output.
- for (int i = 0; i < numFrames; i++) {
- int ic;
- for (ic = 0; ic < channelsValid; ic++) {
- outputData[ic] = inputData[ic] * mGain;
- }
- for (; ic < outputChannelCount; ic++) {
- outputData[ic] = 0;
- }
- inputData += inputChannelCount;
- outputData += outputChannelCount;
- }
- }
-
- mLoopCounter++;
- }
-
-private:
- int mLoopCounter = 0;
- int mLoopStart = 1000;
- float mGain = 1.0f;
-
- AudioRecording audioRecorder;
- LatencyReport latencyReport;
-};
-
-
-// ====================================================================================
-class SineAnalyzer : public LoopbackProcessor {
-public:
-
- void report() override {
- double magnitude = calculateMagnitude();
- printf("sine magnitude = %7.5f\n", magnitude);
- printf("sine frames = %7d\n", mFrameCounter);
- printf("sine frequency = %7.1f Hz\n", mFrequency);
- }
-
- double calculateMagnitude(double *phasePtr = NULL) {
- if (mFrameCounter == 0) {
- return 0.0;
- }
- double sinMean = mSinAccumulator / mFrameCounter;
- double cosMean = mCosAccumulator / mFrameCounter;
- double magnitude = 2.0 * sqrt( (sinMean * sinMean) + (cosMean * cosMean ));
- if( phasePtr != NULL )
- {
- double phase = atan2( sinMean, cosMean );
- *phasePtr = phase;
- }
- return magnitude;
- }
-
- void process(float *inputData, int inputChannelCount,
- float *outputData, int outputChannelCount,
- int numFrames) override {
- double phaseIncrement = 2.0 * M_PI * mFrequency / getSampleRate();
-
- for (int i = 0; i < numFrames; i++) {
- // Multiply input by sine/cosine
- float sample = inputData[i * inputChannelCount];
- float sinOut = sinf(mPhase);
- mSinAccumulator += sample * sinOut;
- mCosAccumulator += sample * cosf(mPhase);
- // Advance and wrap phase
- mPhase += phaseIncrement;
- if (mPhase > (2.0 * M_PI)) {
- mPhase -= (2.0 * M_PI);
- }
-
- // Output sine wave so we can measure it.
- outputData[i * outputChannelCount] = sinOut;
- }
- mFrameCounter += numFrames;
-
- double magnitude = calculateMagnitude();
- if (mWaiting) {
- if (magnitude < 0.001) {
- // discard silence
- mFrameCounter = 0;
- mSinAccumulator = 0.0;
- mCosAccumulator = 0.0;
- } else {
- mWaiting = false;
- }
- }
- };
-
- void setFrequency(int32_t frequency) {
- mFrequency = frequency;
- }
-
- int32_t getFrequency() {
- return mFrequency;
- }
-
-private:
- double mFrequency = 300.0;
- double mPhase = 0.0;
- int32_t mFrameCounter = 0;
- double mSinAccumulator = 0.0;
- double mCosAccumulator = 0.0;
- bool mWaiting = true;
-};
+#define APP_VERSION "0.1.22"
-// TODO make this a class that manages its own buffer allocation
struct LoopbackData {
AAudioStream *inputStream = nullptr;
int32_t inputFramesMaximum = 0;
int16_t *inputData = nullptr;
+ int16_t peakShort = 0;
float *conversionBuffer = nullptr;
int32_t actualInputChannelCount = 0;
int32_t actualOutputChannelCount = 0;
int32_t inputBuffersToDiscard = 10;
+ int32_t minNumFrames = INT32_MAX;
+ int32_t maxNumFrames = 0;
+ bool isDone = false;
- aaudio_result_t inputError;
+ aaudio_result_t inputError = AAUDIO_OK;
+ aaudio_result_t outputError = AAUDIO_OK;
+
SineAnalyzer sineAnalyzer;
EchoAnalyzer echoAnalyzer;
+ AudioRecording audioRecorder;
LoopbackProcessor *loopbackProcessor;
};
@@ -517,6 +94,13 @@
return AAUDIO_CALLBACK_RESULT_STOP;
}
+ if (numFrames > myData->maxNumFrames) {
+ myData->maxNumFrames = numFrames;
+ }
+ if (numFrames < myData->minNumFrames) {
+ myData->minNumFrames = numFrames;
+ }
+
if (myData->inputBuffersToDiscard > 0) {
// Drain the input.
do {
@@ -524,6 +108,7 @@
numFrames, 0);
if (framesRead < 0) {
myData->inputError = framesRead;
+ printf("ERROR in read = %d", framesRead);
result = AAUDIO_CALLBACK_RESULT_STOP;
} else if (framesRead > 0) {
myData->inputBuffersToDiscard--;
@@ -534,9 +119,14 @@
numFrames, 0);
if (framesRead < 0) {
myData->inputError = framesRead;
+ printf("ERROR in read = %d", framesRead);
result = AAUDIO_CALLBACK_RESULT_STOP;
} else if (framesRead > 0) {
+ myData->audioRecorder.write(myData->inputData,
+ myData->actualInputChannelCount,
+ numFrames);
+
int32_t numSamples = framesRead * myData->actualInputChannelCount;
convertPcm16ToFloat(myData->inputData, myData->conversionBuffer, numSamples);
@@ -545,12 +135,25 @@
outputData,
myData->actualOutputChannelCount,
framesRead);
+ myData->isDone = myData->loopbackProcessor->isDone();
+ if (myData->isDone) {
+ result = AAUDIO_CALLBACK_RESULT_STOP;
+ }
}
}
return result;
}
+static void MyErrorCallbackProc(
+ AAudioStream *stream __unused,
+ void *userData __unused,
+ aaudio_result_t error)
+{
+ printf("Error Callback, error: %d\n",(int)error);
+ LoopbackData *myData = (LoopbackData *) userData;
+ myData->outputError = error;
+}
static void usage() {
printf("loopback: -n{numBursts} -p{outPerf} -P{inPerf} -t{test} -g{gain} -f{freq}\n");
@@ -571,7 +174,7 @@
}
static aaudio_performance_mode_t parsePerformanceMode(char c) {
- aaudio_performance_mode_t mode = AAUDIO_PERFORMANCE_MODE_NONE;
+ aaudio_performance_mode_t mode = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
c = tolower(c);
switch (c) {
case 'n':
@@ -612,86 +215,117 @@
return testMode;
}
+void printAudioGraph(AudioRecording &recording, int numSamples) {
+ int32_t start = recording.size() / 2;
+ int32_t end = start + numSamples;
+ if (end >= recording.size()) {
+ end = recording.size() - 1;
+ }
+ float *data = recording.getData();
+ // Normalize data so we can see it better.
+ float maxSample = 0.01;
+ for (int32_t i = start; i < end; i++) {
+ float samplePos = fabs(data[i]);
+ if (samplePos > maxSample) {
+ maxSample = samplePos;
+ }
+ }
+ float gain = 0.98f / maxSample;
+ for (int32_t i = start; i < end; i++) {
+ float sample = data[i];
+ printf("%5.3f ", sample); // actual value
+ sample *= gain;
+ printAudioScope(sample);
+ }
+}
+
+
// ====================================================================================
// TODO break up this large main() function into smaller functions
int main(int argc, const char **argv)
{
- aaudio_result_t result = AAUDIO_OK;
- LoopbackData loopbackData;
- AAudioStream *outputStream = nullptr;
- int requestedInputChannelCount = NUM_INPUT_CHANNELS;
- const int requestedOutputChannelCount = AAUDIO_UNSPECIFIED;
- const int requestedSampleRate = SAMPLE_RATE;
- int actualSampleRate = 0;
+ AAudioArgsParser argParser;
+ AAudioSimplePlayer player;
+ AAudioSimpleRecorder recorder;
+ LoopbackData loopbackData;
+ AAudioStream *outputStream = nullptr;
+
+ aaudio_result_t result = AAUDIO_OK;
+ aaudio_sharing_mode_t requestedInputSharingMode = AAUDIO_SHARING_MODE_SHARED;
+ int requestedInputChannelCount = NUM_INPUT_CHANNELS;
+ const int requestedOutputChannelCount = AAUDIO_UNSPECIFIED;
+ int actualSampleRate = 0;
const aaudio_format_t requestedInputFormat = AAUDIO_FORMAT_PCM_I16;
const aaudio_format_t requestedOutputFormat = AAUDIO_FORMAT_PCM_FLOAT;
- aaudio_format_t actualInputFormat;
- aaudio_format_t actualOutputFormat;
+ aaudio_format_t actualInputFormat;
+ aaudio_format_t actualOutputFormat;
+ aaudio_performance_mode_t outputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+ aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+
int testMode = TEST_ECHO_LATENCY;
- double frequency = 1000.0;
double gain = 1.0;
- const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
- //const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_SHARED;
- aaudio_sharing_mode_t actualSharingMode;
-
- AAudioStreamBuilder *builder = nullptr;
aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNINITIALIZED;
int32_t framesPerBurst = 0;
float *outputData = NULL;
double deviation;
double latency;
- aaudio_performance_mode_t outputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
- aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
-
int32_t burstsPerBuffer = 1; // single buffered
+ // Make printf print immediately so that debug info is not stuck
+ // in a buffer if we hang or crash.
+ setvbuf(stdout, NULL, _IONBF, (size_t) 0);
+
+ printf("%s - Audio loopback using AAudio V" APP_VERSION "\n", argv[0]);
+
for (int i = 1; i < argc; i++) {
const char *arg = argv[i];
- if (arg[0] == '-') {
- char option = arg[1];
- switch (option) {
- case 'c':
- requestedInputChannelCount = atoi(&arg[2]);
- break;
- case 'f':
- frequency = atof(&arg[2]);
- break;
- case 'g':
- gain = atof(&arg[2]);
- break;
- case 'm':
- AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
- break;
- case 'n':
- burstsPerBuffer = atoi(&arg[2]);
- break;
- case 'p':
- outputPerformanceLevel = parsePerformanceMode(arg[2]);
- break;
- case 'P':
- inputPerformanceLevel = parsePerformanceMode(arg[2]);
- break;
- case 't':
- testMode = parseTestMode(arg[2]);
- break;
- default:
- usage();
- exit(0);
- break;
+ if (argParser.parseArg(arg)) {
+ // Handle options that are not handled by the ArgParser
+ if (arg[0] == '-') {
+ char option = arg[1];
+ switch (option) {
+ case 'C':
+ requestedInputChannelCount = atoi(&arg[2]);
+ break;
+ case 'g':
+ gain = atof(&arg[2]);
+ break;
+ case 'P':
+ inputPerformanceLevel = parsePerformanceMode(arg[2]);
+ break;
+ case 'X':
+ requestedInputSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
+ break;
+ case 't':
+ testMode = parseTestMode(arg[2]);
+ break;
+ default:
+ usage();
+ exit(EXIT_FAILURE);
+ break;
+ }
+ } else {
+ usage();
+ exit(EXIT_FAILURE);
+ break;
}
- } else {
- usage();
- exit(0);
- break;
}
+
}
+ if (inputPerformanceLevel < 0) {
+ printf("illegal inputPerformanceLevel = %d\n", inputPerformanceLevel);
+ exit(EXIT_FAILURE);
+ }
+
+ int32_t requestedDuration = argParser.getDurationSeconds();
+ int32_t recordingDuration = std::min(60, requestedDuration);
+ loopbackData.audioRecorder.allocate(recordingDuration * SAMPLE_RATE);
switch(testMode) {
case TEST_SINE_MAGNITUDE:
- loopbackData.sineAnalyzer.setFrequency(frequency);
loopbackData.loopbackProcessor = &loopbackData.sineAnalyzer;
break;
case TEST_ECHO_LATENCY:
@@ -703,106 +337,44 @@
break;
}
- // Make printf print immediately so that debug info is not stuck
- // in a buffer if we hang or crash.
- setvbuf(stdout, NULL, _IONBF, (size_t) 0);
-
- printf("%s - Audio loopback using AAudio\n", argv[0]);
-
- // Use an AAudioStreamBuilder to contain requested parameters.
- result = AAudio_createStreamBuilder(&builder);
- if (result < 0) {
- goto finish;
- }
-
- // Request common stream properties.
- AAudioStreamBuilder_setSampleRate(builder, requestedSampleRate);
- AAudioStreamBuilder_setFormat(builder, requestedInputFormat);
- AAudioStreamBuilder_setSharingMode(builder, requestedSharingMode);
-
- // Open the input stream.
- AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_INPUT);
- AAudioStreamBuilder_setPerformanceMode(builder, inputPerformanceLevel);
- AAudioStreamBuilder_setChannelCount(builder, requestedInputChannelCount);
-
- result = AAudioStreamBuilder_openStream(builder, &loopbackData.inputStream);
- printf("AAudioStreamBuilder_openStream(input) returned %d = %s\n",
- result, AAudio_convertResultToText(result));
- if (result < 0) {
- goto finish;
- }
-
- // Create an output stream using the Builder.
- AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_OUTPUT);
- AAudioStreamBuilder_setFormat(builder, requestedOutputFormat);
- AAudioStreamBuilder_setPerformanceMode(builder, outputPerformanceLevel);
- AAudioStreamBuilder_setChannelCount(builder, requestedOutputChannelCount);
- AAudioStreamBuilder_setDataCallback(builder, MyDataCallbackProc, &loopbackData);
-
- result = AAudioStreamBuilder_openStream(builder, &outputStream);
- printf("AAudioStreamBuilder_openStream(output) returned %d = %s\n",
- result, AAudio_convertResultToText(result));
+ printf("OUTPUT stream ----------------------------------------\n");
+ argParser.setFormat(requestedOutputFormat);
+ result = player.open(argParser, MyDataCallbackProc, MyErrorCallbackProc, &loopbackData);
if (result != AAUDIO_OK) {
+ fprintf(stderr, "ERROR - player.open() returned %d\n", result);
goto finish;
}
+ outputStream = player.getStream();
+ argParser.compareWithStream(outputStream);
- printf("Stream INPUT ---------------------\n");
- loopbackData.actualInputChannelCount = AAudioStream_getChannelCount(loopbackData.inputStream);
- printf(" channelCount: requested = %d, actual = %d\n", requestedInputChannelCount,
- loopbackData.actualInputChannelCount);
- printf(" framesPerBurst = %d\n", AAudioStream_getFramesPerBurst(loopbackData.inputStream));
- printf(" bufferSize = %d\n",
- AAudioStream_getBufferSizeInFrames(loopbackData.inputStream));
- printf(" bufferCapacity = %d\n",
- AAudioStream_getBufferCapacityInFrames(loopbackData.inputStream));
+ actualOutputFormat = AAudioStream_getFormat(outputStream);
+ assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
- actualSharingMode = AAudioStream_getSharingMode(loopbackData.inputStream);
- printf(" sharingMode: requested = %d, actual = %d\n",
- requestedSharingMode, actualSharingMode);
-
- actualInputFormat = AAudioStream_getFormat(loopbackData.inputStream);
- printf(" dataFormat: requested = %d, actual = %d\n",
- requestedInputFormat, actualInputFormat);
- assert(actualInputFormat == AAUDIO_FORMAT_PCM_I16);
-
- printf(" is MMAP used? = %s\n", AAudioStream_isMMapUsed(loopbackData.inputStream)
- ? "yes" : "no");
-
-
- printf("Stream OUTPUT ---------------------\n");
- // Check to see what kind of stream we actually got.
- actualSampleRate = AAudioStream_getSampleRate(outputStream);
- printf(" sampleRate: requested = %d, actual = %d\n", requestedSampleRate, actualSampleRate);
- loopbackData.echoAnalyzer.setSampleRate(actualSampleRate);
-
- loopbackData.actualOutputChannelCount = AAudioStream_getChannelCount(outputStream);
- printf(" channelCount: requested = %d, actual = %d\n", requestedOutputChannelCount,
- loopbackData.actualOutputChannelCount);
-
- actualSharingMode = AAudioStream_getSharingMode(outputStream);
- printf(" sharingMode: requested = %d, actual = %d\n",
- requestedSharingMode, actualSharingMode);
+ printf("INPUT stream ----------------------------------------\n");
+ // Use different parameters for the input.
+ argParser.setNumberOfBursts(AAUDIO_UNSPECIFIED);
+ argParser.setFormat(requestedInputFormat);
+ argParser.setPerformanceMode(inputPerformanceLevel);
+ argParser.setChannelCount(requestedInputChannelCount);
+ argParser.setSharingMode(requestedInputSharingMode);
+ result = recorder.open(argParser);
+ if (result != AAUDIO_OK) {
+ fprintf(stderr, "ERROR - recorder.open() returned %d\n", result);
+ goto finish;
+ }
+ loopbackData.inputStream = recorder.getStream();
+ argParser.compareWithStream(loopbackData.inputStream);
// This is the number of frames that are read in one chunk by a DMA controller
// or a DSP or a mixer.
framesPerBurst = AAudioStream_getFramesPerBurst(outputStream);
- printf(" framesPerBurst = %d\n", framesPerBurst);
- result = AAudioStream_setBufferSizeInFrames(outputStream, burstsPerBuffer * framesPerBurst);
- if (result < 0) { // may be positive buffer size
- fprintf(stderr, "ERROR - AAudioStream_setBufferSize() returned %d\n", result);
- goto finish;
- }
- printf(" bufferSize = %d\n", AAudioStream_getBufferSizeInFrames(outputStream));
- printf(" bufferCapacity = %d\n", AAudioStream_getBufferCapacityInFrames(outputStream));
+ actualInputFormat = AAudioStream_getFormat(outputStream);
+ assert(actualInputFormat == AAUDIO_FORMAT_PCM_I16);
- actualOutputFormat = AAudioStream_getFormat(outputStream);
- printf(" dataFormat: requested = %d, actual = %d\n",
- requestedOutputFormat, actualOutputFormat);
- assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
- printf(" is MMAP used? = %s\n", AAudioStream_isMMapUsed(outputStream)
- ? "yes" : "no");
+ loopbackData.actualInputChannelCount = recorder.getChannelCount();
+ loopbackData.actualOutputChannelCount = player.getChannelCount();
// Allocate a buffer for the audio data.
loopbackData.inputFramesMaximum = 32 * framesPerBurst;
@@ -813,49 +385,82 @@
loopbackData.conversionBuffer = new float[loopbackData.inputFramesMaximum *
loopbackData.actualInputChannelCount];
+ loopbackData.loopbackProcessor->reset();
- // Start output first so input stream runs low.
- result = AAudioStream_requestStart(outputStream);
+ result = recorder.start();
if (result != AAUDIO_OK) {
- fprintf(stderr, "ERROR - AAudioStream_requestStart(output) returned %d = %s\n",
- result, AAudio_convertResultToText(result));
+ printf("ERROR - AAudioStream_requestStart(input) returned %d = %s\n",
+ result, AAudio_convertResultToText(result));
goto finish;
}
- result = AAudioStream_requestStart(loopbackData.inputStream);
+ result = player.start();
if (result != AAUDIO_OK) {
- fprintf(stderr, "ERROR - AAudioStream_requestStart(input) returned %d = %s\n",
- result, AAudio_convertResultToText(result));
+ printf("ERROR - AAudioStream_requestStart(output) returned %d = %s\n",
+ result, AAudio_convertResultToText(result));
goto finish;
}
printf("------- sleep while the callback runs --------------\n");
fflush(stdout);
- sleep(NUM_SECONDS);
+ for (int i = requestedDuration; i > 0 ; i--) {
+ if (loopbackData.inputError != AAUDIO_OK) {
+ printf(" ERROR on input stream\n");
+ break;
+ } else if (loopbackData.outputError != AAUDIO_OK) {
+ printf(" ERROR on output stream\n");
+ break;
+ } else if (loopbackData.isDone) {
+ printf(" test says it is done!\n");
+ break;
+ } else {
+ sleep(1);
+ printf("%4d: ", i);
+ loopbackData.loopbackProcessor->printStatus();
+ int64_t inputFramesWritten = AAudioStream_getFramesWritten(loopbackData.inputStream);
+ int64_t inputFramesRead = AAudioStream_getFramesRead(loopbackData.inputStream);
+ int64_t outputFramesWritten = AAudioStream_getFramesWritten(outputStream);
+ int64_t outputFramesRead = AAudioStream_getFramesRead(outputStream);
+ printf(" INPUT: wr %lld rd %lld state %s, OUTPUT: wr %lld rd %lld state %s, xruns %d\n",
+ (long long) inputFramesWritten,
+ (long long) inputFramesRead,
+ AAudio_convertStreamStateToText(AAudioStream_getState(loopbackData.inputStream)),
+ (long long) outputFramesWritten,
+ (long long) outputFramesRead,
+ AAudio_convertStreamStateToText(AAudioStream_getState(outputStream)),
+ AAudioStream_getXRunCount(outputStream)
+ );
+ }
+ }
printf("input error = %d = %s\n",
loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
- printf("framesRead = %d\n", (int) AAudioStream_getFramesRead(outputStream));
- printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(outputStream));
+ printf("framesRead = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
+ printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
+ printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
+ printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
- loopbackData.loopbackProcessor->report();
+ if (loopbackData.inputError == AAUDIO_OK) {
+ if (testMode == TEST_SINE_MAGNITUDE) {
+ printAudioGraph(loopbackData.audioRecorder, 200);
+ }
+ loopbackData.loopbackProcessor->report();
+ }
-// {
-// int written = loopbackData.audioRecorder.save(FILENAME);
-// printf("wrote %d mono samples to %s on Android device\n", written, FILENAME);
-// }
-
+ {
+ int written = loopbackData.audioRecorder.save(FILENAME);
+ printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME);
+ }
finish:
- AAudioStream_close(outputStream);
- AAudioStream_close(loopbackData.inputStream);
+ player.close();
+ recorder.close();
delete[] loopbackData.conversionBuffer;
delete[] loopbackData.inputData;
delete[] outputData;
- AAudioStreamBuilder_delete(builder);
printf(RESULT_TAG "error = %d = %s\n", result, AAudio_convertResultToText(result));
if ((result != AAUDIO_OK)) {
diff --git a/media/libaaudio/examples/loopback/src/loopback.sh b/media/libaaudio/examples/loopback/src/loopback.sh
new file mode 100644
index 0000000..bc63125
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/loopback.sh
@@ -0,0 +1,14 @@
+#!/system/bin/sh
+# Run a loopback test in the background after a delay.
+# To run the script enter:
+# adb shell "nohup sh /data/loopback.sh &"
+
+SLEEP_TIME=10
+TEST_COMMAND="aaudio_loopback -pl -Pl -C1 -n2 -m2 -tm -d5"
+
+echo "Plug in USB Mir and Fun Plug."
+echo "Test will start in ${SLEEP_TIME} seconds: ${TEST_COMMAND}"
+sleep ${SLEEP_TIME}
+date > /data/loopreport.txt
+${TEST_COMMAND} >> /data/loopreport.txt
+date >> /data/loopreport.txt
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 54217a5..46bc99e 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -121,7 +121,7 @@
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
- int32_t mNumberOfBursts = AAUDIO_UNSPECIFIED;
+ int32_t mNumberOfBursts = AAUDIO_UNSPECIFIED;
};
class AAudioArgsParser : public AAudioParameters {
@@ -151,9 +151,13 @@
case 'd':
mDurationSeconds = atoi(&arg[2]);
break;
- case 'm':
- AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
- break;
+ case 'm': {
+ aaudio_policy_t policy = AAUDIO_POLICY_AUTO;
+ if (strlen(arg) > 2) {
+ policy = atoi(&arg[2]);
+ }
+ AAudio_setMMapPolicy(policy);
+ } break;
case 'n':
setNumberOfBursts(atoi(&arg[2]));
break;
@@ -198,7 +202,11 @@
printf(" -b{bufferCapacity} frames\n");
printf(" -c{channels} for example 2 for stereo\n");
printf(" -d{duration} in seconds, default is %d\n", DEFAULT_DURATION_SECONDS);
- printf(" -m enable MMAP\n");
+ printf(" -m{0|1|2|3} set MMAP policy\n");
+ printf(" 0 = _UNSPECIFIED, default\n");
+ printf(" 1 = _NEVER\n");
+ printf(" 2 = _AUTO, also if -m is used with no number\n");
+ printf(" 3 = _ALWAYS\n");
printf(" -n{numberOfBursts} for setBufferSize\n");
printf(" -p{performanceMode} set output AAUDIO_PERFORMANCE_MODE*, default NONE\n");
printf(" n for _NONE\n");
diff --git a/media/libaaudio/examples/utils/AAudioExampleUtils.h b/media/libaaudio/examples/utils/AAudioExampleUtils.h
index 66de25f..6cbcc58 100644
--- a/media/libaaudio/examples/utils/AAudioExampleUtils.h
+++ b/media/libaaudio/examples/utils/AAudioExampleUtils.h
@@ -25,7 +25,7 @@
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
-static const char *getSharingModeText(aaudio_sharing_mode_t mode) {
+const char *getSharingModeText(aaudio_sharing_mode_t mode) {
const char *modeText = "unknown";
switch (mode) {
case AAUDIO_SHARING_MODE_EXCLUSIVE:
@@ -49,7 +49,7 @@
return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
}
-void displayPeakLevel(float peakLevel) {
+static void displayPeakLevel(float peakLevel) {
printf("%5.3f ", peakLevel);
const int maxStars = 50; // arbitrary, fits on one line
int numStars = (int) (peakLevel * maxStars);
@@ -59,4 +59,24 @@
printf("\n");
}
+/**
+ * @param position1 position of hardware frame
+ * @param nanoseconds1 time when the hardware frame was at position1
+ * @param position2 position of client read/write
+ * @param nanoseconds2 time when the client read/write occurred
+ * @param sampleRate frames per second, used to convert the frame delta to time
+ * @return latency in milliseconds
+ */
+static double calculateLatencyMillis(int64_t position1, int64_t nanoseconds1,
+ int64_t position2, int64_t nanoseconds2,
+ int64_t sampleRate) {
+ int64_t deltaFrames = position2 - position1;
+ int64_t deltaTime =
+ (NANOS_PER_SECOND * deltaFrames / sampleRate);
+ int64_t timeCurrentFramePlayed = nanoseconds1 + deltaTime;
+ int64_t latencyNanos = timeCurrentFramePlayed - nanoseconds2;
+ double latencyMillis = latencyNanos / 1000000.0;
+ return latencyMillis;
+}
+
#endif // AAUDIO_EXAMPLE_UTILS_H
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 19f8aff..cc0cb34 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -30,6 +30,11 @@
#define SHARING_MODE AAUDIO_SHARING_MODE_SHARED
#define PERFORMANCE_MODE AAUDIO_PERFORMANCE_MODE_NONE
+// Arbitrary period for glitches, once per second at 48000 Hz.
+#define FORCED_UNDERRUN_PERIOD_FRAMES 48000
+// How long to sleep in a callback to cause an intentional glitch. For testing.
+#define FORCED_UNDERRUN_SLEEP_MICROS (10 * 1000)
+
/**
* Simple wrapper for AAudio that opens an output stream either in callback or blocking write mode.
*/
@@ -219,8 +224,13 @@
typedef struct SineThreadedData_s {
SineGenerator sineOsc1;
SineGenerator sineOsc2;
+ int64_t framesTotal = 0;
+ int64_t nextFrameToGlitch = FORCED_UNDERRUN_PERIOD_FRAMES;
+ int32_t minNumFrames = INT32_MAX;
+ int32_t maxNumFrames = 0;
int scheduler;
- bool schedulerChecked;
+ bool schedulerChecked = false;
+ bool forceUnderruns = false;
} SineThreadedData_t;
// Callback function that fills the audio output buffer.
@@ -233,16 +243,33 @@
// should not happen but just in case...
if (userData == nullptr) {
- fprintf(stderr, "ERROR - SimplePlayerDataCallbackProc needs userData\n");
+ printf("ERROR - SimplePlayerDataCallbackProc needs userData\n");
return AAUDIO_CALLBACK_RESULT_STOP;
}
SineThreadedData_t *sineData = (SineThreadedData_t *) userData;
+ sineData->framesTotal += numFrames;
+
+ if (sineData->forceUnderruns) {
+ if (sineData->framesTotal > sineData->nextFrameToGlitch) {
+ usleep(FORCED_UNDERRUN_SLEEP_MICROS);
+ printf("Simulate glitch at %lld\n", (long long) sineData->framesTotal);
+ sineData->nextFrameToGlitch += FORCED_UNDERRUN_PERIOD_FRAMES;
+ }
+ }
+
if (!sineData->schedulerChecked) {
sineData->scheduler = sched_getscheduler(gettid());
sineData->schedulerChecked = true;
}
+ if (numFrames > sineData->maxNumFrames) {
+ sineData->maxNumFrames = numFrames;
+ }
+ if (numFrames < sineData->minNumFrames) {
+ sineData->minNumFrames = numFrames;
+ }
+
int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
// This code only plays on the first one or two channels.
// TODO Support arbitrary number of channels.
diff --git a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
index 6be9112..1344273 100644
--- a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
+++ b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
@@ -73,12 +73,20 @@
/**
* Only call this after open() has been called.
*/
- int32_t getSamplesPerFrame() {
+ int32_t getChannelCount() {
if (mStream == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
}
return AAudioStream_getChannelCount(mStream);;
}
+
+ /**
+ * @deprecated use getChannelCount()
+ */
+ int32_t getSamplesPerFrame() {
+ return getChannelCount();
+ }
+
/**
* Only call this after open() has been called.
*/
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index 2211b72..b5602e9 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -26,9 +26,7 @@
#include <aaudio/AAudio.h>
#include "AAudioExampleUtils.h"
#include "AAudioSimplePlayer.h"
-
-// Application data that gets passed to the callback.
-#define MAX_FRAME_COUNT_RECORDS 256
+#include "../../utils/AAudioSimplePlayer.h"
int main(int argc, const char **argv)
{
@@ -42,9 +40,10 @@
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("%s - Play a sine sweep using an AAudio callback V0.1.1\n", argv[0]);
+ printf("%s - Play a sine sweep using an AAudio callback V0.1.2\n", argv[0]);
myData.schedulerChecked = false;
+ myData.forceUnderruns = false; // set true to test AAudioStream_getXRunCount()
if (argParser.parseArgs(argc, argv)) {
return EXIT_FAILURE;
@@ -99,7 +98,10 @@
printf("Stream state is %d %s!\n", state, AAudio_convertStreamStateToText(state));
break;
}
- printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(player.getStream()));
+ printf("framesWritten = %d, underruns = %d\n",
+ (int) AAudioStream_getFramesWritten(player.getStream()),
+ (int) AAudioStream_getXRunCount(player.getStream())
+ );
}
printf("Woke up now.\n");
@@ -120,6 +122,9 @@
SCHED_FIFO);
}
+ printf("min numFrames = %8d\n", (int) myData.minNumFrames);
+ printf("max numFrames = %8d\n", (int) myData.maxNumFrames);
+
printf("SUCCESS\n");
return EXIT_SUCCESS;
error:
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e5f0deb..30fbdd6 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -793,7 +793,7 @@
* The position and time passed back are monotonically increasing.
*
* @param stream reference provided by AAudioStreamBuilder_openStream()
- * @param clockid AAUDIO_CLOCK_MONOTONIC or AAUDIO_CLOCK_BOOTTIME
+ * @param clockid CLOCK_MONOTONIC or CLOCK_BOOTTIME
* @param framePosition pointer to a variable to receive the position
* @param timeNanoseconds pointer to a variable to receive the time
* @return AAUDIO_OK or a negative error
diff --git a/media/libaaudio/src/binding/AAudioServiceMessage.h b/media/libaaudio/src/binding/AAudioServiceMessage.h
index b4377fb..54e8001 100644
--- a/media/libaaudio/src/binding/AAudioServiceMessage.h
+++ b/media/libaaudio/src/binding/AAudioServiceMessage.h
@@ -28,7 +28,6 @@
// Used to send information about the HAL to the client.
struct AAudioMessageTimestamp {
int64_t position; // number of frames transferred so far
- int64_t deviceOffset; // add to client position to get device position
int64_t timestamp; // time when that position was reached
};
@@ -51,7 +50,8 @@
typedef struct AAudioServiceMessage_s {
enum class code : uint32_t {
NOTHING,
- TIMESTAMP,
+ TIMESTAMP_SERVICE, // when frame is read or written by the service to the client
+ TIMESTAMP_HARDWARE, // when frame is at DAC or ADC
EVENT,
};
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 4c7d0f7..8c3045a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -68,6 +68,7 @@
, mServiceInterface(serviceInterface)
, mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
, mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
+ , mAtomicTimestamp()
{
ALOGD("AudioStreamInternal(): mWakeupDelayNanos = %d, mMinimumSleepNanos = %d",
mWakeupDelayNanos, mMinimumSleepNanos);
@@ -351,15 +352,18 @@
aaudio_result_t AudioStreamInternal::getTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds) {
- // TODO Generate in server and pass to client. Return latest.
- int64_t time = AudioClock::getNanoseconds();
- *framePosition = mClockModel.convertTimeToPosition(time) + mFramesOffsetFromService;
- // TODO Get a more accurate timestamp from the service. This code just adds a fudge factor.
- *timeNanoseconds = time + (6 * AAUDIO_NANOS_PER_MILLISECOND);
- return AAUDIO_OK;
+ // Generated in server and passed to client. Return latest.
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *framePosition = timestamp.getPosition();
+ *timeNanoseconds = timestamp.getNanoseconds();
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
}
-aaudio_result_t AudioStreamInternal::updateStateWhileWaiting() {
+aaudio_result_t AudioStreamInternal::updateStateMachine() {
if (isDataCallbackActive()) {
return AAUDIO_OK; // state is getting updated by the callback thread read/write call
}
@@ -385,7 +389,7 @@
oldTime = nanoTime;
}
-aaudio_result_t AudioStreamInternal::onTimestampFromServer(AAudioServiceMessage *message) {
+aaudio_result_t AudioStreamInternal::onTimestampService(AAudioServiceMessage *message) {
#if LOG_TIMESTAMPS
logTimestamp(*message);
#endif
@@ -393,6 +397,12 @@
return AAUDIO_OK;
}
+aaudio_result_t AudioStreamInternal::onTimestampHardware(AAudioServiceMessage *message) {
+ Timestamp timestamp(message->timestamp.position, message->timestamp.timestamp);
+ mAtomicTimestamp.write(timestamp);
+ return AAUDIO_OK;
+}
+
aaudio_result_t AudioStreamInternal::onEventFromServer(AAudioServiceMessage *message) {
aaudio_result_t result = AAUDIO_OK;
switch (message->event.event) {
@@ -456,8 +466,12 @@
break; // no command this time, no problem
}
switch (message.what) {
- case AAudioServiceMessage::code::TIMESTAMP:
- result = onTimestampFromServer(&message);
+ case AAudioServiceMessage::code::TIMESTAMP_SERVICE:
+ result = onTimestampService(&message);
+ break;
+
+ case AAudioServiceMessage::code::TIMESTAMP_HARDWARE:
+ result = onTimestampHardware(&message);
break;
case AAudioServiceMessage::code::EVENT:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 1b991de..13cf16c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -50,7 +50,7 @@
int64_t *framePosition,
int64_t *timeNanoseconds) override;
- virtual aaudio_result_t updateStateWhileWaiting() override;
+ virtual aaudio_result_t updateStateMachine() override;
aaudio_result_t open(const AudioStreamBuilder &builder) override;
@@ -122,7 +122,9 @@
aaudio_result_t onEventFromServer(AAudioServiceMessage *message);
- aaudio_result_t onTimestampFromServer(AAudioServiceMessage *message);
+ aaudio_result_t onTimestampService(AAudioServiceMessage *message);
+
+ aaudio_result_t onTimestampHardware(AAudioServiceMessage *message);
void logTimestamp(AAudioServiceMessage &message);
@@ -181,6 +183,11 @@
AudioEndpointParcelable mEndPointParcelable; // description of the buffers filled by service
EndpointDescriptor mEndpointDescriptor; // buffer description with resolved addresses
+
+ SimpleDoubleBuffer<Timestamp> mAtomicTimestamp;
+
+ int64_t mServiceLatencyNanos = 0;
+
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 4859c69..6c4a193 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -98,7 +98,7 @@
aaudio_stream_state_t *nextState,
int64_t timeoutNanoseconds)
{
- aaudio_result_t result = updateStateWhileWaiting();
+ aaudio_result_t result = updateStateMachine();
if (result != AAUDIO_OK) {
return result;
}
@@ -112,7 +112,7 @@
AudioClock::sleepForNanos(durationNanos);
timeoutNanoseconds -= durationNanos;
- aaudio_result_t result = updateStateWhileWaiting();
+ aaudio_result_t result = updateStateMachine();
if (result != AAUDIO_OK) {
return result;
}
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index e5fdcc6..ad18751 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -68,10 +68,10 @@
/**
- * Update state while in the middle of waitForStateChange()
+ * Update state machine.
* @return
*/
- virtual aaudio_result_t updateStateWhileWaiting() = 0;
+ virtual aaudio_result_t updateStateMachine() = 0;
// =========== End ABSTRACT methods ===========================
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd5e3c0..2816bac 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -94,12 +94,15 @@
} else {
audioBuffer->size = 0;
}
- break;
+
+ if (updateStateMachine() == AAUDIO_OK) {
+ break; // don't fall through
+ }
}
}
/// FALL THROUGH
- // Stream got rerouted so we disconnect.
+ // Stream got rerouted so we disconnect.
case AAUDIO_CALLBACK_OPERATION_DISCONNECTED: {
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
ALOGD("processCallbackCommon() stream disconnected");
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 8e8070c..041280d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -159,6 +159,9 @@
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
}
setPerformanceMode(actualPerformanceMode);
+
+ setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
// Log warning if we did not get what we asked for.
ALOGW_IF(actualFlags != flags,
"AudioStreamRecord::open() flags changed from 0x%08X to 0x%08X",
@@ -207,7 +210,7 @@
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
}
- // Get current position so we can detect when the track is playing.
+ // Get current position so we can detect when the track is recording.
status_t err = mAudioRecord->getPosition(&mPositionWhenStarting);
if (err != OK) {
return AAudioConvert_androidToAAudioResult(err);
@@ -235,7 +238,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamRecord::updateStateWhileWaiting()
+aaudio_result_t AudioStreamRecord::updateStateMachine()
{
aaudio_result_t result = AAUDIO_OK;
aaudio_wrapping_frames_t position;
@@ -292,6 +295,12 @@
}
int32_t framesRead = (int32_t)(bytesRead / bytesPerFrame);
incrementFramesRead(framesRead);
+
+ result = updateStateMachine();
+ if (result != AAUDIO_OK) {
+ return result;
+ }
+
return (aaudio_result_t) framesRead;
}
@@ -330,3 +339,21 @@
}
return getBestTimestamp(clockId, framePosition, timeNanoseconds, &extendedTimestamp);
}
+
+int64_t AudioStreamRecord::getFramesWritten() {
+ aaudio_wrapping_frames_t position;
+ status_t result;
+ switch (getState()) {
+ case AAUDIO_STREAM_STATE_STARTING:
+ case AAUDIO_STREAM_STATE_STARTED:
+ case AAUDIO_STREAM_STATE_STOPPING:
+ result = mAudioRecord->getPosition(&position);
+ if (result == OK) {
+ mFramesWritten.update32(position);
+ }
+ break;
+ default:
+ break;
+ }
+ return AudioStreamLegacy::getFramesWritten();
+}
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 2c6a7eb..c1723ba 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -59,9 +59,11 @@
int32_t getXRunCount() const override;
+ int64_t getFramesWritten() override;
+
int32_t getFramesPerBurst() const override;
- aaudio_result_t updateStateWhileWaiting() override;
+ aaudio_result_t updateStateMachine() override;
aaudio_direction_t getDirection() const override {
return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 77f31e2..525537f 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -183,6 +183,9 @@
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
}
setPerformanceMode(actualPerformanceMode);
+
+ setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
+
// Log warning if we did not get what we asked for.
ALOGW_IF(actualFlags != flags,
"AudioStreamTrack::open() flags changed from 0x%08X to 0x%08X",
@@ -296,7 +299,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::updateStateWhileWaiting()
+aaudio_result_t AudioStreamTrack::updateStateMachine()
{
status_t err;
aaudio_wrapping_frames_t position;
@@ -373,6 +376,12 @@
}
int32_t framesWritten = (int32_t)(bytesWritten / bytesPerFrame);
incrementFramesWritten(framesWritten);
+
+ result = updateStateMachine();
+ if (result != AAUDIO_OK) {
+ return result;
+ }
+
return framesWritten;
}
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index ff429ea..3230ac8 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -67,7 +67,7 @@
return AAUDIO_DIRECTION_OUTPUT;
}
- aaudio_result_t updateStateWhileWaiting() override;
+ aaudio_result_t updateStateMachine() override;
// This is public so it can be called from the C callback function.
void processCallback(int event, void *info) override;
@@ -81,8 +81,7 @@
// adapts between variable sized blocks and fixed size blocks
FixedBlockReader mFixedBlockReader;
- // TODO add 64-bit position reporting to AudioRecord and use it.
- aaudio_wrapping_frames_t mPositionWhenStarting = 0;
+ // TODO add 64-bit position reporting to AudioTrack and use it.
aaudio_wrapping_frames_t mPositionWhenPausing = 0;
};
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index acd319b..b0c6c94 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -258,4 +258,74 @@
}
}
+
+/**
+ * Simple double buffer for a structure that can be written occasionally and read occasionally.
+ * This allows a SINGLE writer with multiple readers.
+ *
+ * It is OK if the FIFO overflows and we lose old values.
+ * It is also OK if we read an old value.
+ * A read may return a non-atomic result if the other thread is rapidly writing
+ * new values on another core.
+ */
+template <class T>
+class SimpleDoubleBuffer {
+public:
+ SimpleDoubleBuffer()
+ : mValues()
+ , mCounter(0) {}
+
+ __attribute__((no_sanitize("integer")))
+ void write(T value) {
+ int index = mCounter.load() & 1;
+ mValues[index] = value;
+ mCounter++; // Increment AFTER updating storage, OK if it wraps.
+ }
+
+ T read() const {
+ T result;
+ int before;
+ int after;
+ int timeout = 3;
+ do {
+ // Check to see if a write occurred while were reading.
+ before = mCounter.load();
+ int index = (before & 1) ^ 1;
+ result = mValues[index];
+ after = mCounter.load();
+ } while ((after != before) && --timeout > 0);
+ return result;
+ }
+
+ /**
+ * @return true if at least one value has been written
+ */
+ bool isValid() const {
+ return mCounter.load() > 0;
+ }
+
+private:
+ T mValues[2];
+ std::atomic<int> mCounter;
+};
+
+class Timestamp {
+public:
+ Timestamp()
+ : mPosition(0)
+ , mNanoseconds(0) {}
+ Timestamp(int64_t position, int64_t nanoseconds)
+ : mPosition(position)
+ , mNanoseconds(nanoseconds) {}
+
+ int64_t getPosition() const { return mPosition; }
+
+ int64_t getNanoseconds() const { return mNanoseconds; }
+
+private:
+ // These cannot be const because we need to implement the copy assignment operator.
+ int64_t mPosition;
+ int64_t mNanoseconds;
+};
+
#endif //UTILITY_AAUDIO_UTILITIES_H
diff --git a/media/libaaudio/tests/Android.mk b/media/libaaudio/tests/Android.mk
index e4eef06..4402919 100644
--- a/media/libaaudio/tests/Android.mk
+++ b/media/libaaudio/tests/Android.mk
@@ -34,6 +34,17 @@
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src \
+ frameworks/av/media/libaaudio/examples
+LOCAL_SRC_FILES:= test_timestamps.cpp
+LOCAL_SHARED_LIBRARIES := libaaudio
+LOCAL_MODULE := test_timestamps
+include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/include \
frameworks/av/media/libaaudio/src
LOCAL_SRC_FILES:= test_linear_ramp.cpp
LOCAL_SHARED_LIBRARIES := libaaudio
diff --git a/media/libaaudio/tests/test_timestamps.cpp b/media/libaaudio/tests/test_timestamps.cpp
new file mode 100644
index 0000000..d9ca391
--- /dev/null
+++ b/media/libaaudio/tests/test_timestamps.cpp
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Play silence and recover from dead servers or disconnected devices.
+
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+
+#include "utils/AAudioExampleUtils.h"
+
+#define DEFAULT_TIMEOUT_NANOS ((int64_t)1000000000)
+
+int main(int argc, char **argv) {
+ (void) argc;
+ (void) argv;
+
+ aaudio_result_t result = AAUDIO_OK;
+
+ int32_t triesLeft = 3;
+ int32_t bufferCapacity;
+ int32_t framesPerBurst = 0;
+ float *buffer = nullptr;
+
+ int32_t actualChannelCount = 0;
+ int32_t actualSampleRate = 0;
+ int32_t originalBufferSize = 0;
+ int32_t requestedBufferSize = 0;
+ int32_t finalBufferSize = 0;
+ aaudio_format_t actualDataFormat = AAUDIO_FORMAT_PCM_FLOAT;
+ aaudio_sharing_mode_t actualSharingMode = AAUDIO_SHARING_MODE_SHARED;
+ int32_t framesMax;
+ int64_t framesTotal;
+ int64_t printAt;
+ int samplesPerBurst;
+ int64_t previousFramePosition = -1;
+
+ AAudioStreamBuilder *aaudioBuilder = nullptr;
+ AAudioStream *aaudioStream = nullptr;
+
+ // Make printf print immediately so that debug info is not stuck
+ // in a buffer if we hang or crash.
+ setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+ printf("Test Timestamps V0.1.1\n");
+
+ AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
+
+ // Use an AAudioStreamBuilder to contain requested parameters.
+ result = AAudio_createStreamBuilder(&aaudioBuilder);
+ if (result != AAUDIO_OK) {
+ printf("AAudio_createStreamBuilder returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Request stream properties.
+ AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
+ //AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_NONE);
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+ // Create an AAudioStream using the Builder.
+ result = AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStreamBuilder_openStream returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Check to see what kind of stream we actually got.
+ actualSampleRate = AAudioStream_getSampleRate(aaudioStream);
+ actualChannelCount = AAudioStream_getChannelCount(aaudioStream);
+ actualDataFormat = AAudioStream_getFormat(aaudioStream);
+
+ printf("-------- chans = %3d, rate = %6d format = %d\n",
+ actualChannelCount, actualSampleRate, actualDataFormat);
+ printf(" Is MMAP used? %s\n", AAudioStream_isMMapUsed(aaudioStream)
+ ? "yes" : "no");
+
+ // This is the number of frames that are read in one chunk by a DMA controller
+ // or a DSP or a mixer.
+ framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
+ printf(" framesPerBurst = %3d\n", framesPerBurst);
+
+ originalBufferSize = AAudioStream_getBufferSizeInFrames(aaudioStream);
+ requestedBufferSize = 2 * framesPerBurst;
+ finalBufferSize = AAudioStream_setBufferSizeInFrames(aaudioStream, requestedBufferSize);
+
+ printf(" BufferSize: original = %4d, requested = %4d, final = %4d\n",
+ originalBufferSize, requestedBufferSize, finalBufferSize);
+
+ samplesPerBurst = framesPerBurst * actualChannelCount;
+ buffer = new float[samplesPerBurst];
+
+ result = AAudioStream_requestStart(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_requestStart returned %s",
+ AAudio_convertResultToText(result));
+ goto finish;
+ }
+
+ // Play silence very briefly.
+ framesMax = actualSampleRate * 4;
+ framesTotal = 0;
+ printAt = actualSampleRate;
+ while (result == AAUDIO_OK && framesTotal < framesMax) {
+ int32_t framesWritten = AAudioStream_write(aaudioStream,
+ buffer, framesPerBurst,
+ DEFAULT_TIMEOUT_NANOS);
+ if (framesWritten < 0) {
+ result = framesWritten;
+ printf("write() returned %s, frames = %d\n",
+ AAudio_convertResultToText(result), (int)framesTotal);
+ printf(" frames = %d\n", (int)framesTotal);
+ } else if (framesWritten != framesPerBurst) {
+ printf("write() returned %d, frames = %d\n", framesWritten, (int)framesTotal);
+ result = AAUDIO_ERROR_TIMEOUT;
+ } else {
+ framesTotal += framesWritten;
+ if (framesTotal >= printAt) {
+ printf("frames = %d\n", (int)framesTotal);
+ printAt += actualSampleRate;
+ }
+ }
+
+ // Print timestamps.
+ int64_t framePosition = 0;
+ int64_t frameTime = 0;
+ aaudio_result_t timeResult;
+ timeResult = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+ &framePosition, &frameTime);
+
+ if (timeResult == AAUDIO_OK) {
+ if (framePosition > (previousFramePosition + 5000)) {
+ int64_t realTime = getNanoseconds();
+ int64_t framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+
+ double latencyMillis = calculateLatencyMillis(framePosition, frameTime,
+ framesWritten, realTime,
+ actualSampleRate);
+
+ printf("--- timestamp: result = %4d, position = %lld, at %lld nanos"
+ ", latency = %7.2f msec\n",
+ timeResult,
+ (long long) framePosition,
+ (long long) frameTime,
+ latencyMillis);
+ previousFramePosition = framePosition;
+ }
+ }
+ }
+
+ result = AAudioStream_requestStop(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_requestStop returned %s\n",
+ AAudio_convertResultToText(result));
+ }
+ result = AAudioStream_close(aaudioStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_close returned %s\n",
+ AAudio_convertResultToText(result));
+ }
+ aaudioStream = nullptr;
+
+
+finish:
+ if (aaudioStream != nullptr) {
+ AAudioStream_close(aaudioStream);
+ }
+ AAudioStreamBuilder_delete(aaudioBuilder);
+ delete[] buffer;
+ printf("result = %d = %s\n", result, AAudio_convertResultToText(result));
+}
diff --git a/media/libaudiohal/StreamHalHidl.cpp b/media/libaudiohal/StreamHalHidl.cpp
index 176abee..0cafa36 100644
--- a/media/libaudiohal/StreamHalHidl.cpp
+++ b/media/libaudiohal/StreamHalHidl.cpp
@@ -47,7 +47,8 @@
StreamHalHidl::StreamHalHidl(IStream *stream)
: ConversionHelperHidl("Stream"),
mStream(stream),
- mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT) {
+ mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
+ mCachedBufferSize(0) {
// Instrument audio signal power logging.
// Note: This assumes channel mask, format, and sample rate do not change after creation.
@@ -73,7 +74,11 @@
status_t StreamHalHidl::getBufferSize(size_t *size) {
if (!mStream) return NO_INIT;
- return processReturn("getBufferSize", mStream->getBufferSize(), size);
+ status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
+ if (status == OK) {
+ mCachedBufferSize = *size;
+ }
+ return status;
}
status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
@@ -202,6 +207,14 @@
return OK;
}
+status_t StreamHalHidl::getCachedBufferSize(size_t *size) {
+ if (mCachedBufferSize != 0) {
+ *size = mCachedBufferSize;
+ return OK;
+ }
+ return getBufferSize(size);
+}
+
bool StreamHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
return true;
@@ -320,8 +333,19 @@
}
status_t status;
- if (!mDataMQ && (status = prepareForWriting(bytes)) != OK) {
- return status;
+ if (!mDataMQ) {
+ // In case if playback starts close to the end of a compressed track, the bytes
+ // that need to be written is less than the actual buffer size. Need to use
+ // full buffer size for the MQ since otherwise after seeking back to the middle
+ // data will be truncated.
+ size_t bufferSize;
+ if ((status = getCachedBufferSize(&bufferSize)) != OK) {
+ return status;
+ }
+ if (bytes > bufferSize) bufferSize = bytes;
+ if ((status = prepareForWriting(bufferSize)) != OK) {
+ return status;
+ }
}
status = callWriterThread(
diff --git a/media/libaudiohal/StreamHalHidl.h b/media/libaudiohal/StreamHalHidl.h
index a69cce8..d4ab943 100644
--- a/media/libaudiohal/StreamHalHidl.h
+++ b/media/libaudiohal/StreamHalHidl.h
@@ -102,6 +102,8 @@
// The destructor automatically closes the stream.
virtual ~StreamHalHidl();
+ status_t getCachedBufferSize(size_t *size);
+
bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
// mStreamPowerLog is used for audio signal power logging.
@@ -111,6 +113,7 @@
const int HAL_THREAD_PRIORITY_DEFAULT = -1;
IStream *mStream;
int mHalThreadPriority;
+ size_t mCachedBufferSize;
};
class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index b180853..fb4fe4b 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,9 +1,15 @@
cc_library_headers {
name: "libmedia_headers",
vendor_available: true,
- export_include_dirs: ["include", "omx/1.0/include"],
- header_libs: ["libstagefright_headers"],
- export_header_lib_headers: ["libstagefright_headers"],
+ export_include_dirs: ["include"],
+ header_libs:[
+ "libstagefright_headers",
+ "media_plugin_headers",
+ ],
+ export_header_lib_headers: [
+ "libstagefright_headers",
+ "media_plugin_headers",
+ ],
}
cc_library {
@@ -72,13 +78,6 @@
"libutils",
],
- include_dirs: [
- "frameworks/av/include/media",
- "frameworks/native/include", // for media/hardware/MetadataBufferType.h
- "frameworks/native/include/media/openmax",
- "frameworks/av/media/libstagefright",
- ],
-
export_shared_lib_headers: [
"android.hidl.memory@1.0",
"android.hidl.token@1.0-utils",
@@ -204,6 +203,7 @@
"libicui18n",
"libsonivox",
"libmediadrm",
+ "libmedia_helper",
"android.hidl.memory@1.0",
],
@@ -212,15 +212,10 @@
"libc_malloc_debug_backtrace",
],
- include_dirs: [
- "frameworks/native/include/media/openmax",
- "frameworks/av/include/media/",
- "frameworks/av/media/libstagefright",
- ],
-
export_include_dirs: [
"include",
],
+
cflags: [
"-Werror",
"-Wno-error=deprecated-declarations",
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index 808c2b5..990d260 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -18,15 +18,15 @@
#define LOG_TAG "CharacterEncodingDector"
#include <utils/Log.h>
-#include <CharacterEncodingDetector.h>
+#include <media/CharacterEncodingDetector.h>
#include "CharacterEncodingDetectorTables.h"
-#include "utils/Vector.h"
-#include "StringArray.h"
+#include <utils/Vector.h>
+#include <media/StringArray.h>
-#include "unicode/ucnv.h"
-#include "unicode/ucsdet.h"
-#include "unicode/ustring.h"
+#include <unicode/ucnv.h>
+#include <unicode/ucsdet.h>
+#include <unicode/ustring.h>
namespace android {
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
index 95f7d2e..9724fc1 100644
--- a/media/libmedia/IResourceManagerService.cpp
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -19,7 +19,7 @@
#define LOG_TAG "IResourceManagerService"
#include <utils/Log.h>
-#include "media/IResourceManagerService.h"
+#include <media/IResourceManagerService.h>
#include <binder/Parcel.h>
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index 9f803cb..028616b 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -20,8 +20,8 @@
#include <media/mediascanner.h>
-#include "CharacterEncodingDetector.h"
-#include "StringArray.h"
+#include <media/CharacterEncodingDetector.h>
+#include <media/StringArray.h>
namespace android {
diff --git a/media/libmedia/MidiDeviceInfo.cpp b/media/libmedia/MidiDeviceInfo.cpp
index 02efc5f..7588e00 100644
--- a/media/libmedia/MidiDeviceInfo.cpp
+++ b/media/libmedia/MidiDeviceInfo.cpp
@@ -16,7 +16,7 @@
#define LOG_TAG "MidiDeviceInfo"
-#include "MidiDeviceInfo.h"
+#include <media/MidiDeviceInfo.h>
#include <binder/Parcel.h>
#include <log/log.h>
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
index faae954..4e5d67f 100644
--- a/media/libmedia/MidiIoWrapper.cpp
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -22,7 +22,7 @@
#include <sys/stat.h>
#include <fcntl.h>
-#include "media/MidiIoWrapper.h"
+#include <media/MidiIoWrapper.h>
static int readAt(void *handle, void *buffer, int pos, int size) {
return ((android::MidiIoWrapper*)handle)->readAt(buffer, pos, size);
diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp
index b2e5907..7868b85 100644
--- a/media/libmedia/StringArray.cpp
+++ b/media/libmedia/StringArray.cpp
@@ -21,7 +21,7 @@
#include <stdlib.h>
#include <string.h>
-#include "StringArray.h"
+#include <media/StringArray.h>
namespace android {
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index a6eba86..e6c8f9c 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -375,7 +375,7 @@
audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
{
audio_channel_mask_t channels;
- if (!OutputChannelConverter::fromString(literalChannels, channels) ||
+ if (!OutputChannelConverter::fromString(literalChannels, channels) &&
!InputChannelConverter::fromString(literalChannels, channels)) {
return AUDIO_CHANNEL_INVALID;
}
diff --git a/media/libmedia/include/media/IOMX.h b/media/libmedia/include/media/IOMX.h
index 9a0ada1..d868860 100644
--- a/media/libmedia/include/media/IOMX.h
+++ b/media/libmedia/include/media/IOMX.h
@@ -29,8 +29,8 @@
#include <media/hardware/MetadataBufferType.h>
#include <android/hardware/media/omx/1.0/IOmxNode.h>
-#include <OMX_Core.h>
-#include <OMX_Video.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/openmax/OMX_Video.h>
namespace android {
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/Conversion.h
rename to media/libmedia/include/media/omx/1.0/Conversion.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WGraphicBufferSource.h b/media/libmedia/include/media/omx/1.0/WGraphicBufferSource.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WGraphicBufferSource.h
rename to media/libmedia/include/media/omx/1.0/WGraphicBufferSource.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmx.h b/media/libmedia/include/media/omx/1.0/WOmx.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmx.h
rename to media/libmedia/include/media/omx/1.0/WOmx.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h b/media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
similarity index 98%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h
rename to media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
index 86322da..086f648 100644
--- a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxBufferSource.h
+++ b/media/libmedia/include/media/omx/1.0/WOmxBufferSource.h
@@ -21,7 +21,7 @@
#include <hidl/Status.h>
#include <binder/Binder.h>
-#include <OMXFenceParcelable.h>
+#include <media/OMXFenceParcelable.h>
#include <android/hardware/media/omx/1.0/IOmxBufferSource.h>
#include <android/BnOMXBufferSource.h>
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxNode.h b/media/libmedia/include/media/omx/1.0/WOmxNode.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxNode.h
rename to media/libmedia/include/media/omx/1.0/WOmxNode.h
diff --git a/media/libmedia/omx/1.0/include/media/omx/1.0/WOmxObserver.h b/media/libmedia/include/media/omx/1.0/WOmxObserver.h
similarity index 100%
rename from media/libmedia/omx/1.0/include/media/omx/1.0/WOmxObserver.h
rename to media/libmedia/include/media/omx/1.0/WOmxObserver.h
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 942a8fc..496db0d 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -77,7 +77,7 @@
#include "TestPlayerStub.h"
#include "nuplayer/NuPlayerDriver.h"
-#include <OMX.h>
+#include <media/stagefright/omx/OMX.h>
#include "HDCP.h"
#include "HTTPBase.h"
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index d83c406..aa21fff 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1242,6 +1242,16 @@
mAudioLastDequeueTimeUs = seekTimeUs;
}
+ if (mSubtitleTrack.mSource != NULL) {
+ mSubtitleTrack.mPackets->clear();
+ mFetchSubtitleDataGeneration++;
+ }
+
+ if (mTimedTextTrack.mSource != NULL) {
+ mTimedTextTrack.mPackets->clear();
+ mFetchTimedTextDataGeneration++;
+ }
+
// If currently buffering, post kWhatBufferingEnd first, so that
// NuPlayer resumes. Otherwise, if cache hits high watermark
// before new polling happens, no one will resume the playback.
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6491ceb..d4ec30d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -49,18 +49,18 @@
#include <hidlmemory/mapping.h>
-#include <OMX_AudioExt.h>
-#include <OMX_VideoExt.h>
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_AudioExt.h>
+#include <media/openmax/OMX_VideoExt.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/openmax/OMX_AsString.h>
#include "include/avc_utils.h"
#include "include/ACodecBufferChannel.h"
#include "include/DataConverter.h"
#include "include/SecureBuffer.h"
#include "include/SharedMemoryBuffer.h"
-#include "omx/OMXUtils.h"
+#include <media/stagefright/omx/OMXUtils.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 99e6d45..19973bd 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -68,11 +68,6 @@
"avc_utils.cpp",
],
- include_dirs: [
- "frameworks/native/include/media/openmax",
- "frameworks/native/include/media/hardware",
- ],
-
shared_libs: [
"libaudioutils",
"libbinder",
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 941c759..459a1cb 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -941,6 +941,12 @@
ALOGE("moov: depth %d", depth);
return ERROR_MALFORMED;
}
+
+ if (chunk_type == FOURCC('m', 'o', 'o', 'v') && mInitCheck == OK) {
+ ALOGE("duplicate moov");
+ return ERROR_MALFORMED;
+ }
+
if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) {
// store the offset of the first segment
mMoofFound = true;
@@ -1014,6 +1020,12 @@
if (!mLastTrack->meta->findInt32(kKeyTrackID, &trackId)) {
mLastTrack->skipTrack = true;
}
+
+ status_t err = verifyTrack(mLastTrack);
+ if (err != OK) {
+ mLastTrack->skipTrack = true;
+ }
+
if (mLastTrack->skipTrack) {
Track *cur = mFirstTrack;
@@ -1033,12 +1045,6 @@
return OK;
}
-
- status_t err = verifyTrack(mLastTrack);
-
- if (err != OK) {
- return err;
- }
} else if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
mInitCheck = OK;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 92399f1..d57efee 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -127,7 +127,7 @@
private:
enum {
- kMaxCttsOffsetTimeUs = 1000000LL, // 1 second
+ kMaxCttsOffsetTimeUs = 2000000LL, // 2 seconds
kSampleArraySize = 1000,
};
@@ -1860,10 +1860,12 @@
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds(data, size);
- if (esds.getCodecSpecificInfo(&data, &size) != OK) {
- data = NULL;
- size = 0;
+ if (esds.getCodecSpecificInfo(&data, &size) == OK &&
+ data != NULL &&
+ copyCodecSpecificData((uint8_t*)data, size) == OK) {
+ mGotAllCodecSpecificData = true;
}
+ return;
}
}
if (data != NULL && copyCodecSpecificData((uint8_t *)data, size) == OK) {
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index e31c37c..810b0d6 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -578,6 +578,10 @@
}
// First two pages are header pages.
if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) {
+ if (mBuf != NULL) {
+ mBuf->release();
+ mBuf = NULL;
+ }
break;
}
curGranulePosition = mCurrentPage.mGranulePosition;
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 03dc9df..57af772 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -422,15 +422,8 @@
&& trackMeta->findInt32(kKeyDisplayHeight, &height)
&& frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
&& width > 0 && height > 0) {
- if (frame->mDisplayHeight * (int64_t)width / height > (int64_t)frame->mDisplayWidth) {
- frame->mDisplayHeight =
- (int32_t)(height * (int64_t)frame->mDisplayWidth / width);
- } else {
- frame->mDisplayWidth =
- (int32_t)(frame->mDisplayHeight * (int64_t)width / height);
- }
- ALOGV("thumbNail width and height are overridden to %d x %d",
- frame->mDisplayWidth, frame->mDisplayHeight);
+ frame->mDisplayWidth = width;
+ frame->mDisplayHeight = height;
}
}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index a1cf285..73a3965 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -17,7 +17,7 @@
#ifndef SOFT_AAC_2_H_
#define SOFT_AAC_2_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include "aacdecoder_lib.h"
#include "DrcPresModeWrap.h"
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
index 981cbbb..e64c1b7 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_AAC_ENCODER_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
struct VO_AUDIO_CODECAPI;
struct VO_MEM_OPERATOR;
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
index 123fd25..681dcf2 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
@@ -18,7 +18,7 @@
#define SOFT_AAC_ENCODER_2_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include "aacenc_lib.h"
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
index 758d6ac..869b81d 100644
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
@@ -18,7 +18,7 @@
#define SOFT_AMR_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
namespace android {
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
index 50178c4..c73e4dd 100644
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
+++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_AMRNB_ENCODER_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
namespace android {
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
index d0c1dab..8950a8c 100644
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
+++ b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_AMRWB_ENCODER_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include "voAMRWB.h"
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
index 18b7556..679ed3e 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -18,7 +18,7 @@
#define SOFT_H264_DEC_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
#include <sys/time.h>
namespace android {
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
index 818e4a1..a43cdf1 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -21,7 +21,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <utils/Vector.h>
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
namespace android {
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
index c09081d..4a21c34 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
@@ -18,7 +18,7 @@
#define SOFT_FLAC_DECODER_H
#include "FLACDecoder.h"
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
namespace android {
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index 6027f76..f4f0655 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_FLAC_ENC_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include "FLAC/stream_encoder.h"
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
index 16b6340..3ece246 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.h
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.h
@@ -18,7 +18,7 @@
#define SOFT_G711_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
namespace android {
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index 0303dea..ef86915 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -18,7 +18,7 @@
#define SOFT_GSM_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
extern "C" {
#include "gsm.h"
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
index e7c2127..5800490 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -18,7 +18,7 @@
#define SOFT_HEVC_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
#include <sys/time.h>
namespace android {
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
index 4114e7d..e399ac9 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -18,7 +18,7 @@
#define SOFT_MPEG4_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
struct tagvideoDecControls;
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index ae8cb6f..00f2dd3 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -19,7 +19,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABase.h>
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
#include "mp4enc_api.h"
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
index 3bfa6c7..976fd00 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -18,7 +18,7 @@
#define SOFT_MP3_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
struct tPVMP3DecoderExternal;
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
index 6729a54..338fc30 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
@@ -18,7 +18,7 @@
#define SOFT_MPEG2_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
#include <sys/time.h>
namespace android {
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index 84cf79c..d6bb902 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -18,7 +18,7 @@
#define SOFT_VPX_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_codec.h"
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 86dfad7..dd86d36 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_VPX_ENCODER_H_
-#include "SoftVideoEncoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
#include <OMX_VideoExt.h>
#include <OMX_IndexExt.h>
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index b8c1807..fad988b 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -18,7 +18,7 @@
#define SOFT_AVC_H_
-#include "SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
#include <utils/KeyedVector.h>
#include "H264SwDecApi.h"
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index 97f6561..fab925d 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -23,7 +23,7 @@
#define SOFT_OPUS_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
struct OpusMSDecoder;
diff --git a/media/libstagefright/codecs/raw/SoftRaw.h b/media/libstagefright/codecs/raw/SoftRaw.h
index 80906b4..ebc2741 100644
--- a/media/libstagefright/codecs/raw/SoftRaw.h
+++ b/media/libstagefright/codecs/raw/SoftRaw.h
@@ -18,7 +18,7 @@
#define SOFT_RAW_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
struct tPVMP4AudioDecoderExternal;
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index 30d137b..52d1632 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -18,7 +18,7 @@
#define SOFT_VORBIS_H_
-#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
struct vorbis_dsp_state;
struct vorbis_info;
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 2aaa884..d6149c0 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -29,7 +29,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
#include <utils/String16.h>
-#include <MetadataBufferType.h>
+#include <media/hardware/MetadataBufferType.h>
namespace android {
diff --git a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
index d38c337..d1677fa 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
@@ -25,7 +25,7 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaBuffer.h>
-#include <MetadataBufferType.h>
+#include <media/hardware/MetadataBufferType.h>
#include "foundation/ABase.h"
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index 789379a..dfab3b0 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -19,20 +19,19 @@
#include <android-base/logging.h>
#include <gui/IGraphicBufferProducer.h>
-#include <OMX_Core.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/openmax/OMX_AsString.h>
-#include "../OMXUtils.h"
-#include "../OMXMaster.h"
-#include "../GraphicBufferSource.h"
+#include <media/stagefright/omx/OMXUtils.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/GraphicBufferSource.h>
-#include "WOmxNode.h"
-#include "WOmxObserver.h"
-#include "WGraphicBufferProducer.h"
-#include "WGraphicBufferSource.h"
-#include "Conversion.h"
-
-#include "Omx.h"
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/WOmxObserver.h>
+#include <media/stagefright/omx/1.0/WGraphicBufferProducer.h>
+#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/1.0/Omx.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 0e37af9..a82625a 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -19,8 +19,8 @@
#include <android-base/logging.h>
-#include "Conversion.h"
-#include "OmxStore.h"
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/1.0/OmxStore.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
index acda060..fcf1092 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
@@ -18,9 +18,9 @@
#include <android-base/logging.h>
-#include "WGraphicBufferProducer.h"
-#include "WProducerListener.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WGraphicBufferProducer.h>
+#include <media/stagefright/omx/1.0/WProducerListener.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
#include <system/window.h>
namespace android {
@@ -64,10 +64,9 @@
sp<Fence> fence;
::android::FrameEventHistoryDelta outTimestamps;
status_t status = mBase->dequeueBuffer(
- &slot, &fence,
- width, height,
- static_cast<::android::PixelFormat>(format), usage,
- getFrameTimestamps ? &outTimestamps : nullptr);
+ &slot, &fence, width, height,
+ static_cast<::android::PixelFormat>(format), usage, nullptr,
+ getFrameTimestamps ? &outTimestamps : nullptr);
hidl_handle tFence;
FrameEventHistoryDelta tOutTimestamps;
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index d8540f8..3697429 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -17,15 +17,14 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "TWGraphicBufferSource"
+#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+#include <media/stagefright/omx/OMXUtils.h>
#include <android/hardware/media/omx/1.0/IOmxBufferSource.h>
#include <android/hardware/media/omx/1.0/IOmxNode.h>
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-
-#include "omx/OMXUtils.h"
-#include "WGraphicBufferSource.h"
-#include "WOmxNode.h"
-#include "Conversion.h"
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxBufferSource.cpp b/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
index 803283a..c8c963f 100644
--- a/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WOmxBufferSource.cpp
@@ -16,8 +16,8 @@
#include <utils/String8.h>
-#include "WOmxBufferSource.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxNode.cpp b/media/libstagefright/omx/1.0/WOmxNode.cpp
index 91d1010..9f82283 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.cpp
+++ b/media/libstagefright/omx/1.0/WOmxNode.cpp
@@ -16,9 +16,9 @@
#include <algorithm>
-#include "WOmxNode.h"
-#include "WOmxBufferSource.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxNode.h>
+#include <media/stagefright/omx/1.0/WOmxBufferSource.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxObserver.cpp b/media/libstagefright/omx/1.0/WOmxObserver.cpp
index 354db29..ccbe25c 100644
--- a/media/libstagefright/omx/1.0/WOmxObserver.cpp
+++ b/media/libstagefright/omx/1.0/WOmxObserver.cpp
@@ -16,14 +16,14 @@
#define LOG_TAG "WOmxObserver-impl"
-#include <vector>
-
#include <android-base/logging.h>
#include <cutils/native_handle.h>
#include <binder/Binder.h>
-#include "WOmxObserver.h"
-#include "Conversion.h"
+#include <media/stagefright/omx/1.0/WOmxObserver.h>
+#include <media/stagefright/omx/1.0/Conversion.h>
+
+#include <vector>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WProducerListener.cpp b/media/libstagefright/omx/1.0/WProducerListener.cpp
index be0d4d5..bdc3aa1 100644
--- a/media/libstagefright/omx/1.0/WProducerListener.cpp
+++ b/media/libstagefright/omx/1.0/WProducerListener.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-#include "WProducerListener.h"
+#include <media/stagefright/omx/1.0/WProducerListener.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index b60ce16..3027cdd 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -25,6 +25,10 @@
"1.0/WOmxBufferSource.cpp",
],
+ export_include_dirs: [
+ "include",
+ ],
+
include_dirs: [
"frameworks/av/include/media/",
"frameworks/av/media/libstagefright",
@@ -34,6 +38,14 @@
"frameworks/native/include/media/openmax",
],
+ header_libs: [
+ "media_plugin_headers",
+ ],
+
+ export_header_lib_headers: [
+ "media_plugin_headers",
+ ],
+
shared_libs: [
"libbase",
"libbinder",
@@ -60,7 +72,9 @@
export_shared_lib_headers: [
"android.hidl.memory@1.0",
- "libstagefright_xmlparser",
+ "libmedia_omx",
+ "libstagefright_foundation",
+ "libstagefright_xmlparser",
],
cflags: [
@@ -85,12 +99,21 @@
cc_library_static {
name: "libstagefright_omx_utils",
srcs: ["OMXUtils.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright",
- "frameworks/native/include/media/hardware",
- "frameworks/native/include/media/openmax",
+ export_include_dirs: [
+ "include",
],
- shared_libs: ["libmedia"],
+ header_libs: [
+ "media_plugin_headers",
+ ],
+ export_header_lib_headers: [
+ "media_plugin_headers",
+ ],
+ shared_libs: [
+ "libmedia",
+ ],
+ export_shared_lib_headers: [
+ "libmedia",
+ ],
sanitize: {
misc_undefined: [
"signed-integer-overflow",
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.cpp b/media/libstagefright/omx/BWGraphicBufferSource.cpp
index 79f6d93..94ef598 100644
--- a/media/libstagefright/omx/BWGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/BWGraphicBufferSource.cpp
@@ -17,15 +17,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "BWGraphicBufferSource"
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-
+#include <media/stagefright/omx/BWGraphicBufferSource.h>
+#include <media/stagefright/omx/OMXUtils.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
#include <media/OMXBuffer.h>
#include <media/IOMX.h>
-#include "OMXUtils.h"
-#include "BWGraphicBufferSource.h"
-
namespace android {
static const OMX_U32 kPortIndexInput = 0;
diff --git a/media/libstagefright/omx/FrameDropper.cpp b/media/libstagefright/omx/FrameDropper.cpp
index 9a4952e..0c50c58 100644
--- a/media/libstagefright/omx/FrameDropper.cpp
+++ b/media/libstagefright/omx/FrameDropper.cpp
@@ -18,8 +18,7 @@
#define LOG_TAG "FrameDropper"
#include <utils/Log.h>
-#include "FrameDropper.h"
-
+#include <media/stagefright/omx/FrameDropper.h>
#include <media/stagefright/foundation/ADebug.h>
namespace android {
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index ef4d745..caf3ac8 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -22,7 +22,9 @@
#define STRINGIFY_ENUMS // for asString in HardwareAPI.h/VideoAPI.h
-#include "GraphicBufferSource.h"
+#include <media/stagefright/omx/GraphicBufferSource.h>
+#include <media/stagefright/omx/FrameDropper.h>
+#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -31,14 +33,12 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
#include <gui/BufferItem.h>
-#include <HardwareAPI.h>
-#include "omx/OMXUtils.h"
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include "media/OMXBuffer.h"
+#include <media/hardware/HardwareAPI.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/OMXBuffer.h>
#include <inttypes.h>
-#include "FrameDropper.h"
#include <functional>
#include <memory>
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 8c1141d..93b4dbe 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -22,15 +22,12 @@
#include <dlfcn.h>
-#include "../include/OMX.h"
-
-#include "../include/OMXNodeInstance.h"
-
+#include <media/stagefright/omx/OMX.h>
+#include <media/stagefright/omx/OMXNodeInstance.h>
+#include <media/stagefright/omx/BWGraphicBufferSource.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/foundation/ADebug.h>
-#include "BWGraphicBufferSource.h"
-
-#include "OMXMaster.h"
-#include "OMXUtils.h"
namespace android {
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index ac9b0c3..fd97fdc 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -18,15 +18,13 @@
#define LOG_TAG "OMXMaster"
#include <utils/Log.h>
-#include "OMXMaster.h"
-
-#include "SoftOMXPlugin.h"
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <dlfcn.h>
#include <fcntl.h>
-#include <media/stagefright/foundation/ADebug.h>
-
namespace android {
OMXMaster::OMXMaster()
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bc4ce9d..c749454 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -20,20 +20,20 @@
#include <inttypes.h>
-#include "../include/OMXNodeInstance.h"
-#include "OMXMaster.h"
-#include "OMXUtils.h"
+#include <media/stagefright/omx/OMXNodeInstance.h>
+#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXUtils.h>
#include <android/IOMXBufferSource.h>
-#include <OMX_Component.h>
-#include <OMX_IndexExt.h>
-#include <OMX_VideoExt.h>
-#include <OMX_AsString.h>
+#include <media/openmax/OMX_Component.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/openmax/OMX_VideoExt.h>
+#include <media/openmax/OMX_AsString.h>
#include <binder/IMemory.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
-#include <HardwareAPI.h>
+#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ColorUtils.h>
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index a66d565..5894837 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -19,13 +19,13 @@
#include <string.h>
-#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/hardware/HardwareAPI.h>
#include <media/MediaDefs.h>
#include <system/graphics-base.h>
-#include "OMXUtils.h"
namespace android {
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 761b425..09e6d75 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -18,8 +18,7 @@
#define LOG_TAG "SimpleSoftOMXComponent"
#include <utils/Log.h>
-#include "include/SimpleSoftOMXComponent.h"
-
+#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
index df978f8..ee269e1 100644
--- a/media/libstagefright/omx/SoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -18,8 +18,7 @@
#define LOG_TAG "SoftOMXComponent"
#include <utils/Log.h>
-#include "include/SoftOMXComponent.h"
-
+#include <media/stagefright/omx/SoftOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
namespace android {
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 0bc65e1..4946ada 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -18,8 +18,8 @@
#define LOG_TAG "SoftOMXPlugin"
#include <utils/Log.h>
-#include "SoftOMXPlugin.h"
-#include "include/SoftOMXComponent.h"
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/omx/SoftOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 920dd18..24ed981 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -20,14 +20,14 @@
#define LOG_TAG "SoftVideoDecoderOMXComponent"
#include <utils/Log.h>
-#include "include/SoftVideoDecoderOMXComponent.h"
+#include <media/stagefright/omx/SoftVideoDecoderOMXComponent.h>
-#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/MediaDefs.h>
namespace android {
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 7ecfbbb..f33bdc0 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -21,25 +21,22 @@
#include <utils/Log.h>
#include <utils/misc.h>
-#include "include/SoftVideoEncoderOMXComponent.h"
-
-#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/MediaDefs.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/openmax/OMX_IndexExt.h>
+#include <media/MediaDefs.h>
#include <ui/Fence.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>
#include <hardware/gralloc.h>
-
#include <nativebase/nativebase.h>
-#include <OMX_IndexExt.h>
-
namespace android {
const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
diff --git a/media/libstagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
similarity index 99%
rename from media/libstagefright/omx/1.0/Conversion.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index fd91574..f319bdc 100644
--- a/media/libstagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -29,12 +29,12 @@
#include <binder/Binder.h>
#include <binder/Status.h>
#include <ui/FenceTime.h>
-#include <media/OMXFenceParcelable.h>
#include <cutils/native_handle.h>
#include <gui/IGraphicBufferProducer.h>
+#include <media/OMXFenceParcelable.h>
#include <media/OMXBuffer.h>
-#include <VideoAPI.h>
+#include <media/hardware/VideoAPI.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <android/hardware/graphics/bufferqueue/1.0/IProducerListener.h>
diff --git a/media/libstagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
similarity index 97%
rename from media/libstagefright/omx/1.0/Omx.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index 5fdf38e..a6a9d3e 100644
--- a/media/libstagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -20,10 +20,9 @@
#include <hidl/MQDescriptor.h>
#include <hidl/Status.h>
-#include "../../include/OMXNodeInstance.h"
-
-#include <android/hardware/media/omx/1.0/IOmx.h>
+#include <media/stagefright/omx/OMXNodeInstance.h>
#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <android/hardware/media/omx/1.0/IOmx.h>
namespace android {
diff --git a/media/libstagefright/omx/1.0/OmxStore.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
similarity index 100%
rename from media/libstagefright/omx/1.0/OmxStore.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WGraphicBufferProducer.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
similarity index 98%
rename from media/libstagefright/omx/1.0/WGraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
index 4549c97..b9f22ab 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferSource.h
@@ -28,7 +28,7 @@
#include <android/BnGraphicBufferSource.h>
-#include "../GraphicBufferSource.h"
+#include <media/stagefright/omx/GraphicBufferSource.h>
namespace android {
namespace hardware {
diff --git a/media/libstagefright/omx/1.0/WOmxBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WOmxBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxBufferSource.h
diff --git a/media/libstagefright/omx/1.0/WOmxNode.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
similarity index 98%
rename from media/libstagefright/omx/1.0/WOmxNode.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
index d715374..38d5885 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxNode.h
@@ -22,7 +22,7 @@
#include <utils/Errors.h>
-#include "../../include/OMXNodeInstance.h"
+#include <media/stagefright/omx/OMXNodeInstance.h>
#include <android/hardware/media/omx/1.0/IOmxNode.h>
#include <android/hardware/media/omx/1.0/IOmxObserver.h>
diff --git a/media/libstagefright/omx/1.0/WOmxObserver.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxObserver.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WOmxObserver.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WOmxObserver.h
diff --git a/media/libstagefright/omx/1.0/WProducerListener.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WProducerListener.h
similarity index 100%
rename from media/libstagefright/omx/1.0/WProducerListener.h
rename to media/libstagefright/omx/include/media/stagefright/omx/1.0/WProducerListener.h
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/BWGraphicBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/BWGraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/BWGraphicBufferSource.h
diff --git a/media/libstagefright/omx/FrameDropper.h b/media/libstagefright/omx/include/media/stagefright/omx/FrameDropper.h
similarity index 100%
rename from media/libstagefright/omx/FrameDropper.h
rename to media/libstagefright/omx/include/media/stagefright/omx/FrameDropper.h
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/GraphicBufferSource.h
similarity index 100%
rename from media/libstagefright/omx/GraphicBufferSource.h
rename to media/libstagefright/omx/include/media/stagefright/omx/GraphicBufferSource.h
diff --git a/media/libstagefright/omx/IOmxNodeWrapper.h b/media/libstagefright/omx/include/media/stagefright/omx/IOmxNodeWrapper.h
similarity index 100%
rename from media/libstagefright/omx/IOmxNodeWrapper.h
rename to media/libstagefright/omx/include/media/stagefright/omx/IOmxNodeWrapper.h
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/omx/include/media/stagefright/omx/OMX.h
similarity index 100%
rename from media/libstagefright/include/OMX.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMX.h
diff --git a/media/libstagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
similarity index 100%
rename from media/libstagefright/omx/OMXMaster.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
similarity index 100%
rename from media/libstagefright/include/OMXNodeInstance.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
diff --git a/media/libstagefright/omx/OMXUtils.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXUtils.h
similarity index 100%
rename from media/libstagefright/omx/OMXUtils.h
rename to media/libstagefright/omx/include/media/stagefright/omx/OMXUtils.h
diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SimpleSoftOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
diff --git a/media/libstagefright/include/SoftOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftOMXComponent.h
diff --git a/media/libstagefright/omx/SoftOMXPlugin.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftOMXPlugin.h
similarity index 100%
rename from media/libstagefright/omx/SoftOMXPlugin.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftOMXPlugin.h
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftVideoDecoderOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoEncoderOMXComponent.h
similarity index 100%
rename from media/libstagefright/include/SoftVideoEncoderOMXComponent.h
rename to media/libstagefright/omx/include/media/stagefright/omx/SoftVideoEncoderOMXComponent.h
diff --git a/media/libstagefright/omx/tests/FrameDropper_test.cpp b/media/libstagefright/omx/tests/FrameDropper_test.cpp
index f966b5e..a925da6 100644
--- a/media/libstagefright/omx/tests/FrameDropper_test.cpp
+++ b/media/libstagefright/omx/tests/FrameDropper_test.cpp
@@ -20,7 +20,7 @@
#include <gtest/gtest.h>
-#include "FrameDropper.h"
+#include <media/stagefright/omx/FrameDropper.h>
#include <media/stagefright/foundation/ADebug.h>
namespace android {
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 5d1a20b..a450dd3 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -44,7 +44,11 @@
const char* AImageReader::kGraphicBufferKey = "GraphicBuffer";
bool
-AImageReader::isSupportedFormat(int32_t format) {
+AImageReader::isSupportedFormatAndUsage(int32_t format, uint64_t usage) {
+ // Check whether usage has either CPU_READ_OFTEN or CPU_READ set. Note that check against
+ // AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN (0x6) is sufficient as it implies
+ // AHARDWAREBUFFER_USAGE_CPU_READ (0x2).
+ bool hasCpuUsage = usage & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN;
switch (format) {
case AIMAGE_FORMAT_RGBA_8888:
case AIMAGE_FORMAT_RGBX_8888:
@@ -60,6 +64,9 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
return true;
+ case AIMAGE_FORMAT_PRIVATE:
+ // For private format, cpu usage is prohibited.
+ return !hasCpuUsage;
default:
return false;
}
@@ -83,6 +90,8 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
return 1;
+ case AIMAGE_FORMAT_PRIVATE:
+ return 0;
default:
return -1;
}
@@ -606,9 +615,9 @@
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- if (!AImageReader::isSupportedFormat(format)) {
- ALOGE("%s: format %d is not supported by AImageReader",
- __FUNCTION__, format);
+ if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
+ ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
+ __FUNCTION__, format, usage);
return AMEDIA_ERROR_INVALID_PARAMETER;
}
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 989c1fd..989b937 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -49,7 +49,7 @@
struct AImageReader : public RefBase {
public:
- static bool isSupportedFormat(int32_t format);
+ static bool isSupportedFormatAndUsage(int32_t format, uint64_t usage0);
static int getNumPlanesForFormat(int32_t format);
AImageReader(int32_t width,
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index d7443be..1931496 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -495,7 +495,13 @@
/**
* Android private opaque image format.
*
- * <p>This format is not currently supported by {@link AImageReader}.</p>
+ * <p>The choices of the actual format and pixel data layout are entirely up to the
+ * device-specific and framework internal implementations, and may vary depending on use cases
+ * even for the same device. Also note that the contents of these buffers are not directly
+ * accessible to the application.</p>
+ *
+ * <p>When an {@link AImage} of this format is obtained from an {@link AImageReader} or
+ * {@link AImage_getNumberOfPlanes()} method will return zero.</p>
*/
AIMAGE_FORMAT_PRIVATE = 0x22
};
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 59ae507..7a0c17b 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -70,7 +70,9 @@
* @param height The default height in pixels of the Images that this reader will produce.
* @param format The format of the Image that this reader will produce. This must be one of the
* AIMAGE_FORMAT_* enum value defined in {@link AIMAGE_FORMATS}. Note that not all
- * formats are supported, like {@link AIMAGE_FORMAT_PRIVATE}.
+ * formats are supported. One example is {@link AIMAGE_FORMAT_PRIVATE}, as it is not
+ * intended to be read by applications directly. That format is supported by
+ * {@link AImageReader_newWithUsage} introduced in API 26.
* @param maxImages The maximum number of images the user will want to access simultaneously. This
* should be as small as possible to limit memory use. Once maxImages Images are obtained
* by the user, one of them has to be released before a new {@link AImage} will become
@@ -307,6 +309,28 @@
* for the consumer usage. All other parameters and the return values are identical to those passed
* to {@line AImageReader_new}.
*
+ * <p>If the {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, the created {@link AImageReader}
+ * will produce images whose contents are not directly accessible by the application. The application can
+ * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
+ * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
+ * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
+ * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
+ * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
+ * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
+ * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * transporting textures that may be shared cross-process.</p>
+ * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
+ * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
+ * AImageReader}s using other format such as {@link AIMAGE_FORMAT_YUV_420_888}.</p>
+ *
+ * <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
+ * especially if {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, {@code usage} must not include either
+ * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ * AIMAGE_FORMAT_* enum value defined in {@link AIMAGE_FORMATS}.
* @param usage specifies how the consumer will access the AImage, using combination of the
* AHARDWAREBUFFER_USAGE flags described in {@link hardware_buffer.h}.
* Passing {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN} is equivalent to calling
@@ -331,6 +355,11 @@
* {@link AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE}, or combined</td>
* </tr>
* </table>
+ * @return <ul>
+ * <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ * <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ * height, format, maxImages, or usage arguments is not supported.</li>
+ * <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
*
* @see AImage
* @see AImageReader_new
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8efcce6..459e4fb 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7877,17 +7877,34 @@
{
AudioParameter param = AudioParameter(keyValuePair);
int value;
+ bool sendToHal = true;
if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) {
+ audio_devices_t device = (audio_devices_t)value;
// forward device change to effects that have requested to be
// aware of attached audio device.
- if (value != AUDIO_DEVICE_NONE) {
- mOutDevice = value;
+ if (device != AUDIO_DEVICE_NONE) {
for (size_t i = 0; i < mEffectChains.size(); i++) {
- mEffectChains[i]->setDevice_l(mOutDevice);
+ mEffectChains[i]->setDevice_l(device);
}
}
+ if (audio_is_output_devices(device)) {
+ mOutDevice = device;
+ if (!isOutput()) {
+ sendToHal = false;
+ }
+ } else {
+ mInDevice = device;
+ if (device != AUDIO_DEVICE_NONE) {
+ mPrevInDevice = value;
+ }
+ // TODO: implement and call checkBtNrec_l();
+ }
}
- status = mHalStream->setParameters(keyValuePair);
+ if (sendToHal) {
+ status = mHalStream->setParameters(keyValuePair);
+ } else {
+ status = NO_ERROR;
+ }
return false;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 6421695..69b1d7d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2350,7 +2350,9 @@
mErrorCause = errorCause;
- mRequestThread->setPaused(true);
+ if (mRequestThread != nullptr) {
+ mRequestThread->setPaused(true);
+ }
internalUpdateStatusLocked(STATUS_ERROR);
// Notify upstream about a device error
diff --git a/services/mediacodec/MediaCodecService.h b/services/mediacodec/MediaCodecService.h
index d64debb..0d2c9d8 100644
--- a/services/mediacodec/MediaCodecService.h
+++ b/services/mediacodec/MediaCodecService.h
@@ -19,7 +19,7 @@
#include <binder/BinderService.h>
#include <media/IMediaCodecService.h>
-#include <include/OMX.h>
+#include <media/stagefright/omx/OMX.h>
namespace android {
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index c59944a..79d6da5 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -32,8 +32,8 @@
#include "minijail.h"
#include <hidl/HidlTransportSupport.h>
-#include <omx/1.0/Omx.h>
-#include <omx/1.0/OmxStore.h>
+#include <media/stagefright/omx/1.0/Omx.h>
+#include <media/stagefright/omx/1.0/OmxStore.h>
using namespace android;
diff --git a/services/mediacodec/seccomp_policy/mediacodec-arm.policy b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
index 52658d1..cbd7fb9 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
@@ -12,6 +12,7 @@
dup: 1
ppoll: 1
mmap2: 1
+getrandom: 1
# mremap: Ensure |flags| are (MREMAP_MAYMOVE | MREMAP_FIXED) TODO: Once minijail
# parser support for '<' is in this needs to be modified to also prevent
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 8222734..952aa82 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -88,11 +88,11 @@
}
partIndex++;
}
- fifo->getFifoControllerBase()->advanceReadIndex(mFramesPerBurst - framesLeft);
- if (framesLeft > 0) {
- //ALOGW("AAudioMixer::mix() UNDERFLOW by %d / %d frames ----- UNDERFLOW !!!!!!!!!!",
- // framesLeft, mFramesPerBurst);
- }
+ // Always advance by one burst even if we do not have the data.
+ // Otherwise the stream timing will drift whenever there is an underflow.
+ // This actual underflow can then be detected by the client for XRun counting.
+ fifo->getFifoControllerBase()->advanceReadIndex(mFramesPerBurst);
+
#if AAUDIO_MIXER_ATRACE_ENABLED
ATRACE_END();
#endif /* AAUDIO_MIXER_ATRACE_ENABLED */
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 0f863fe..6c345cd 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -177,7 +177,10 @@
configuration.getSamplesPerFrame() != mStreamInternal->getSamplesPerFrame()) {
return false;
}
-
return true;
}
+
+aaudio_result_t AAudioServiceEndpoint::getTimestamp(int64_t *positionFrames, int64_t *timeNanos) {
+ return mStreamInternal->getTimestamp(CLOCK_MONOTONIC, positionFrames, timeNanos);
+}
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index e40a670..603d497 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -69,6 +69,8 @@
mReferenceCount = count;
}
+ aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos);
+
bool matches(const AAudioStreamConfiguration& configuration);
virtual AudioStreamInternal *getStreamInternal() = 0;
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 6a37330..6504cc1 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -62,6 +62,9 @@
// result might be a frame count
while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
+
+ int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
+
// Read audio data from stream using a blocking read.
result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
if (result == AAUDIO_ERROR_DISCONNECTED) {
@@ -74,18 +77,32 @@
}
// Distribute data to each active stream.
- { // use lock guard
+ { // brackets are for lock_guard
+
std::lock_guard <std::mutex> lock(mLockStreams);
- for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
- if (sharedStream->isRunning()) {
- FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+ for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+ if (clientStream->isRunning()) {
+ FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+
+ // Determine offset between framePosition in client's stream vs the underlying
+ // MMAP stream.
+ int64_t clientFramesWritten = fifo->getWriteCounter();
+ // There are two indices that refer to the same frame.
+ int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+ clientStream->setTimestampPositionOffset(positionOffset);
+
if (fifo->getFifoControllerBase()->getEmptyFramesAvailable() <
getFramesPerBurst()) {
underflowCount++;
} else {
fifo->write(mDistributionBuffer, getFramesPerBurst());
}
- sharedStream->markTransferTime(AudioClock::getNanoseconds());
+
+ // This timestamp represents the completion of data being written into the
+ // client buffer. It is sent to the client and used in the timing model
+ // to decide when data will be available to read.
+ Timestamp timestamp(fifo->getWriteCounter(), AudioClock::getNanoseconds());
+ clientStream->markTransferTime(timestamp);
}
}
}
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index b83b918..20cc5b8 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -73,17 +73,31 @@
while (mCallbackEnabled.load() && getStreamInternal()->isActive() && (result >= 0)) {
// Mix data from each active stream.
mMixer.clear();
- { // use lock guard
+ { // brackets are for lock_guard
int index = 0;
+ int64_t mmapFramesWritten = getStreamInternal()->getFramesWritten();
+
std::lock_guard <std::mutex> lock(mLockStreams);
- for (sp<AAudioServiceStreamShared> sharedStream : mRegisteredStreams) {
- if (sharedStream->isRunning()) {
- FifoBuffer *fifo = sharedStream->getDataFifoBuffer();
+ for (sp<AAudioServiceStreamShared> clientStream : mRegisteredStreams) {
+ if (clientStream->isRunning()) {
+ FifoBuffer *fifo = clientStream->getDataFifoBuffer();
+ // Determine offset between framePosition in client's stream vs the underlying
+ // MMAP stream.
+ int64_t clientFramesRead = fifo->getReadCounter();
+ // These two indices refer to the same frame.
+ int64_t positionOffset = mmapFramesWritten - clientFramesRead;
+ clientStream->setTimestampPositionOffset(positionOffset);
+
float volume = 1.0; // to match legacy volume
bool underflowed = mMixer.mix(index, fifo, volume);
underflowCount += underflowed ? 1 : 0;
// TODO log underflows in each stream
- sharedStream->markTransferTime(AudioClock::getNanoseconds());
+
+ // This timestamp represents the completion of data being read out of the
+ // client buffer. It is sent to the client and used in the timing model
+ // to decide when the client has room to write more data.
+ Timestamp timestamp(fifo->getReadCounter(), AudioClock::getNanoseconds());
+ clientStream->markTransferTime(timestamp);
}
index++;
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5f7d179..3fc5957 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -37,7 +37,8 @@
AAudioServiceStreamBase::AAudioServiceStreamBase()
: mUpMessageQueue(nullptr)
- , mAAudioThread() {
+ , mAAudioThread()
+ , mAtomicTimestamp() {
mMmapClient.clientUid = -1;
mMmapClient.clientPid = -1;
mMmapClient.packageName = String16("");
@@ -211,15 +212,25 @@
aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
AAudioServiceMessage command;
+ // Send a timestamp for the clock model.
aaudio_result_t result = getFreeRunningPosition(&command.timestamp.position,
&command.timestamp.timestamp);
if (result == AAUDIO_OK) {
- // ALOGD("sendCurrentTimestamp(): position = %lld, nanos = %lld",
- // (long long) command.timestamp.position,
- // (long long) command.timestamp.timestamp);
- command.what = AAudioServiceMessage::code::TIMESTAMP;
+ command.what = AAudioServiceMessage::code::TIMESTAMP_SERVICE;
result = writeUpMessageQueue(&command);
- } else if (result == AAUDIO_ERROR_UNAVAILABLE) {
+
+ if (result == AAUDIO_OK) {
+ // Send a hardware timestamp for presentation time.
+ result = getHardwareTimestamp(&command.timestamp.position,
+ &command.timestamp.timestamp);
+ if (result == AAUDIO_OK) {
+ command.what = AAudioServiceMessage::code::TIMESTAMP_HARDWARE;
+ result = writeUpMessageQueue(&command);
+ }
+ }
+ }
+
+ if (result == AAUDIO_ERROR_UNAVAILABLE) {
result = AAUDIO_OK; // just not available yet, try again later
}
return result;
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index cebefec..e91ea82 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,6 +20,7 @@
#include <assert.h>
#include <mutex>
+#include <media/AudioClient.h>
#include <utils/RefBase.h>
#include "fifo/FifoBuffer.h"
@@ -27,7 +28,7 @@
#include "binding/AudioEndpointParcelable.h"
#include "binding/AAudioServiceMessage.h"
#include "utility/AAudioUtilities.h"
-#include <media/AudioClient.h>
+#include "utility/AudioClock.h"
#include "SharedRingBuffer.h"
#include "AAudioThread.h"
@@ -170,6 +171,8 @@
*/
virtual aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) = 0;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) = 0;
+
virtual aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) = 0;
aaudio_stream_state_t mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
@@ -191,6 +194,8 @@
android::AudioClient mMmapClient;
audio_port_handle_t mClientHandle = AUDIO_PORT_HANDLE_NONE;
+ SimpleDoubleBuffer<Timestamp> mAtomicTimestamp;
+
private:
aaudio_handle_t mHandle = -1;
};
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index ff02c0f..970d734 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -37,6 +37,11 @@
#define AAUDIO_BUFFER_CAPACITY_MIN 4 * 512
#define AAUDIO_SAMPLE_RATE_DEFAULT 48000
+// This is an estimate of the time difference between the HW and the MMAP time.
+// TODO Get presentation timestamps from the HAL instead of using these estimates.
+#define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (3 * AAUDIO_NANOS_PER_MILLISECOND)
+#define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (-1 * AAUDIO_NANOS_PER_MILLISECOND)
+
/**
* Service Stream that uses an MMAP buffer.
*/
@@ -113,10 +118,14 @@
config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
? AUDIO_CHANNEL_OUT_STEREO
: audio_channel_out_mask_from_count(aaudioSamplesPerFrame);
+ mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at DAC later
+
} else if (direction == AAUDIO_DIRECTION_INPUT) {
config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
? AUDIO_CHANNEL_IN_STEREO
: audio_channel_in_mask_from_count(aaudioSamplesPerFrame);
+ mHardwareTimeOffsetNanos = INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at ADC earlier
+
} else {
ALOGE("openMmapStream - invalid direction = %d", direction);
return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
@@ -289,6 +298,7 @@
return AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
}
+// Get free-running DSP or DMA hardware position from the HAL.
aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition(int64_t *positionFrames,
int64_t *timeNanos) {
struct audio_mmap_position position;
@@ -305,12 +315,29 @@
disconnect();
} else {
mFramesRead.update32(position.position_frames);
- *positionFrames = mFramesRead.get();
- *timeNanos = position.time_nanoseconds;
+
+ Timestamp timestamp(mFramesRead.get(), position.time_nanoseconds);
+ mAtomicTimestamp.write(timestamp);
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds();
}
return result;
}
+// Get timestamp that was written by getFreeRunningPosition()
+aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) {
+ // TODO Get presentation timestamp from the HAL
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds() + mHardwareTimeOffsetNanos;
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+}
+
void AAudioServiceStreamMMAP::onTearDown() {
ALOGD("AAudioServiceStreamMMAP::onTearDown() called");
disconnect();
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 533e5a8..e6f8fad 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -100,6 +100,8 @@
aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable) override;
aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) override;
private:
// This proxy class was needed to prevent a crash in AudioFlinger
@@ -132,6 +134,7 @@
MonotonicCounter mFramesRead;
int32_t mPreviousFrameCounter = 0; // from HAL
int mAudioDataFileDescriptor = -1;
+ int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
// Interface to the AudioFlinger MMAP support.
android::sp<android::MmapStreamInterface> mMmapStream;
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 5654113..07c4faf 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -41,6 +41,7 @@
AAudioServiceStreamShared::AAudioServiceStreamShared(AAudioService &audioService)
: mAudioService(audioService)
+ , mTimestampPositionOffset(0)
{
}
@@ -307,15 +308,30 @@
return AAUDIO_OK;
}
-void AAudioServiceStreamShared::markTransferTime(int64_t nanoseconds) {
- mMarkedPosition = mAudioDataQueue->getFifoBuffer()->getReadCounter();
- mMarkedTime = nanoseconds;
+void AAudioServiceStreamShared::markTransferTime(Timestamp ×tamp) {
+ mAtomicTimestamp.write(timestamp);
}
+// Get timestamp that was written by the real-time service thread, eg. mixer.
aaudio_result_t AAudioServiceStreamShared::getFreeRunningPosition(int64_t *positionFrames,
int64_t *timeNanos) {
- // TODO get these two numbers as an atomic pair
- *positionFrames = mMarkedPosition;
- *timeNanos = mMarkedTime;
- return AAUDIO_OK;
+ if (mAtomicTimestamp.isValid()) {
+ Timestamp timestamp = mAtomicTimestamp.read();
+ *positionFrames = timestamp.getPosition();
+ *timeNanos = timestamp.getNanoseconds();
+ return AAUDIO_OK;
+ } else {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+}
+
+// Get timestamp from lower level service.
+aaudio_result_t AAudioServiceStreamShared::getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) {
+
+ aaudio_result_t result = mServiceEndpoint->getTimestamp(positionFrames, timeNanos);
+ if (result == AAUDIO_OK) {
+ *positionFrames -= mTimestampPositionOffset.load(); // Offset from shared MMAP stream
+ }
+ return result;
}
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 6b67337..8caccda 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -85,7 +85,11 @@
/* Keep a record of when a buffer transfer completed.
* This allows for a more accurate timing model.
*/
- void markTransferTime(int64_t nanoseconds);
+ void markTransferTime(Timestamp ×tamp);
+
+ void setTimestampPositionOffset(int64_t deltaFrames) {
+ mTimestampPositionOffset.store(deltaFrames);
+ }
protected:
@@ -93,6 +97,9 @@
aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+ virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames,
+ int64_t *timeNanos) override;
+
/**
* @param requestedCapacityFrames
* @param framesPerBurst
@@ -106,8 +113,7 @@
AAudioServiceEndpoint *mServiceEndpoint = nullptr;
SharedRingBuffer *mAudioDataQueue = nullptr;
- int64_t mMarkedPosition = 0;
- int64_t mMarkedTime = 0;
+ std::atomic<int64_t> mTimestampPositionOffset;
};
} /* namespace aaudio */