Merge "Add parameters validation on AAudio MMap server" into oc-dr1-dev
diff --git a/camera/Android.bp b/camera/Android.bp
index 849f560..c76ae50 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -31,9 +31,11 @@
// include libcamera_client, at the path "aidl/package/path/BnFoo.h"
"aidl/android/hardware/ICameraService.aidl",
"aidl/android/hardware/ICameraServiceListener.aidl",
+ "aidl/android/hardware/ICameraServiceProxy.aidl",
"aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
"aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
+
// Source for camera interface parcelables, and manually-written interfaces
"Camera.cpp",
"CameraMetadata.cpp",
@@ -42,7 +44,6 @@
"CameraParameters2.cpp",
"ICamera.cpp",
"ICameraClient.cpp",
- "ICameraServiceProxy.cpp",
"ICameraRecordingProxy.cpp",
"ICameraRecordingProxyListener.cpp",
"camera2/CaptureRequest.cpp",
diff --git a/camera/ICameraServiceProxy.cpp b/camera/ICameraServiceProxy.cpp
deleted file mode 100644
index a9d0836..0000000
--- a/camera/ICameraServiceProxy.cpp
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "BpCameraServiceProxy"
-
-#include <stdint.h>
-
-#include <binder/Parcel.h>
-
-#include <camera/ICameraServiceProxy.h>
-
-namespace android {
-
-class BpCameraServiceProxy: public BpInterface<ICameraServiceProxy> {
-public:
- explicit BpCameraServiceProxy(const sp<IBinder>& impl)
- : BpInterface<ICameraServiceProxy>(impl) {}
-
- virtual void pingForUserUpdate() {
- Parcel data;
- data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
- remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, nullptr,
- IBinder::FLAG_ONEWAY);
- }
-
- virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) {
- Parcel data;
- data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
- data.writeString16(cameraId);
- data.writeInt32(newCameraState);
- remote()->transact(BnCameraServiceProxy::NOTIFY_CAMERA_STATE, data, nullptr,
- IBinder::FLAG_ONEWAY);
- }
-
-};
-
-
-IMPLEMENT_META_INTERFACE(CameraServiceProxy, "android.hardware.ICameraServiceProxy");
-
-status_t BnCameraServiceProxy::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
- uint32_t flags) {
- switch(code) {
- case PING_FOR_USER_UPDATE: {
- CHECK_INTERFACE(ICameraServiceProxy, data, reply);
- pingForUserUpdate();
- return NO_ERROR;
- } break;
- case NOTIFY_CAMERA_STATE: {
- CHECK_INTERFACE(ICameraServiceProxy, data, reply);
- String16 cameraId = data.readString16();
- CameraState newCameraState =
- static_cast<CameraState>(data.readInt32());
- notifyCameraState(cameraId, newCameraState);
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-}; // namespace android
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 0e654d5..5dc23eb 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -31,7 +31,23 @@
oneway void pingForUserUpdate();
/**
- * Update the status of a camera device
+ * Values for notifyCameraState newCameraState
*/
- oneway void notifyCameraState(String cameraId, int newCameraState);
+ const int CAMERA_STATE_OPEN = 0;
+ const int CAMERA_STATE_ACTIVE = 1;
+ const int CAMERA_STATE_IDLE = 2;
+ const int CAMERA_STATE_CLOSED = 3;
+
+ /**
+ * Values for notifyCameraState facing
+ */
+ const int CAMERA_FACING_BACK = 0;
+ const int CAMERA_FACING_FRONT = 1;
+ const int CAMERA_FACING_EXTERNAL = 2;
+
+ /**
+ * Update the status of a camera device.
+ */
+ oneway void notifyCameraState(String cameraId, int facing, int newCameraState,
+ String clientName);
}
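
With the proxy interface now defined purely in AIDL, the notification carries the camera's facing and the client's package name alongside the state transition. A minimal sketch of how the new constants pair up in a call; the free function and the client package below are hypothetical stand-ins for the generated binder proxy, not the real API:

```cpp
#include <cstdint>
#include <cstdio>

// Mirrors the constants declared in ICameraServiceProxy.aidl above.
constexpr int32_t CAMERA_STATE_OPEN     = 0;
constexpr int32_t CAMERA_STATE_ACTIVE   = 1;
constexpr int32_t CAMERA_STATE_IDLE     = 2;
constexpr int32_t CAMERA_STATE_CLOSED   = 3;

constexpr int32_t CAMERA_FACING_BACK     = 0;
constexpr int32_t CAMERA_FACING_FRONT    = 1;
constexpr int32_t CAMERA_FACING_EXTERNAL = 2;

// Stand-in for the generated proxy call; real code goes through binder.
static void notifyCameraState(const char* cameraId, int32_t facing,
                              int32_t newCameraState, const char* clientName) {
    printf("camera %s facing=%d state=%d client=%s\n",
           cameraId, facing, newCameraState, clientName);
}

int main() {
    // A back-facing camera opened by a hypothetical client package.
    notifyCameraState("0", CAMERA_FACING_BACK, CAMERA_STATE_OPEN,
                      "com.example.camera");
    return 0;
}
```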
diff --git a/camera/include/camera/ICameraServiceProxy.h b/camera/include/camera/ICameraServiceProxy.h
deleted file mode 100644
index 2613c01..0000000
--- a/camera/include/camera/ICameraServiceProxy.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
-#define ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-
-namespace android {
-
-/**
- * Interface from native camera service to managed-side camera service proxy.
- *
- * Keep in sync with frameworks/base/core/java/android/hardware/ICameraServiceProxy.aidl
- *
- */
-class ICameraServiceProxy : public IInterface {
-public:
- enum {
- PING_FOR_USER_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
- NOTIFY_CAMERA_STATE
- };
-
- enum CameraState {
- CAMERA_STATE_OPEN,
- CAMERA_STATE_ACTIVE,
- CAMERA_STATE_IDLE,
- CAMERA_STATE_CLOSED
- };
-
- DECLARE_META_INTERFACE(CameraServiceProxy);
-
- virtual void pingForUserUpdate() = 0;
- virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) = 0;
-};
-
-class BnCameraServiceProxy: public BnInterface<ICameraServiceProxy>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-
-
-}; // namespace android
-
-#endif // ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 45a3beb..57d45cd 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -17,6 +17,7 @@
// Play an impulse and then record it.
// Measure the round trip latency.
+#include <algorithm>
#include <assert.h>
#include <cctype>
#include <math.h>
@@ -25,11 +26,13 @@
#include <unistd.h>
#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
-#define INPUT_PEAK_THRESHOLD 0.1f
-#define SILENCE_FRAMES 10000
+// Tag for machine readable results as property = value pairs
+#define RESULT_TAG "RESULT: "
#define SAMPLE_RATE 48000
-#define NUM_SECONDS 7
+#define NUM_SECONDS 5
+#define NUM_INPUT_CHANNELS 1
#define FILENAME "/data/oboe_input.raw"
#define NANOS_PER_MICROSECOND ((int64_t)1000)
@@ -37,12 +40,172 @@
#define MILLIS_PER_SECOND 1000
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * MILLIS_PER_SECOND)
-class AudioRecorder
+#define MAX_ZEROTH_PARTIAL_BINS 40
+
+static const float s_Impulse[] = {
+ 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, // silence on each side of the impulse
+ 0.5f, 0.9f, 0.0f, -0.9f, -0.5f, // bipolar
+ 0.0f, 0.0f, 0.0f, 0.0f, 0.0f};
+
+
+static double calculateCorrelation(const float *a,
+ const float *b,
+ int windowSize)
+{
+ double correlation = 0.0;
+ double sumProducts = 0.0;
+ double sumSquares = 0.0;
+
+ // Correlate a against b.
+ for (int i = 0; i < windowSize; i++) {
+ float s1 = a[i];
+ float s2 = b[i];
+ // Use a normalized cross-correlation.
+ sumProducts += s1 * s2;
+ sumSquares += ((s1 * s1) + (s2 * s2));
+ }
+
+ if (sumSquares >= 0.00000001) {
+ correlation = (float) (2.0 * sumProducts / sumSquares);
+ }
+ return correlation;
+}
+
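
For reference, the metric above is a normalized cross-correlation, 2·Σ(a·b) / (Σa² + Σb²): it evaluates to 1.0 when the two windows are identical, -1.0 when one is an inverted copy, and near zero for unrelated signals. A self-contained sanity check of that behavior (a restatement for illustration, not part of the patch):

```cpp
#include <cstdio>

static double correlate(const float* a, const float* b, int n) {
    double sumProducts = 0.0, sumSquares = 0.0;
    for (int i = 0; i < n; i++) {
        sumProducts += (double) a[i] * b[i];
        sumSquares  += (double) a[i] * a[i] + (double) b[i] * b[i];
    }
    return (sumSquares < 1e-8) ? 0.0 : 2.0 * sumProducts / sumSquares;
}

int main() {
    const float pulse[]    = {0.5f, 0.9f, 0.0f, -0.9f, -0.5f};
    const float inverted[] = {-0.5f, -0.9f, 0.0f, 0.9f, 0.5f};
    const float noise[]    = {0.1f, -0.2f, 0.3f, 0.0f, -0.1f};
    printf("self:     %f\n", correlate(pulse, pulse, 5));     // 1.0
    printf("inverted: %f\n", correlate(pulse, inverted, 5));  // -1.0
    printf("noise:    %f\n", correlate(pulse, noise, 5));     // near 0
    return 0;
}
```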
+static int calculateCorrelations(const float *haystack, int haystackSize,
+ const float *needle, int needleSize,
+ float *results, int resultSize)
+{
+ int ic;
+ int maxCorrelations = haystackSize - needleSize;
+ int numCorrelations = std::min(maxCorrelations, resultSize);
+
+ for (ic = 0; ic < numCorrelations; ic++) {
+ double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
+ results[ic] = correlation;
+ }
+
+ return numCorrelations;
+}
+
+/*==========================================================================================*/
+/**
+ * Scan until we get a correlation that goes over the tolerance level,
+ * peaks, and then drops back down.
+ */
+static double findFirstMatch(const float *haystack, int haystackSize,
+ const float *needle, int needleSize, double threshold )
+{
+ int ic;
+ // How many correlations can we calculate?
+ int numCorrelations = haystackSize - needleSize;
+ double maxCorrelation = 0.0;
+ int peakIndex = -1;
+ double location = -1.0;
+
+ for (ic = 0; ic < numCorrelations; ic++) {
+ double correlation = calculateCorrelation(&haystack[ic], needle, needleSize);
+
+ if( (correlation > maxCorrelation) ) {
+ maxCorrelation = correlation;
+ peakIndex = ic;
+ }
+
+ //printf("PaQa_FindFirstMatch: ic = %4d, correlation = %8f, maxSum = %8f\n",
+ // ic, correlation, maxSum );
+ // Are we past what we were looking for?
+ if((maxCorrelation > threshold) && (correlation < 0.5 * maxCorrelation)) {
+ location = peakIndex;
+ break;
+ }
+ }
+
+ return location;
+}
+
+typedef struct LatencyReport_s {
+ double latencyInFrames;
+ double confidence;
+} LatencyReport;
+
+// Apply a technique similar to Harmonic Product Spectrum Analysis to find the echo fundamental.
+// Use the first echo instead of the original impulse because it is a better match.
+int measureLatencyFromEchos(const float *haystack, int haystackSize,
+ const float *needle, int needleSize,
+ LatencyReport *report) {
+ double threshold = 0.1;
+
+ // Find first peak
+ int first = (int) (findFirstMatch(haystack,
+ haystackSize,
+ needle,
+ needleSize,
+ threshold) + 0.5);
+
+    // Use the first echo as the needle for the other echoes because
+ // it will be more similar.
+ needle = &haystack[first];
+ int again = (int) (findFirstMatch(haystack,
+ haystackSize,
+ needle,
+ needleSize,
+ threshold) + 0.5);
+
+ printf("first = %d, again at %d\n", first, again);
+ first = again;
+
+ // Allocate results array
+ int remaining = haystackSize - first;
+ int generous = 48000 * 2;
+ int numCorrelations = std::min(remaining, generous);
+ float *correlations = new float[numCorrelations];
+ float *harmonicSums = new float[numCorrelations](); // cleared to zero
+
+ // Generate correlation for every position.
+ numCorrelations = calculateCorrelations(&haystack[first], remaining,
+ needle, needleSize,
+ correlations, numCorrelations);
+
+ // Add higher harmonics mapped onto lower harmonics.
+ // This reinforces the "fundamental" echo.
+ const int numEchoes = 10;
+ for (int partial = 1; partial < numEchoes; partial++) {
+ for (int i = 0; i < numCorrelations; i++) {
+ harmonicSums[i / partial] += correlations[i] / partial;
+ }
+ }
+
+ // Find highest peak in correlation array.
+ float maxCorrelation = 0.0;
+ float sumOfPeaks = 0.0;
+ int peakIndex = 0;
+ const int skip = MAX_ZEROTH_PARTIAL_BINS; // skip low bins
+ for (int i = skip; i < numCorrelations; i++) {
+ if (harmonicSums[i] > maxCorrelation) {
+ maxCorrelation = harmonicSums[i];
+ sumOfPeaks += maxCorrelation;
+ peakIndex = i;
+ printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
+ }
+ }
+
+ report->latencyInFrames = peakIndex;
+ if (sumOfPeaks < 0.0001) {
+ report->confidence = 0.0;
+ } else {
+ report->confidence = maxCorrelation / sumOfPeaks;
+ }
+
+ delete[] correlations;
+ delete[] harmonicSums;
+ return 0;
+}
+
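
The folding loop in measureLatencyFromEchos, `harmonicSums[i / partial] += correlations[i] / partial`, maps a correlation peak at lag n·P back onto bin P, so the bin at the true echo period accumulates contributions from every echo. A toy demonstration of why the fundamental wins; the skip of 4 stands in for MAX_ZEROTH_PARTIAL_BINS, scaled down to this 32-bin example:

```cpp
#include <algorithm>
#include <cstdio>

int main() {
    const int n = 32;
    float correlations[n] = {0};
    // Fake echo train with period 8: peaks at lags 8, 16, 24.
    correlations[8] = 0.9f; correlations[16] = 0.8f; correlations[24] = 0.7f;

    float sums[n] = {0};
    for (int partial = 1; partial < 10; partial++) {
        for (int i = 0; i < n; i++) {
            sums[i / partial] += correlations[i] / partial;
        }
    }
    // Skip the lowest bins, as the analyzer does with MAX_ZEROTH_PARTIAL_BINS.
    int peak = std::max_element(sums + 4, sums + n) - sums;
    printf("fundamental lag = %d\n", peak);  // prints 8
    return 0;
}
```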
+class AudioRecording
{
public:
- AudioRecorder() {
+ AudioRecording() {
}
- ~AudioRecorder() {
+ ~AudioRecording() {
delete[] mData;
}
@@ -52,7 +215,8 @@
mMaxFrames = maxFrames;
}
- void record(int16_t *inputData, int inputChannelCount, int numFrames) {
+ // Write SHORT data from the first channel.
+ int write(int16_t *inputData, int inputChannelCount, int numFrames) {
// stop at end of buffer
if ((mFrameCounter + numFrames) > mMaxFrames) {
numFrames = mMaxFrames - mFrameCounter;
@@ -60,9 +224,11 @@
for (int i = 0; i < numFrames; i++) {
mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
}
+ return numFrames;
}
- void record(float *inputData, int inputChannelCount, int numFrames) {
+ // Write FLOAT data from the first channel.
+ int write(float *inputData, int inputChannelCount, int numFrames) {
// stop at end of buffer
if ((mFrameCounter + numFrames) > mMaxFrames) {
numFrames = mMaxFrames - mFrameCounter;
@@ -70,162 +236,253 @@
for (int i = 0; i < numFrames; i++) {
mData[mFrameCounter++] = inputData[i * inputChannelCount];
}
+ return numFrames;
}
- int save(const char *fileName) {
+ int size() {
+ return mFrameCounter;
+ }
+
+ float *getData() {
+ return mData;
+ }
+
+ int save(const char *fileName, bool writeShorts = true) {
+ int written = 0;
+ const int chunkSize = 64;
FILE *fid = fopen(fileName, "wb");
if (fid == NULL) {
- return errno;
+ return -errno;
}
- int written = fwrite(mData, sizeof(float), mFrameCounter, fid);
+
+ if (writeShorts) {
+ int16_t buffer[chunkSize];
+ int32_t framesLeft = mFrameCounter;
+ int32_t cursor = 0;
+ while (framesLeft) {
+ int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
+ for (int i = 0; i < framesToWrite; i++) {
+ buffer[i] = (int16_t) (mData[cursor++] * 32767);
+ }
+ written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
+ framesLeft -= framesToWrite;
+ }
+ } else {
+ written = fwrite(mData, sizeof(float), mFrameCounter, fid);
+ }
fclose(fid);
return written;
}
private:
- float *mData = NULL;
+ float *mData = nullptr;
int32_t mFrameCounter = 0;
int32_t mMaxFrames = 0;
};
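
One caveat in save() above: the conversion `(int16_t)(mData[cursor++] * 32767)` wraps around for samples outside [-1.0, 1.0], which a recirculating loop with gain near or above 1.0 can produce. A hedged alternative, assuming saturation is the desired behavior:

```cpp
#include <cstdint>

// Saturating float-to-PCM16 conversion; clipped peaks stay at full scale
// instead of wrapping to the opposite sign.
static int16_t floatToPcm16(float sample) {
    float scaled = sample * 32767.0f;
    if (scaled > 32767.0f) scaled = 32767.0f;
    if (scaled < -32768.0f) scaled = -32768.0f;
    return (int16_t) scaled;
}
```

Swapping this in for the raw cast inside the chunked write loop would leave the file format unchanged.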
// ====================================================================================
-// ========================= Loopback Processor =======================================
-// ====================================================================================
class LoopbackProcessor {
public:
+ virtual ~LoopbackProcessor() = default;
- // Calculate mean and standard deviation.
- double calculateAverageLatency(double *deviation) {
- if (mLatencyCount <= 0) {
- return -1.0;
- }
- double sum = 0.0;
- for (int i = 0; i < mLatencyCount; i++) {
- sum += mLatencyArray[i];
- }
- double average = sum / mLatencyCount;
- sum = 0.0;
- for (int i = 0; i < mLatencyCount; i++) {
- double error = average - mLatencyArray[i];
- sum += error * error; // squared
- }
- *deviation = sqrt(sum / mLatencyCount);
- return average;
+ virtual void process(float *inputData, int inputChannelCount,
+ float *outputData, int outputChannelCount,
+ int numFrames) = 0;
+
+
+ virtual void report() = 0;
+
+ void setSampleRate(int32_t sampleRate) {
+ mSampleRate = sampleRate;
}
- float getMaxAmplitude() const { return mMaxAmplitude; }
- int getMeasurementCount() const { return mLatencyCount; }
- float getAverageAmplitude() const { return mAmplitudeTotal / mAmplitudeCount; }
-
- // TODO Convert this to a feedback circuit and then use auto-correlation to measure the period.
- void process(float *inputData, int inputChannelCount,
- float *outputData, int outputChannelCount,
- int numFrames) {
- (void) outputChannelCount;
-
- // Measure peak and average amplitude.
- for (int i = 0; i < numFrames; i++) {
- float sample = inputData[i * inputChannelCount];
- if (sample > mMaxAmplitude) {
- mMaxAmplitude = sample;
- }
- if (sample < 0) {
- sample = 0 - sample;
- }
- mAmplitudeTotal += sample;
- mAmplitudeCount++;
- }
-
- // Clear output.
- memset(outputData, 0, numFrames * outputChannelCount * sizeof(float));
-
- // Wait a while between hearing the pulse and starting a new one.
- if (mState == STATE_SILENT) {
- mCounter += numFrames;
- if (mCounter > SILENCE_FRAMES) {
- //printf("LoopbackProcessor send impulse, burst #%d\n", mBurstCounter);
- // copy impulse
- for (float sample : mImpulse) {
- *outputData = sample;
- outputData += outputChannelCount;
- }
- mState = STATE_LISTENING;
- mCounter = 0;
- }
- }
- // Start listening as soon as we send the impulse.
- if (mState == STATE_LISTENING) {
- for (int i = 0; i < numFrames; i++) {
- float sample = inputData[i * inputChannelCount];
- if (sample >= INPUT_PEAK_THRESHOLD) {
- mLatencyArray[mLatencyCount++] = mCounter;
- if (mLatencyCount >= MAX_LATENCY_VALUES) {
- mState = STATE_DONE;
- } else {
- mState = STATE_SILENT;
- }
- mCounter = 0;
- break;
- } else {
- mCounter++;
- }
- }
- }
+ int32_t getSampleRate() {
+ return mSampleRate;
}
- void echo(float *inputData, int inputChannelCount,
- float *outputData, int outputChannelCount,
- int numFrames) {
- int channelsValid = (inputChannelCount < outputChannelCount)
- ? inputChannelCount : outputChannelCount;
- for (int i = 0; i < numFrames; i++) {
- int ic;
- for (ic = 0; ic < channelsValid; ic++) {
- outputData[ic] = inputData[ic];
- }
- for (ic = 0; ic < outputChannelCount; ic++) {
- outputData[ic] = 0;
- }
- inputData += inputChannelCount;
- outputData += outputChannelCount;
- }
- }
private:
- enum {
- STATE_SILENT,
- STATE_LISTENING,
- STATE_DONE
- };
-
- enum {
- MAX_LATENCY_VALUES = 64
- };
-
- int mState = STATE_SILENT;
- int32_t mCounter = 0;
- int32_t mLatencyArray[MAX_LATENCY_VALUES];
- int32_t mLatencyCount = 0;
- float mMaxAmplitude = 0;
- float mAmplitudeTotal = 0;
- int32_t mAmplitudeCount = 0;
- static const float mImpulse[5];
+ int32_t mSampleRate = SAMPLE_RATE;
};
-const float LoopbackProcessor::mImpulse[5] = {0.5f, 0.9f, 0.0f, -0.9f, -0.5f};
+
+// ====================================================================================
+class EchoAnalyzer : public LoopbackProcessor {
+public:
+
+ EchoAnalyzer() : LoopbackProcessor() {
+ audioRecorder.allocate(NUM_SECONDS * SAMPLE_RATE);
+ }
+
+ void setGain(float gain) {
+ mGain = gain;
+ }
+
+ float getGain() {
+ return mGain;
+ }
+
+ void report() override {
+
+ const float *needle = s_Impulse;
+ int needleSize = (int)(sizeof(s_Impulse) / sizeof(float));
+ float *haystack = audioRecorder.getData();
+ int haystackSize = audioRecorder.size();
+ int result = measureLatencyFromEchos(haystack, haystackSize,
+ needle, needleSize,
+ &latencyReport);
+ if (latencyReport.confidence < 0.01) {
+ printf(" ERROR - confidence too low = %f\n", latencyReport.confidence);
+ } else {
+ double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
+ printf(RESULT_TAG "latency.frames = %8.2f\n", latencyReport.latencyInFrames);
+ printf(RESULT_TAG "latency.msec = %8.2f\n", latencyMillis);
+ printf(RESULT_TAG "latency.confidence = %8.6f\n", latencyReport.confidence);
+ }
+ }
+
+ void process(float *inputData, int inputChannelCount,
+ float *outputData, int outputChannelCount,
+ int numFrames) override {
+ int channelsValid = std::min(inputChannelCount, outputChannelCount);
+
+ audioRecorder.write(inputData, inputChannelCount, numFrames);
+
+ if (mLoopCounter < mLoopStart) {
+ // Output silence at the beginning.
+ for (int i = 0; i < numFrames; i++) {
+ int ic;
+ for (ic = 0; ic < outputChannelCount; ic++) {
+ outputData[ic] = 0;
+ }
+ inputData += inputChannelCount;
+ outputData += outputChannelCount;
+ }
+ } else if (mLoopCounter == mLoopStart) {
+ // Send a bipolar impulse that we can easily detect.
+ for (float sample : s_Impulse) {
+ *outputData = sample;
+ outputData += outputChannelCount;
+ }
+ } else {
+ // Echo input to output.
+ for (int i = 0; i < numFrames; i++) {
+ int ic;
+ for (ic = 0; ic < channelsValid; ic++) {
+ outputData[ic] = inputData[ic] * mGain;
+ }
+ for (; ic < outputChannelCount; ic++) {
+ outputData[ic] = 0;
+ }
+ inputData += inputChannelCount;
+ outputData += outputChannelCount;
+ }
+ }
+
+ mLoopCounter++;
+ }
+
+private:
+ int mLoopCounter = 0;
+ int mLoopStart = 1000;
+ float mGain = 1.0f;
+
+ AudioRecording audioRecorder;
+ LatencyReport latencyReport;
+};
+
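
Because each round trip through the loopback multiplies the signal by mGain, the nth echo arrives at roughly gain^n; the -g option therefore controls how long the echo train stays above the analyzer's noise floor. A back-of-envelope check, where both the 0.95 gain (the patch's default is 1.0) and the 0.01 usability floor are illustrative assumptions:

```cpp
#include <cmath>
#include <cstdio>

int main() {
    const double gain = 0.95;        // hypothetical -g value
    const double usableFloor = 0.01; // assumed smallest usable echo amplitude
    // Solve gain^n = usableFloor for n.
    int n = (int) std::ceil(std::log(usableFloor) / std::log(gain));
    printf("usable echoes at gain %.2f (floor %.2f): %d\n", gain, usableFloor, n);
    return 0;
}
```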
+
+// ====================================================================================
+class SineAnalyzer : public LoopbackProcessor {
+public:
+
+ void report() override {
+ double magnitude = calculateMagnitude();
+ printf("sine magnitude = %7.5f\n", magnitude);
+ printf("sine frames = %7d\n", mFrameCounter);
+ printf("sine frequency = %7.1f Hz\n", mFrequency);
+ }
+
+ double calculateMagnitude(double *phasePtr = NULL) {
+ if (mFrameCounter == 0) {
+ return 0.0;
+ }
+ double sinMean = mSinAccumulator / mFrameCounter;
+ double cosMean = mCosAccumulator / mFrameCounter;
+ double magnitude = 2.0 * sqrt( (sinMean * sinMean) + (cosMean * cosMean ));
+ if( phasePtr != NULL )
+ {
+ double phase = atan2( sinMean, cosMean );
+ *phasePtr = phase;
+ }
+ return magnitude;
+ }
+
+ void process(float *inputData, int inputChannelCount,
+ float *outputData, int outputChannelCount,
+ int numFrames) override {
+ double phaseIncrement = 2.0 * M_PI * mFrequency / getSampleRate();
+
+ for (int i = 0; i < numFrames; i++) {
+ // Multiply input by sine/cosine
+ float sample = inputData[i * inputChannelCount];
+ float sinOut = sinf(mPhase);
+ mSinAccumulator += sample * sinOut;
+ mCosAccumulator += sample * cosf(mPhase);
+ // Advance and wrap phase
+ mPhase += phaseIncrement;
+ if (mPhase > (2.0 * M_PI)) {
+ mPhase -= (2.0 * M_PI);
+ }
+
+ // Output sine wave so we can measure it.
+ outputData[i * outputChannelCount] = sinOut;
+ }
+ mFrameCounter += numFrames;
+
+ double magnitude = calculateMagnitude();
+ if (mWaiting) {
+ if (magnitude < 0.001) {
+ // discard silence
+ mFrameCounter = 0;
+ mSinAccumulator = 0.0;
+ mCosAccumulator = 0.0;
+ } else {
+ mWaiting = false;
+ }
+ }
+ };
+
+ void setFrequency(int32_t frequency) {
+ mFrequency = frequency;
+ }
+
+ int32_t getFrequency() {
+ return mFrequency;
+ }
+
+private:
+ double mFrequency = 300.0;
+ double mPhase = 0.0;
+ int32_t mFrameCounter = 0;
+ double mSinAccumulator = 0.0;
+ double mCosAccumulator = 0.0;
+ bool mWaiting = true;
+};
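
SineAnalyzer implements a single-bin Fourier measurement: averaging input·sin and input·cos over whole cycles and taking 2·√(sinMean² + cosMean²) recovers the amplitude at the reference frequency regardless of phase. A standalone check of that identity:

```cpp
#include <cmath>
#include <cstdio>

int main() {
    const double sampleRate = 48000.0, frequency = 300.0, amplitude = 0.25;
    const int numFrames = 48000;  // one second = a whole number of cycles
    const double phaseIncrement = 2.0 * M_PI * frequency / sampleRate;
    double sinAcc = 0.0, cosAcc = 0.0, phase = 0.0;
    for (int i = 0; i < numFrames; i++) {
        double sample = amplitude * std::sin(phase + 0.3);  // arbitrary phase offset
        sinAcc += sample * std::sin(phase);
        cosAcc += sample * std::cos(phase);
        phase += phaseIncrement;
    }
    double sinMean = sinAcc / numFrames, cosMean = cosAcc / numFrames;
    printf("magnitude = %7.5f\n",
           2.0 * std::sqrt(sinMean * sinMean + cosMean * cosMean));  // ~0.25
    return 0;
}
```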
// TODO make this a class that manages its own buffer allocation
struct LoopbackData {
- AAudioStream *inputStream = nullptr;
- int32_t inputFramesMaximum = 0;
- int16_t *inputData = nullptr;
- float *conversionBuffer = nullptr;
- int32_t actualInputChannelCount = 0;
- int32_t actualOutputChannelCount = 0;
- int32_t inputBuffersToDiscard = 10;
+ AAudioStream *inputStream = nullptr;
+ int32_t inputFramesMaximum = 0;
+ int16_t *inputData = nullptr;
+ float *conversionBuffer = nullptr;
+ int32_t actualInputChannelCount = 0;
+ int32_t actualOutputChannelCount = 0;
+ int32_t inputBuffersToDiscard = 10;
- aaudio_result_t inputError;
- LoopbackProcessor loopbackProcessor;
- AudioRecorder audioRecorder;
+ aaudio_result_t inputError;
+ SineAnalyzer sineAnalyzer;
+ EchoAnalyzer echoAnalyzer;
+ LoopbackProcessor *loopbackProcessor;
};
static void convertPcm16ToFloat(const int16_t *source,
@@ -248,6 +505,7 @@
int32_t numFrames
) {
(void) outputStream;
+ aaudio_data_callback_result_t result = AAUDIO_CALLBACK_RESULT_CONTINUE;
LoopbackData *myData = (LoopbackData *) userData;
float *outputData = (float *) audioData;
@@ -266,6 +524,7 @@
numFrames, 0);
if (framesRead < 0) {
myData->inputError = framesRead;
+ result = AAUDIO_CALLBACK_RESULT_STOP;
} else if (framesRead > 0) {
myData->inputBuffersToDiscard--;
}
@@ -275,16 +534,13 @@
numFrames, 0);
if (framesRead < 0) {
myData->inputError = framesRead;
+ result = AAUDIO_CALLBACK_RESULT_STOP;
} else if (framesRead > 0) {
- // Process valid input data.
- myData->audioRecorder.record(myData->inputData,
- myData->actualInputChannelCount,
- framesRead);
int32_t numSamples = framesRead * myData->actualInputChannelCount;
convertPcm16ToFloat(myData->inputData, myData->conversionBuffer, numSamples);
- myData->loopbackProcessor.process(myData->conversionBuffer,
+ myData->loopbackProcessor->process(myData->conversionBuffer,
myData->actualInputChannelCount,
outputData,
myData->actualOutputChannelCount,
@@ -292,17 +548,25 @@
}
}
- return AAUDIO_CALLBACK_RESULT_CONTINUE;
+ return result;
}
+
static void usage() {
- printf("loopback: -b{burstsPerBuffer} -p{outputPerfMode} -P{inputPerfMode}\n");
- printf(" -b{burstsPerBuffer} for example 2 for double buffered\n");
- printf(" -p{outputPerfMode} set output AAUDIO_PERFORMANCE_MODE*\n");
- printf(" -P{inputPerfMode} set input AAUDIO_PERFORMANCE_MODE*\n");
+ printf("loopback: -n{numBursts} -p{outPerf} -P{inPerf} -t{test} -g{gain} -f{freq}\n");
+ printf(" -c{inputChannels}\n");
+ printf(" -f{freq} sine frequency\n");
+ printf(" -g{gain} recirculating loopback gain\n");
+ printf(" -m enable MMAP mode\n");
+    printf("          -n{numBursts} buffer size in bursts, for example 2 for double buffered\n");
+ printf(" -p{outPerf} set output AAUDIO_PERFORMANCE_MODE*\n");
+ printf(" -P{inPerf} set input AAUDIO_PERFORMANCE_MODE*\n");
printf(" n for _NONE\n");
printf(" l for _LATENCY\n");
printf(" p for _POWER_SAVING;\n");
+ printf(" -t{test} select test mode\n");
+ printf(" m for sine magnitude\n");
+ printf(" e for echo latency (default)\n");
printf("For example: loopback -b2 -pl -Pn\n");
}
@@ -320,12 +584,34 @@
mode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
break;
default:
- printf("ERROR invalue performance mode %c\n", c);
+                    printf("ERROR invalid performance mode %c\n", c);
break;
}
return mode;
}
+enum {
+ TEST_SINE_MAGNITUDE = 0,
+ TEST_ECHO_LATENCY,
+};
+
+static int parseTestMode(char c) {
+ int testMode = TEST_ECHO_LATENCY;
+ c = tolower(c);
+ switch (c) {
+ case 'm':
+ testMode = TEST_SINE_MAGNITUDE;
+ break;
+ case 'e':
+ testMode = TEST_ECHO_LATENCY;
+ break;
+ default:
+            printf("ERROR invalid test mode %c\n", c);
+ break;
+ }
+ return testMode;
+}
+
// ====================================================================================
// TODO break up this large main() function into smaller functions
int main(int argc, const char **argv)
@@ -334,7 +620,7 @@
LoopbackData loopbackData;
AAudioStream *outputStream = nullptr;
- const int requestedInputChannelCount = 1;
+ int requestedInputChannelCount = NUM_INPUT_CHANNELS;
const int requestedOutputChannelCount = AAUDIO_UNSPECIFIED;
const int requestedSampleRate = SAMPLE_RATE;
int actualSampleRate = 0;
@@ -342,6 +628,9 @@
const aaudio_format_t requestedOutputFormat = AAUDIO_FORMAT_PCM_FLOAT;
aaudio_format_t actualInputFormat;
aaudio_format_t actualOutputFormat;
+ int testMode = TEST_ECHO_LATENCY;
+ double frequency = 1000.0;
+ double gain = 1.0;
const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
//const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_SHARED;
@@ -363,7 +652,19 @@
if (arg[0] == '-') {
char option = arg[1];
switch (option) {
- case 'b':
+ case 'c':
+ requestedInputChannelCount = atoi(&arg[2]);
+ break;
+ case 'f':
+ frequency = atof(&arg[2]);
+ break;
+ case 'g':
+ gain = atof(&arg[2]);
+ break;
+ case 'm':
+ AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
+ break;
+ case 'n':
burstsPerBuffer = atoi(&arg[2]);
break;
case 'p':
@@ -372,16 +673,35 @@
case 'P':
inputPerformanceLevel = parsePerformanceMode(arg[2]);
break;
+ case 't':
+ testMode = parseTestMode(arg[2]);
+ break;
default:
usage();
+ exit(0);
break;
}
} else {
+ usage();
+ exit(0);
break;
}
}
- loopbackData.audioRecorder.allocate(NUM_SECONDS * SAMPLE_RATE);
+
+ switch(testMode) {
+ case TEST_SINE_MAGNITUDE:
+ loopbackData.sineAnalyzer.setFrequency(frequency);
+ loopbackData.loopbackProcessor = &loopbackData.sineAnalyzer;
+ break;
+ case TEST_ECHO_LATENCY:
+ loopbackData.echoAnalyzer.setGain(gain);
+ loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
+ break;
+ default:
+ exit(1);
+ break;
+ }
// Make printf print immediately so that debug info is not stuck
// in a buffer if we hang or crash.
@@ -431,47 +751,68 @@
printf(" channelCount: requested = %d, actual = %d\n", requestedInputChannelCount,
loopbackData.actualInputChannelCount);
printf(" framesPerBurst = %d\n", AAudioStream_getFramesPerBurst(loopbackData.inputStream));
+ printf(" bufferSize = %d\n",
+ AAudioStream_getBufferSizeInFrames(loopbackData.inputStream));
+ printf(" bufferCapacity = %d\n",
+ AAudioStream_getBufferCapacityInFrames(loopbackData.inputStream));
+
+ actualSharingMode = AAudioStream_getSharingMode(loopbackData.inputStream);
+ printf(" sharingMode: requested = %d, actual = %d\n",
+ requestedSharingMode, actualSharingMode);
actualInputFormat = AAudioStream_getFormat(loopbackData.inputStream);
- printf(" dataFormat: requested = %d, actual = %d\n", requestedInputFormat, actualInputFormat);
+ printf(" dataFormat: requested = %d, actual = %d\n",
+ requestedInputFormat, actualInputFormat);
assert(actualInputFormat == AAUDIO_FORMAT_PCM_I16);
+ printf(" is MMAP used? = %s\n", AAudioStream_isMMapUsed(loopbackData.inputStream)
+ ? "yes" : "no");
+
+
printf("Stream OUTPUT ---------------------\n");
// Check to see what kind of stream we actually got.
actualSampleRate = AAudioStream_getSampleRate(outputStream);
printf(" sampleRate: requested = %d, actual = %d\n", requestedSampleRate, actualSampleRate);
+ loopbackData.echoAnalyzer.setSampleRate(actualSampleRate);
loopbackData.actualOutputChannelCount = AAudioStream_getChannelCount(outputStream);
printf(" channelCount: requested = %d, actual = %d\n", requestedOutputChannelCount,
loopbackData.actualOutputChannelCount);
actualSharingMode = AAudioStream_getSharingMode(outputStream);
- printf(" sharingMode: requested = %d, actual = %d\n", requestedSharingMode, actualSharingMode);
+ printf(" sharingMode: requested = %d, actual = %d\n",
+ requestedSharingMode, actualSharingMode);
// This is the number of frames that are read in one chunk by a DMA controller
// or a DSP or a mixer.
framesPerBurst = AAudioStream_getFramesPerBurst(outputStream);
printf(" framesPerBurst = %d\n", framesPerBurst);
- printf(" bufferCapacity = %d\n", AAudioStream_getBufferCapacityInFrames(outputStream));
-
- actualOutputFormat = AAudioStream_getFormat(outputStream);
- printf(" dataFormat: requested = %d, actual = %d\n", requestedOutputFormat, actualOutputFormat);
- assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
-
- // Allocate a buffer for the audio data.
- loopbackData.inputFramesMaximum = 32 * framesPerBurst;
-
- loopbackData.inputData = new int16_t[loopbackData.inputFramesMaximum * loopbackData.actualInputChannelCount];
- loopbackData.conversionBuffer = new float[loopbackData.inputFramesMaximum *
- loopbackData.actualInputChannelCount];
-
result = AAudioStream_setBufferSizeInFrames(outputStream, burstsPerBuffer * framesPerBurst);
if (result < 0) { // may be positive buffer size
fprintf(stderr, "ERROR - AAudioStream_setBufferSize() returned %d\n", result);
goto finish;
}
- printf("AAudioStream_setBufferSize() actual = %d\n",result);
+ printf(" bufferSize = %d\n", AAudioStream_getBufferSizeInFrames(outputStream));
+ printf(" bufferCapacity = %d\n", AAudioStream_getBufferCapacityInFrames(outputStream));
+
+ actualOutputFormat = AAudioStream_getFormat(outputStream);
+ printf(" dataFormat: requested = %d, actual = %d\n",
+ requestedOutputFormat, actualOutputFormat);
+ assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
+
+ printf(" is MMAP used? = %s\n", AAudioStream_isMMapUsed(outputStream)
+ ? "yes" : "no");
+
+ // Allocate a buffer for the audio data.
+ loopbackData.inputFramesMaximum = 32 * framesPerBurst;
+ loopbackData.inputBuffersToDiscard = 100;
+
+ loopbackData.inputData = new int16_t[loopbackData.inputFramesMaximum
+ * loopbackData.actualInputChannelCount];
+ loopbackData.conversionBuffer = new float[loopbackData.inputFramesMaximum *
+ loopbackData.actualInputChannelCount];
+
// Start output first so input stream runs low.
result = AAudioStream_requestStart(outputStream);
@@ -500,18 +841,13 @@
printf("framesRead = %d\n", (int) AAudioStream_getFramesRead(outputStream));
printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(outputStream));
- latency = loopbackData.loopbackProcessor.calculateAverageLatency(&deviation);
- printf("measured peak = %8.5f\n", loopbackData.loopbackProcessor.getMaxAmplitude());
- printf("threshold = %8.5f\n", INPUT_PEAK_THRESHOLD);
- printf("measured average = %8.5f\n", loopbackData.loopbackProcessor.getAverageAmplitude());
- printf("# latency measurements = %d\n", loopbackData.loopbackProcessor.getMeasurementCount());
- printf("measured latency = %8.2f +/- %4.5f frames\n", latency, deviation);
- printf("measured latency = %8.2f msec <===== !!\n", (1000.0 * latency / actualSampleRate));
+ loopbackData.loopbackProcessor->report();
- {
- int written = loopbackData.audioRecorder.save(FILENAME);
- printf("wrote %d samples to %s\n", written, FILENAME);
- }
+// {
+// int written = loopbackData.audioRecorder.save(FILENAME);
+// printf("wrote %d mono samples to %s on Android device\n", written, FILENAME);
+// }
+
finish:
AAudioStream_close(outputStream);
@@ -521,7 +857,13 @@
delete[] outputData;
AAudioStreamBuilder_delete(builder);
- printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
- return (result != AAUDIO_OK) ? EXIT_FAILURE : EXIT_SUCCESS;
+ printf(RESULT_TAG "error = %d = %s\n", result, AAudio_convertResultToText(result));
+ if ((result != AAUDIO_OK)) {
+ printf("error %d = %s\n", result, AAudio_convertResultToText(result));
+ return EXIT_FAILURE;
+ } else {
+ printf("SUCCESS\n");
+ return EXIT_SUCCESS;
+ }
}
diff --git a/media/libaaudio/examples/utils/dummy.cpp b/media/libaaudio/examples/utils/dummy.cpp
new file mode 100644
index 0000000..8ef7e36
--- /dev/null
+++ b/media/libaaudio/examples/utils/dummy.cpp
@@ -0,0 +1,5 @@
+/**
+ * Dummy file needed to get Android Studio to scan this folder.
+ */
+
+int g_DoNotUseThisVariable = 0;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 27c121f..92399f1 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -311,8 +311,14 @@
int64_t mMinCttsOffsetTicks;
int64_t mMaxCttsOffsetTicks;
- // Save the last 10 frames' timestamp for debug.
- std::list<std::pair<int64_t, int64_t>> mTimestampDebugHelper;
+ // Save the last 10 frames' timestamp and frame type for debug.
+ struct TimestampDebugHelperEntry {
+ int64_t pts;
+ int64_t dts;
+ std::string frameType;
+ };
+
+ std::list<TimestampDebugHelperEntry> mTimestampDebugHelper;
// Sequence parameter set or picture parameter set
struct AVCParamSet {
@@ -2543,12 +2549,12 @@
}
void MPEG4Writer::Track::dumpTimeStamps() {
- ALOGE("Dumping %s track's last 10 frames timestamp ", getTrackType());
+    ALOGE("Dumping %s track's last 10 frames' timestamps and frame types", getTrackType());
std::string timeStampString;
- for (std::list<std::pair<int64_t, int64_t>>::iterator num = mTimestampDebugHelper.begin();
- num != mTimestampDebugHelper.end(); ++num) {
- timeStampString += "(" + std::to_string(num->first)+
- "us, " + std::to_string(num->second) + "us) ";
+ for (std::list<TimestampDebugHelperEntry>::iterator entry = mTimestampDebugHelper.begin();
+ entry != mTimestampDebugHelper.end(); ++entry) {
+ timeStampString += "(" + std::to_string(entry->pts)+
+ "us, " + std::to_string(entry->dts) + "us " + entry->frameType + ") ";
}
ALOGE("%s", timeStampString.c_str());
}
@@ -2758,9 +2764,9 @@
previousPausedDurationUs += pausedDurationUs - lastDurationUs;
mResumed = false;
}
- std::pair<int64_t, int64_t> timestampPair;
+ TimestampDebugHelperEntry timestampDebugEntry;
timestampUs -= previousPausedDurationUs;
- timestampPair.first = timestampUs;
+ timestampDebugEntry.pts = timestampUs;
if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
copy->release();
mSource->stop();
@@ -2790,6 +2796,14 @@
}
mLastDecodingTimeUs = decodingTimeUs;
+ timestampDebugEntry.dts = decodingTimeUs;
+ timestampDebugEntry.frameType = isSync ? "Key frame" : "Non-Key frame";
+ // Insert the timestamp into the mTimestampDebugHelper
+ if (mTimestampDebugHelper.size() >= kTimestampDebugCount) {
+ mTimestampDebugHelper.pop_front();
+ }
+ mTimestampDebugHelper.push_back(timestampDebugEntry);
+
cttsOffsetTimeUs =
timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
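
The pop_front/push_back pair above is what bounds mTimestampDebugHelper to the last kTimestampDebugCount frames (10, per the dump message; the constant itself is defined outside this hunk). The pattern in isolation, with the count assumed to be 10:

```cpp
#include <cstdint>
#include <cstdio>
#include <list>
#include <string>

struct TimestampDebugHelperEntry {
    int64_t pts;
    int64_t dts;
    std::string frameType;
};

static const size_t kTimestampDebugCount = 10;  // assumed from the dump message

static void pushEntry(std::list<TimestampDebugHelperEntry>& history,
                      const TimestampDebugHelperEntry& entry) {
    if (history.size() >= kTimestampDebugCount) {
        history.pop_front();  // drop the oldest frame
    }
    history.push_back(entry);
}

int main() {
    std::list<TimestampDebugHelperEntry> history;
    for (int64_t i = 0; i < 25; i++) {
        pushEntry(history, {i, i, (i % 5 == 0) ? "Key frame" : "Non-Key frame"});
    }
    printf("entries kept: %zu, oldest pts: %lld\n",
           history.size(), (long long) history.front().pts);  // 10 and 15
    return 0;
}
```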
@@ -2919,12 +2933,6 @@
lastDurationUs = timestampUs - lastTimestampUs;
lastDurationTicks = currDurationTicks;
lastTimestampUs = timestampUs;
- timestampPair.second = timestampUs;
- // Insert the timestamp into the mTimestampDebugHelper
- if (mTimestampDebugHelper.size() >= kTimestampDebugCount) {
- mTimestampDebugHelper.pop_front();
- }
- mTimestampDebugHelper.push_back(timestampPair);
if (isSync != 0) {
addOneStssTableEntry(mStszTableEntries->count());
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index f1a55f1..f4428fe 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -1333,6 +1333,24 @@
ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p",
cmdCode, mHasControl, mEffect.unsafe_get());
+    // Reject commands reserved for internal use by the audio framework if they
+    // come from outside of audioserver.
+ switch(cmdCode) {
+ case EFFECT_CMD_ENABLE:
+ case EFFECT_CMD_DISABLE:
+ case EFFECT_CMD_SET_PARAM:
+ case EFFECT_CMD_SET_PARAM_DEFERRED:
+ case EFFECT_CMD_SET_PARAM_COMMIT:
+ case EFFECT_CMD_GET_PARAM:
+ break;
+ default:
+ if (cmdCode >= EFFECT_CMD_FIRST_PROPRIETARY) {
+ break;
+ }
+ android_errorWriteLog(0x534e4554, "62019992");
+ return BAD_VALUE;
+ }
+
if (cmdCode == EFFECT_CMD_ENABLE) {
if (*replySize < sizeof(int)) {
android_errorWriteLog(0x534e4554, "32095713");
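
This is the fix tracked by the android_errorWriteLog(0x534e4554, "62019992") entry above: commands arriving over binder are filtered through an explicit allow-list, with the vendor-defined range passed through. A condensed sketch of the same gate; the EFFECT_CMD_* values here are placeholders standing in for the real constants in <hardware/audio_effect.h>:

```cpp
#include <cassert>
#include <cstdint>

// Placeholder values; the real constants live in <hardware/audio_effect.h>.
enum : uint32_t {
    EFFECT_CMD_ENABLE = 0,
    EFFECT_CMD_DISABLE,
    EFFECT_CMD_SET_PARAM,
    EFFECT_CMD_SET_PARAM_DEFERRED,
    EFFECT_CMD_SET_PARAM_COMMIT,
    EFFECT_CMD_GET_PARAM,
    EFFECT_CMD_FIRST_PROPRIETARY = 0x10000,
};

static bool isAllowedFromOutside(uint32_t cmdCode) {
    switch (cmdCode) {
        case EFFECT_CMD_ENABLE:
        case EFFECT_CMD_DISABLE:
        case EFFECT_CMD_SET_PARAM:
        case EFFECT_CMD_SET_PARAM_DEFERRED:
        case EFFECT_CMD_SET_PARAM_COMMIT:
        case EFFECT_CMD_GET_PARAM:
            return true;  // explicitly allowed for external clients
        default:
            // Vendor-defined commands still pass; everything else is rejected.
            return cmdCode >= EFFECT_CMD_FIRST_PROPRIETARY;
    }
}

int main() {
    assert(isAllowedFromOutside(EFFECT_CMD_SET_PARAM));
    assert(isAllowedFromOutside(EFFECT_CMD_FIRST_PROPRIETARY + 5));
    assert(!isAllowedFromOutside(EFFECT_CMD_GET_PARAM + 1));  // a reserved code
    return 0;
}
```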
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c175259..20bd5e4 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -73,6 +73,7 @@
using binder::Status;
using hardware::ICamera;
using hardware::ICameraClient;
+using hardware::ICameraServiceProxy;
using hardware::ICameraServiceListener;
using hardware::camera::common::V1_0::CameraDeviceStatus;
using hardware::camera::common::V1_0::TorchModeStatus;
@@ -2213,7 +2214,7 @@
// Transition device state to OPEN
sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
- mCameraIdStr);
+ mCameraIdStr, mCameraFacing, mClientPackageName);
return OK;
}
@@ -2237,7 +2238,7 @@
// Transition device state to CLOSED
sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
- mCameraIdStr);
+ mCameraIdStr, mCameraFacing, mClientPackageName);
}
// Always stop watching, even if no camera op is active
if (mOpsCallback != NULL) {
@@ -2741,12 +2742,12 @@
onStatusUpdatedLocked(cameraId, status);
}
-void CameraService::updateProxyDeviceState(ICameraServiceProxy::CameraState newState,
- const String8& cameraId) {
+void CameraService::updateProxyDeviceState(int newState,
+ const String8& cameraId, int facing, const String16& clientName) {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
if (proxyBinder == nullptr) return;
String16 id(cameraId);
- proxyBinder->notifyCameraState(id, newState);
+    proxyBinder->notifyCameraState(id, facing, newState, clientName);
}
status_t CameraService::getTorchStatusLocked(
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 87603a3..6d5dde8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -19,6 +19,7 @@
#include <android/hardware/BnCameraService.h>
#include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/ICameraServiceProxy.h>
#include <cutils/multiuser.h>
#include <utils/Vector.h>
@@ -26,7 +27,6 @@
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
#include <binder/IAppOpsCallback.h>
-#include <camera/ICameraServiceProxy.h>
#include <hardware/camera.h>
#include <android/hardware/camera/common/1.0/types.h>
@@ -182,8 +182,10 @@
* the camera proxy service in the system service
*/
static void updateProxyDeviceState(
- ICameraServiceProxy::CameraState newState,
- const String8& cameraId);
+ int newState,
+ const String8& cameraId,
+ int facing,
+ const String16& clientName);
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
@@ -772,7 +774,7 @@
static StatusInternal mapToInternal(hardware::camera::common::V1_0::CameraDeviceStatus status);
static int32_t mapToInterface(StatusInternal status);
- static sp<ICameraServiceProxy> getCameraServiceProxy();
+ static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
static void pingCameraServiceProxy();
};
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index 075c2e3..a407d0b 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -256,8 +256,8 @@
disableMsgType(CAMERA_MSG_ALL_MSGS);
mHardware->stopPreview();
sCameraService->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_IDLE,
- String8::format("%d", mCameraId));
+ hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
+ mCameraIdStr, mCameraFacing, mClientPackageName);
mHardware->cancelPicture();
// Release the hardware resources.
mHardware->release();
@@ -418,8 +418,8 @@
result = mHardware->startPreview();
if (result == NO_ERROR) {
sCameraService->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_ACTIVE,
- String8::format("%d", mCameraId));
+ hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ mCameraIdStr, mCameraFacing, mClientPackageName);
}
return result;
}
@@ -461,8 +461,8 @@
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
sCameraService->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_IDLE,
- String8::format("%d", mCameraId));
+ hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
+ mCameraIdStr, mCameraFacing, mClientPackageName);
mPreviewBuffer.clear();
}
@@ -960,8 +960,8 @@
// Shutters only happen in response to takePicture, so mark device as
// idle now, until preview is restarted
sCameraService->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_IDLE,
- String8::format("%d", mCameraId));
+ hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
+ mCameraIdStr, mCameraFacing, mClientPackageName);
mLock.unlock();
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 32ee273..51ef160 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -248,7 +248,8 @@
void Camera2ClientBase<TClientBase>::notifyIdle() {
if (mDeviceActive) {
getCameraService()->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_IDLE, TClientBase::mCameraIdStr);
+ hardware::ICameraServiceProxy::CAMERA_STATE_IDLE, TClientBase::mCameraIdStr,
+ TClientBase::mCameraFacing, TClientBase::mClientPackageName);
}
mDeviceActive = false;
@@ -263,7 +264,8 @@
if (!mDeviceActive) {
getCameraService()->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_ACTIVE, TClientBase::mCameraIdStr);
+ hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE, TClientBase::mCameraIdStr,
+ TClientBase::mCameraFacing, TClientBase::mClientPackageName);
}
mDeviceActive = true;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index e022057..0807c0a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -138,13 +138,15 @@
requestQueueRet.description().c_str());
return DEAD_OBJECT;
}
+
+ std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
auto resultQueueRet = session->getCaptureResultMetadataQueue(
- [&queue = mResultMetadataQueue](const auto& descriptor) {
- queue = std::make_unique<ResultMetadataQueue>(descriptor);
- if (!queue->isValid() || queue->availableToWrite() <= 0) {
+ [&resQueue](const auto& descriptor) {
+ resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+ if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
ALOGE("HAL returns empty result metadata fmq, not use it");
- queue = nullptr;
- // Don't use the queue onwards.
+ resQueue = nullptr;
+                    // Don't use resQueue from here on.
}
});
if (!resultQueueRet.isOk()) {
@@ -153,7 +155,7 @@
return DEAD_OBJECT;
}
- mInterface = std::make_unique<HalInterface>(session, queue);
+ mInterface = new HalInterface(session, queue);
std::string providerType;
mVendorTagId = manager->getProviderTagIdLocked(mId.string());
@@ -182,7 +184,7 @@
mTagMonitor.initialize(mVendorTagId);
/** Start up request queue thread */
- mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get());
+ mRequestThread = new RequestThread(this, mStatusTracker, mInterface);
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -237,7 +239,7 @@
ALOGI("%s: E", __FUNCTION__);
status_t res = OK;
-
+ std::vector<wp<Camera3StreamInterface>> streams;
{
Mutex::Autolock l(mLock);
if (mStatus == STATUS_UNINITIALIZED) return res;
@@ -269,8 +271,13 @@
mRequestThread->requestExit();
}
- mOutputStreams.clear();
- mInputStream.clear();
+ streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ streams.push_back(mOutputStreams[i]);
+ }
+ if (mInputStream != nullptr) {
+ streams.push_back(mInputStream);
+ }
}
// Joining done without holding mLock, otherwise deadlocks may ensue
@@ -289,11 +296,8 @@
HalInterface* interface;
{
Mutex::Autolock l(mLock);
-
mRequestThread.clear();
mStatusTracker.clear();
- mBufferManager.clear();
-
interface = mInterface.get();
}
@@ -301,12 +305,25 @@
// wait on assorted callbacks,etc, to complete before it can return.
interface->close();
+ flushInflightRequests();
+
{
Mutex::Autolock l(mLock);
mInterface->clear();
+ mOutputStreams.clear();
+ mInputStream.clear();
+ mBufferManager.clear();
internalUpdateStatusLocked(STATUS_UNINITIALIZED);
}
+ for (auto& weakStream : streams) {
+ sp<Camera3StreamInterface> stream = weakStream.promote();
+ if (stream != nullptr) {
+ ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+ __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
+ }
+ }
+
ALOGI("%s: X", __FUNCTION__);
return res;
}
@@ -834,6 +851,13 @@
hardware::Return<void> Camera3Device::processCaptureResult(
const hardware::hidl_vec<
hardware::camera::device::V3_2::CaptureResult>& results) {
+ {
+ Mutex::Autolock l(mLock);
+ if (mStatus == STATUS_ERROR) {
+ // Per API contract, HAL should act as closed after device error
+ ALOGW("%s: received capture result in error state!", __FUNCTION__);
+ }
+ }
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
@@ -965,6 +989,13 @@
hardware::Return<void> Camera3Device::notify(
const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+ {
+ Mutex::Autolock l(mLock);
+ if (mStatus == STATUS_ERROR) {
+ // Per API contract, HAL should act as closed after device error
+ ALOGW("%s: received notify message in error state!", __FUNCTION__);
+ }
+ }
for (const auto& msg : msgs) {
notify(msg);
}
@@ -2434,6 +2465,53 @@
}
}
+void Camera3Device::flushInflightRequests() {
+ { // First return buffers cached in mInFlightMap
+ Mutex::Autolock l(mInFlightLock);
+ for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
+ const InFlightRequest &request = mInFlightMap.valueAt(idx);
+ returnOutputBuffers(request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0);
+ }
+ mInFlightMap.clear();
+ }
+
+ // Then return all inflight buffers not returned by HAL
+ std::vector<std::pair<int32_t, int32_t>> inflightKeys;
+ mInterface->getInflightBufferKeys(&inflightKeys);
+
+ int32_t inputStreamId = (mInputStream != nullptr) ? mInputStream->getId() : -1;
+ for (auto& pair : inflightKeys) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ buffer_handle_t* buffer;
+ status_t res = mInterface->popInflightBuffer(frameNumber, streamId, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
+ __FUNCTION__, frameNumber, streamId);
+ continue;
+ }
+
+ camera3_stream_buffer_t streamBuffer;
+ streamBuffer.buffer = buffer;
+ streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ streamBuffer.acquire_fence = -1;
+ streamBuffer.release_fence = -1;
+ if (streamId == inputStreamId) {
+ streamBuffer.stream = mInputStream->asHalStream();
+ res = mInputStream->returnInputBuffer(streamBuffer);
+ if (res != OK) {
+ ALOGE("%s: Can't return input buffer for frame %d to"
+ " its stream:%s (%d)", __FUNCTION__,
+ frameNumber, strerror(-res), res);
+ }
+ } else {
+ streamBuffer.stream = mOutputStreams.valueFor(streamId)->asHalStream();
+ returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
+ }
+ }
+}
+
void Camera3Device::insertResultLocked(CaptureResult *result,
uint32_t frameNumber) {
if (result == nullptr) return;
@@ -3349,6 +3427,20 @@
return res;
}
+void Camera3Device::HalInterface::getInflightBufferKeys(
+ std::vector<std::pair<int32_t, int32_t>>* out) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ out->clear();
+ out->reserve(mInflightBufferMap.size());
+ for (auto& pair : mInflightBufferMap) {
+ uint64_t key = pair.first;
+ int32_t streamId = key & 0xFFFFFFFF;
+ int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
+ out->push_back(std::make_pair(frameNumber, streamId));
+ }
+ return;
+}
+
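
getInflightBufferKeys unpacks the same 64-bit key that pushInflightBufferLocked builds below: frame number in the high 32 bits, stream ID in the low 32. A round-trip check of that packing; note it assumes non-negative IDs, since a negative streamId would sign-extend across the frame-number bits:

```cpp
#include <cassert>
#include <cstdint>
#include <utility>

static uint64_t packKey(int32_t frameNumber, int32_t streamId) {
    return static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
}

static std::pair<int32_t, int32_t> unpackKey(uint64_t key) {
    int32_t streamId = key & 0xFFFFFFFF;
    int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
    return std::make_pair(frameNumber, streamId);
}

int main() {
    uint64_t key = packKey(123456, 7);
    std::pair<int32_t, int32_t> result = unpackKey(key);
    assert(result.first == 123456 && result.second == 7);
    return 0;
}
```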
status_t Camera3Device::HalInterface::pushInflightBufferLocked(
int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer, int acquireFence) {
uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
@@ -3423,7 +3515,7 @@
Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
sp<StatusTracker> statusTracker,
- HalInterface* interface) :
+ sp<HalInterface> interface) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5549dd1..0251d62 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -265,6 +265,10 @@
status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
/*out*/ buffer_handle_t **buffer);
+ // Get a vector of (frameNumber, streamId) pair of currently inflight
+ // buffers
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
+
private:
camera3_device_t *mHal3Device;
sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
@@ -333,7 +337,7 @@
std::vector<std::pair<int, uint64_t>> mFreedBuffers;
};
- std::unique_ptr<HalInterface> mInterface;
+ sp<HalInterface> mInterface;
CameraMetadata mDeviceInfo;
@@ -624,7 +628,7 @@
RequestThread(wp<Camera3Device> parent,
sp<camera3::StatusTracker> statusTracker,
- HalInterface* interface);
+ sp<HalInterface> interface);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -774,7 +778,7 @@
wp<Camera3Device> mParent;
wp<camera3::StatusTracker> mStatusTracker;
- HalInterface* mInterface;
+ sp<HalInterface> mInterface;
wp<NotificationListener> mListener;
@@ -1023,6 +1027,10 @@
// Remove the in-flight request of the given index from mInFlightMap
// if it's no longer needed. It must only be called with mInFlightLock held.
void removeInFlightRequestIfReadyLocked(int idx);
+ // Remove all in-flight requests and return all buffers.
+ // This is used after HAL interface is closed to cleanup any request/buffers
+ // not returned by HAL.
+ void flushInflightRequests();
/**** End scope for mInFlightLock ****/
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 4eb15ad..35096eb 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -293,8 +293,9 @@
void Camera3InputStream::onBufferFreed(const wp<GraphicBuffer>& gb) {
const sp<GraphicBuffer> buffer = gb.promote();
if (buffer != nullptr) {
- if (mBufferFreedListener != nullptr) {
- mBufferFreedListener->onBufferFreed(mId, buffer->handle);
+ sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
+ if (callback != nullptr) {
+ callback->onBufferFreed(mId, buffer->handle);
}
} else {
ALOGE("%s: GraphicBuffer is freed before onBufferFreed callback finishes!", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index e15aa43..b02cd6a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -727,7 +727,7 @@
void Camera3OutputStream::onBuffersRemovedLocked(
const std::vector<sp<GraphicBuffer>>& removedBuffers) {
- Camera3StreamBufferFreedListener* callback = mBufferFreedListener;
+ sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
if (callback != nullptr) {
for (auto gb : removedBuffers) {
callback->onBufferFreed(mId, gb->handle);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 9297ac8..e77421a 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -523,6 +523,8 @@
return BAD_VALUE;
}
+ removeOutstandingBuffer(buffer);
+
/**
* TODO: Check that the state is valid first.
*
@@ -540,7 +542,6 @@
// buffer to be returned.
mOutputBufferReturnedSignal.signal();
- removeOutstandingBuffer(buffer);
return res;
}
@@ -591,13 +592,14 @@
return BAD_VALUE;
}
+ removeOutstandingBuffer(buffer);
+
status_t res = returnInputBufferLocked(buffer);
if (res == OK) {
fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false);
mInputBufferReturnedSignal.signal();
}
- removeOutstandingBuffer(buffer);
return res;
}
@@ -744,7 +746,7 @@
}
void Camera3Stream::setBufferFreedListener(
- Camera3StreamBufferFreedListener* listener) {
+ wp<Camera3StreamBufferFreedListener> listener) {
Mutex::Autolock l(mLock);
// Only allow set listener during stream configuration because stream is guaranteed to be IDLE
// at this state, so setBufferFreedListener won't collide with onBufferFreed callbacks
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index b6c8396..0940d62 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -371,7 +371,7 @@
// Setting listener will remove previous listener (if exists)
virtual void setBufferFreedListener(
- Camera3StreamBufferFreedListener* listener) override;
+ wp<Camera3StreamBufferFreedListener> listener) override;
/**
* Return if the buffer queue of the stream is abandoned.
@@ -416,7 +416,7 @@
android_dataspace dataSpace, camera3_stream_rotation_t rotation,
int setId);
- Camera3StreamBufferFreedListener* mBufferFreedListener;
+ wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
/**
* Interface to be implemented by derived classes
diff --git a/services/camera/libcameraservice/device3/Camera3StreamBufferFreedListener.h b/services/camera/libcameraservice/device3/Camera3StreamBufferFreedListener.h
index 478a752..104cd22 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamBufferFreedListener.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamBufferFreedListener.h
@@ -24,7 +24,7 @@
namespace camera3 {
-class Camera3StreamBufferFreedListener {
+class Camera3StreamBufferFreedListener : public virtual RefBase {
public:
// onBufferFreed is called when a buffer is no longer being managed
// by this stream. This will not be called in events when all
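
Making the listener inherit from RefBase is what lets Camera3Stream hold a wp<> and promote() before each callback (as the Camera3InputStream and Camera3OutputStream changes above now do), so a listener destroyed mid-flight is skipped rather than dereferenced. A minimal sketch of that pattern, compiled against libutils:

```cpp
#include <utils/RefBase.h>

using android::RefBase;
using android::sp;
using android::wp;

struct Listener : public virtual RefBase {
    void onBufferFreed() { /* handle the freed buffer */ }
};

static void notifyFreed(const wp<Listener>& weakListener) {
    sp<Listener> listener = weakListener.promote();  // nullptr if already gone
    if (listener != nullptr) {
        listener->onBufferFreed();
    }
}

int main() {
    sp<Listener> listener = new Listener();
    wp<Listener> weak(listener);
    notifyFreed(weak);   // delivered
    listener.clear();    // drop the only strong reference
    notifyFreed(weak);   // promote() fails; call is safely skipped
    return 0;
}
```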
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index c695a10..0544a1b 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -298,7 +298,7 @@
* Client is responsible to keep the listener object alive throughout the lifecycle of this
* Camera3Stream.
*/
- virtual void setBufferFreedListener(Camera3StreamBufferFreedListener* listener) = 0;
+ virtual void setBufferFreedListener(wp<Camera3StreamBufferFreedListener> listener) = 0;
};
} // namespace camera3