Merge "GenericSource: close unused file descriptor" into oc-dev
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 80aad2f..dfd5df7 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -1048,6 +1048,10 @@
bool haveVideo = false;
for (size_t i = 0; i < numTracks; ++i) {
sp<IMediaSource> source = extractor->getTrack(i);
+ if (source == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
+ continue;
+ }
const char *mime;
CHECK(source->getFormat()->findCString(
@@ -1110,6 +1114,10 @@
}
mediaSource = extractor->getTrack(i);
+ if (mediaSource == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
+ return -1;
+ }
}
}
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 8f9333a..2e1d240 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -171,7 +171,8 @@
mWriter = new MPEG2TSWriter(
this, &MyConvertingStreamSource::WriteDataWrapper);
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ size_t numTracks = extractor->countTracks();
+ for (size_t i = 0; i < numTracks; ++i) {
const sp<MetaData> &meta = extractor->getTrackMetaData(i);
const char *mime;
@@ -181,7 +182,12 @@
continue;
}
- CHECK_EQ(mWriter->addSource(extractor->getTrack(i)), (status_t)OK);
+ sp<IMediaSource> track = extractor->getTrack(i);
+ if (track == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, total tracks %zu\n", i, numTracks);
+ continue;
+ }
+ CHECK_EQ(mWriter->addSource(track), (status_t)OK);
}
CHECK_EQ(mWriter->start(), (status_t)OK);
diff --git a/include/ndk/NdkImageReader.h b/include/ndk/NdkImageReader.h
index a158da9..e3600c2 100644
--- a/include/ndk/NdkImageReader.h
+++ b/include/ndk/NdkImageReader.h
@@ -307,22 +307,38 @@
* for the consumer usage. All other parameters and the return values are identical to those passed
* to {@line AImageReader_new}.
*
- * @param usage0 specifies how the consumer will access the AImage, using combination of the
- * AHARDWAREBUFFER_USAGE0 flags described in {@link hardware_buffer.h}.
- * Passing {@link AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN} is equivalent to calling
- * {@link AImageReader_new} with the same parameters. Note that consumers that do not
- * require CPU access to the buffer should omit {@link
- * AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN} to improve performance.
- * @param usage1 specifies how the consumer will access the AImage, using combination of the
- * AHARDWAREBUFFER_USAGE1 flags described in {@link hardware_buffer.h}.
+ * @param usage specifies how the consumer will access the AImage, using combination of the
+ * AHARDWAREBUFFER_USAGE flags described in {@link hardware_buffer.h}.
+ * Passing {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN} is equivalent to calling
+ * {@link AImageReader_new} with the same parameters.
+ *
+ * Note that not all format and usage flag combinations are supported by the {@link AImageReader}.
+ * Below are the combinations supported by the {@link AImageReader}.
+ * <table>
+ * <tr>
+ * <th>Format</th>
+ * <th>Compatible usage flags</th>
+ * </tr>
+ * <tr>
+ * <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ * </td>
+ * <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
+ * </tr>
+ * <tr>
+ * <td>{@link AIMAGE_FORMAT_RGBA_8888}</td>
+ * <td>{@link AHARDWAREBUFFER_USAGE_VIDEO_ENCODE} or
+ * {@link AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE}, or combined</td>
+ * </tr>
+ * </table>
*
* @see AImage
* @see AImageReader_new
* @see AHardwareBuffer
*/
media_status_t AImageReader_newWithUsage(
- int32_t width, int32_t height, int32_t format, uint64_t usage0,
- uint64_t usage1, int32_t maxImages, /*out*/ AImageReader** reader);
+ int32_t width, int32_t height, int32_t format, uint64_t usage, int32_t maxImages,
+ /*out*/ AImageReader** reader);
/*
* Acquire the next {@link AImage} from the image reader's queue asynchronously.
diff --git a/media/libaaudio/examples/write_sine/jni/Android.mk b/media/libaaudio/examples/write_sine/jni/Android.mk
index 5a884e1..0bda008 100644
--- a/media/libaaudio/examples/write_sine/jni/Android.mk
+++ b/media/libaaudio/examples/write_sine/jni/Android.mk
@@ -4,7 +4,8 @@
LOCAL_MODULE_TAGS := tests
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
- frameworks/av/media/libaaudio/include
+ frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src
# NDK recommends using this kind of relative path instead of an absolute path.
LOCAL_SRC_FILES:= ../src/write_sine.cpp
diff --git a/media/libaaudio/examples/write_sine/src/SineGenerator.h b/media/libaaudio/examples/write_sine/src/SineGenerator.h
index f2eb984..64b772d 100644
--- a/media/libaaudio/examples/write_sine/src/SineGenerator.h
+++ b/media/libaaudio/examples/write_sine/src/SineGenerator.h
@@ -79,7 +79,7 @@
}
}
- double mAmplitude = 0.005; // unitless scaler
+ double mAmplitude = 0.05; // unitless scaler
double mPhase = 0.0;
double mPhaseIncrement = 440 * M_PI * 2 / 48000;
double mFrameRate = 48000;
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index df55c3f..9107a7c 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -22,8 +22,8 @@
#include <aaudio/AAudio.h>
#include "SineGenerator.h"
-#define SAMPLE_RATE 48000
-#define NUM_SECONDS 5
+#define SAMPLE_RATE 48000
+#define NUM_SECONDS 15
#define NANOS_PER_MICROSECOND ((int64_t)1000)
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index a7e32bd..cc0c3a4 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -26,7 +26,7 @@
#include <aaudio/AAudio.h>
#include "SineGenerator.h"
-#define NUM_SECONDS 5
+#define NUM_SECONDS 15
//#define SHARING_MODE AAUDIO_SHARING_MODE_EXCLUSIVE
#define SHARING_MODE AAUDIO_SHARING_MODE_SHARED
diff --git a/media/libaaudio/examples/write_sine/static/Android.mk b/media/libaaudio/examples/write_sine/static/Android.mk
index e4da6a8..3fee08a 100644
--- a/media/libaaudio/examples/write_sine/static/Android.mk
+++ b/media/libaaudio/examples/write_sine/static/Android.mk
@@ -4,6 +4,7 @@
LOCAL_MODULE_TAGS := examples
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/src \
frameworks/av/media/libaaudio/include
# NDK recommends using this kind of relative path instead of an absolute path.
diff --git a/media/libaaudio/src/Android.mk b/media/libaaudio/src/Android.mk
index b5bb75f..f43c0ad 100644
--- a/media/libaaudio/src/Android.mk
+++ b/media/libaaudio/src/Android.mk
@@ -39,6 +39,7 @@
utility/FixedBlockAdapter.cpp \
utility/FixedBlockReader.cpp \
utility/FixedBlockWriter.cpp \
+ utility/LinearRamp.cpp \
fifo/FifoBuffer.cpp \
fifo/FifoControllerBase.cpp \
client/AudioEndpoint.cpp \
@@ -93,6 +94,7 @@
utility/FixedBlockAdapter.cpp \
utility/FixedBlockReader.cpp \
utility/FixedBlockWriter.cpp \
+ utility/LinearRamp.cpp \
fifo/FifoBuffer.cpp \
fifo/FifoControllerBase.cpp \
client/AudioEndpoint.cpp \
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index af4b93a..810751a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -32,9 +32,10 @@
#include "binding/AAudioStreamConfiguration.h"
#include "binding/IAAudioService.h"
#include "binding/AAudioServiceMessage.h"
-#include "fifo/FifoBuffer.h"
-
#include "core/AudioStreamBuilder.h"
+#include "fifo/FifoBuffer.h"
+#include "utility/LinearRamp.h"
+
#include "AudioStreamInternal.h"
#define LOG_TIMESTAMPS 0
@@ -478,8 +479,9 @@
ALOGW("WARNING - processCommands() AAUDIO_SERVICE_EVENT_DISCONNECTED");
break;
case AAUDIO_SERVICE_EVENT_VOLUME:
- mVolume = message->event.dataDouble;
- ALOGD_IF(MYLOG_CONDITION, "processCommands() AAUDIO_SERVICE_EVENT_VOLUME %f", mVolume);
+ mVolumeRamp.setTarget((float) message->event.dataDouble);
+ ALOGD_IF(MYLOG_CONDITION, "processCommands() AAUDIO_SERVICE_EVENT_VOLUME %f",
+ message->event.dataDouble);
break;
default:
ALOGW("WARNING - processCommands() Unrecognized event = %d",
@@ -639,10 +641,10 @@
}
-// TODO this function needs a major cleanup.
aaudio_result_t AudioStreamInternal::writeNowWithConversion(const void *buffer,
int32_t numFrames) {
- // ALOGD_IF(MYLOG_CONDITION, "AudioStreamInternal::writeNowWithConversion(%p, %d)", buffer, numFrames);
+ // ALOGD_IF(MYLOG_CONDITION, "AudioStreamInternal::writeNowWithConversion(%p, %d)",
+ // buffer, numFrames);
WrappingBuffer wrappingBuffer;
uint8_t *source = (uint8_t *) buffer;
int32_t framesLeft = numFrames;
@@ -659,31 +661,67 @@
framesToWrite = framesAvailable;
}
int32_t numBytes = getBytesPerFrame() * framesToWrite;
- // TODO handle volume scaling
- if (getFormat() == mDeviceFormat) {
- // Copy straight through.
- memcpy(wrappingBuffer.data[partIndex], source, numBytes);
- } else if (getFormat() == AAUDIO_FORMAT_PCM_FLOAT
- && mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
- // Data conversion.
- AAudioConvert_floatToPcm16(
- (const float *) source,
- framesToWrite * getSamplesPerFrame(),
- (int16_t *) wrappingBuffer.data[partIndex]);
- } else if (getFormat() == AAUDIO_FORMAT_PCM_I16
- && mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- // Data conversion.
- AAudioConvert_pcm16ToFloat(
- (const int16_t *) source,
- framesToWrite * getSamplesPerFrame(),
- (float *) wrappingBuffer.data[partIndex]);
- } else {
- // TODO handle more conversions
- ALOGE("AudioStreamInternal::writeNowWithConversion() unsupported formats: %d, %d",
- getFormat(), mDeviceFormat);
- return AAUDIO_ERROR_UNEXPECTED_VALUE;
+ int32_t numSamples = framesToWrite * getSamplesPerFrame();
+ // Data conversion.
+ float levelFrom;
+ float levelTo;
+ bool ramping = mVolumeRamp.nextSegment(framesToWrite * getSamplesPerFrame(),
+ &levelFrom, &levelTo);
+ // The formats are validated when the stream is opened so we do not have to
+ // check for illegal combinations here.
+ if (getFormat() == AAUDIO_FORMAT_PCM_FLOAT) {
+ if (mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+ AAudio_linearRamp(
+ (const float *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else if (mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
+ if (ramping) {
+ AAudioConvert_floatToPcm16(
+ (const float *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else {
+ AAudioConvert_floatToPcm16(
+ (const float *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ numSamples,
+ levelTo);
+ }
+ }
+ } else if (getFormat() == AAUDIO_FORMAT_PCM_I16) {
+ if (mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+ if (ramping) {
+ AAudioConvert_pcm16ToFloat(
+ (const int16_t *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else {
+ AAudioConvert_pcm16ToFloat(
+ (const int16_t *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ numSamples,
+ levelTo);
+ }
+ } else if (mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
+ AAudio_linearRamp(
+ (const int16_t *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ }
}
-
source += numBytes;
framesLeft -= framesToWrite;
} else {
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 8244311..e550ba3 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -22,11 +22,11 @@
#include "binding/IAAudioService.h"
#include "binding/AudioEndpointParcelable.h"
+#include "binding/AAudioServiceInterface.h"
#include "client/IsochronousClockModel.h"
#include "client/AudioEndpoint.h"
#include "core/AudioStream.h"
-
-#include "binding/AAudioServiceInterface.h"
+#include "utility/LinearRamp.h"
using android::sp;
using android::IAAudioService;
@@ -154,7 +154,7 @@
int64_t mLastFramesRead = 0; // used to prevent retrograde motion
int32_t mFramesPerBurst; // frames per HAL transfer
int32_t mXRunCount = 0; // how many underrun events?
- float mVolume = 1.0; // volume that the server told us to use
+ LinearRamp mVolumeRamp;
AAudioServiceInterface &mServiceInterface; // abstract interface to the service
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 1bb9e53..96fd427 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -32,6 +32,7 @@
// Arbitrary and somewhat generous number of bursts.
#define DEFAULT_BURSTS_PER_BUFFER_CAPACITY 8
+static const bool FAST_TRACKS_ENABLED = true;
/*
* Create a stream that uses the AudioTrack.
@@ -69,7 +70,9 @@
samplesPerFrame, channelMask);
// TODO add more performance options
- audio_output_flags_t flags = (audio_output_flags_t) AUDIO_OUTPUT_FLAG_FAST;
+ audio_output_flags_t flags = FAST_TRACKS_ENABLED
+ ? AUDIO_OUTPUT_FLAG_FAST
+ : AUDIO_OUTPUT_FLAG_NONE;
int32_t frameCount = builder.getBufferCapacity();
ALOGD("AudioStreamTrack::open(), requested buffer capacity %d", frameCount);
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index efbbfc5..5fa228a 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,6 +27,11 @@
using namespace android;
+// This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
+// It is designed to allow occasional transient peaks.
+#define MAX_HEADROOM (1.41253754f)
+#define MIN_HEADROOM (0 - MAX_HEADROOM)
+
int32_t AAudioConvert_formatToSizeInBytes(aaudio_audio_format_t format) {
int32_t size = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
switch (format) {
@@ -42,24 +47,153 @@
return size;
}
-// TODO This similar to a function in audio_utils. Consider using that instead.
-void AAudioConvert_floatToPcm16(const float *source, int32_t numSamples, int16_t *destination) {
+
+// TODO call clamp16_from_float function in primitives.h
+static inline int16_t clamp16_from_float(float f) {
+ /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
+ * floating point significand. The normal shift is 3<<22, but the -15 offset
+ * is used to multiply by 32768.
+ */
+ static const float offset = (float)(3 << (22 - 15));
+ /* zero = (0x10f << 22) = 0x43c00000 (not directly used) */
+ static const int32_t limneg = (0x10f << 22) /*zero*/ - 32768; /* 0x43bf8000 */
+ static const int32_t limpos = (0x10f << 22) /*zero*/ + 32767; /* 0x43c07fff */
+
+ union {
+ float f;
+ int32_t i;
+ } u;
+
+ u.f = f + offset; /* recenter valid range */
+ /* Now the valid range is represented as integers between [limneg, limpos].
+ * Clamp using the fact that float representation (as an integer) is an ordered set.
+ */
+ if (u.i < limneg)
+ u.i = -32768;
+ else if (u.i > limpos)
+ u.i = 32767;
+ return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+}
+
+// Same but without clipping.
+// Converts a float in the range [-1.0f, +1.0f) to an int16_t in the range [-32768, +32767].
+static inline int16_t floatToInt16(float f) {
+ static const float offset = (float)(3 << (22 - 15));
+ union {
+ float f;
+ int32_t i;
+ } u;
+ u.f = f + offset; /* recenter valid range */
+ return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+}
+
+static float clipAndClampFloatToPcm16(float sample, float scaler) {
+ // Clip to valid range of a float sample to prevent excessive volume.
+ if (sample > MAX_HEADROOM) sample = MAX_HEADROOM;
+ else if (sample < MIN_HEADROOM) sample = MIN_HEADROOM;
+
+ // Scale and convert to a short.
+ float fval = sample * scaler;
+ return clamp16_from_float(fval);
+}
+
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numSamples,
+ float amplitude) {
+ float scaler = amplitude;
for (int i = 0; i < numSamples; i++) {
- float fval = source[i];
- fval += 1.0; // to avoid discontinuity at 0.0 caused by truncation
- fval *= 32768.0f;
- int32_t sample = (int32_t) fval;
- // clip to 16-bit range
- if (sample < 0) sample = 0;
- else if (sample > 0x0FFFF) sample = 0x0FFFF;
- sample -= 32768; // center at zero
- destination[i] = (int16_t) sample;
+ float sample = *source++;
+ *destination++ = clipAndClampFloatToPcm16(sample, scaler);
}
}
-void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples, float *destination) {
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1;
+ // divide by numFrames so that we almost reach amplitude2
+ float delta = (amplitude2 - amplitude1) / numFrames;
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ float sample = *source++;
+ *destination++ = clipAndClampFloatToPcm16(sample, scaler);
+ }
+ scaler += delta;
+ }
+}
+
+#define SHORT_SCALE 32768
+
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numSamples,
+ float amplitude) {
+ float scaler = amplitude / SHORT_SCALE;
for (int i = 0; i < numSamples; i++) {
- destination[i] = source[i] * (1.0f / 32768.0f);
+ destination[i] = source[i] * scaler;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1 / SHORT_SCALE;
+ float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ *destination++ = *source++ * scaler;
+ }
+ scaler += delta;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudio_linearRamp(const float *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1;
+ float delta = (amplitude2 - amplitude1) / numFrames;
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ float sample = *source++;
+
+ // Clip to valid range of a float sample to prevent excessive volume.
+ if (sample > MAX_HEADROOM) sample = MAX_HEADROOM;
+ else if (sample < MIN_HEADROOM) sample = MIN_HEADROOM;
+
+ *destination++ = sample * scaler;
+ }
+ scaler += delta;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudio_linearRamp(const int16_t *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1 / SHORT_SCALE;
+ float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ // No need to clip because int16_t range is inherently limited.
+ float sample = *source++ * scaler;
+ *destination++ = floatToInt16(sample);
+ }
+ scaler += delta;
}
}
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 3dc501e..0078cbb 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -35,9 +35,120 @@
*/
aaudio_result_t AAudioConvert_androidToAAudioResult(android::status_t status);
-void AAudioConvert_floatToPcm16(const float *source, int32_t numSamples, int16_t *destination);
+/**
+ * Convert an array of floats to an array of int16_t.
+ *
+ * @param source
+ * @param destination
+ * @param numSamples number of values in the array
+ * @param amplitude level between 0.0 and 1.0
+ */
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numSamples,
+ float amplitude);
-void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples, float *destination);
+/**
+ * Convert floats to int16_t and scale by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame AKA number of channels
+ * @param amplitude1 level at start of ramp, between 0.0 and 1.0
+ * @param amplitude2 level past end of ramp, between 0.0 and 1.0
+ */
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Convert int16_t array to float array ranging from -1.0 to +1.0.
+ * @param source
+ * @param destination
+ * @param numSamples
+ */
+//void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples,
+// float *destination);
+
+/**
+ *
+ * Convert int16_t array to a float array scaled to the range [-amplitude, +amplitude].
+ * @param source
+ * @param destination
+ * @param numSamples
+ * @param amplitude
+ */
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numSamples,
+ float amplitude);
+
+/**
+ * Convert int16_t to float and scale by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame AKA number of channels
+ * @param amplitude1 level at start of ramp, between 0.0 and 1.0
+ * @param amplitude2 level at end of ramp, between 0.0 and 1.0
+ */
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Scale floats by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame
+ * @param amplitude1
+ * @param amplitude2
+ */
+void AAudio_linearRamp(const float *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Scale int16_t's by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame
+ * @param amplitude1
+ * @param amplitude2
+ */
+void AAudio_linearRamp(const int16_t *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
/**
* Calculate the number of bytes and prevent numeric overflow.
diff --git a/media/libaaudio/src/utility/LinearRamp.cpp b/media/libaaudio/src/utility/LinearRamp.cpp
new file mode 100644
index 0000000..1714bbf
--- /dev/null
+++ b/media/libaaudio/src/utility/LinearRamp.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "LinearRamp.h"
+
+bool LinearRamp::isRamping() {
+ float target = mTarget.load();
+ if (target != mLevelTo) {
+ // Update target. Continue from previous level.
+ mLevelTo = target;
+ mRemaining = mLengthInFrames;
+ return true;
+ } else {
+ return mRemaining > 0;
+ }
+}
+
+bool LinearRamp::nextSegment(int32_t frames, float *levelFrom, float *levelTo) {
+ bool ramping = isRamping();
+ *levelFrom = mLevelFrom;
+ if (ramping) {
+ float level;
+ if (frames >= mRemaining) {
+ level = mLevelTo;
+ mRemaining = 0;
+ } else {
+ // Interpolate to a point along the full ramp.
+ level = mLevelFrom + (frames * (mLevelTo - mLevelFrom) / mRemaining);
+ mRemaining -= frames;
+ }
+ mLevelFrom = level; // for next ramp
+ *levelTo = level;
+ } else {
+ *levelTo = mLevelTo;
+ }
+ return ramping;
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/utility/LinearRamp.h b/media/libaaudio/src/utility/LinearRamp.h
new file mode 100644
index 0000000..ff09dce
--- /dev/null
+++ b/media/libaaudio/src/utility/LinearRamp.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AAUDIO_LINEAR_RAMP_H
+#define AAUDIO_LINEAR_RAMP_H
+
+#include <atomic>
+#include <stdint.h>
+
+/**
+ * Generate segments along a linear ramp.
+ * The ramp target can be updated from another thread.
+ * When the target is updated, a new ramp is started from the current position.
+ *
+ * The first ramp starts at 0.0.
+ *
+ */
+class LinearRamp {
+public:
+ LinearRamp() {
+ mTarget.store(1.0f);
+ }
+
+ void setLengthInFrames(int32_t frames) {
+ mLengthInFrames = frames;
+ }
+
+ int32_t getLengthInFrames() {
+ return mLengthInFrames;
+ }
+
+ /**
+ * This may be called by another thread.
+ * @param target
+ */
+ void setTarget(float target) {
+ mTarget.store(target);
+ }
+
+ float getTarget() {
+ return mTarget.load();
+ }
+
+ /**
+ * Force the nextSegment to start from this level.
+ *
+ * WARNING: this can cause a discontinuity if called while the ramp is being used.
+ * Only call this when setting the initial ramp.
+ *
+ * @param level
+ */
+ void forceCurrent(float level) {
+ mLevelFrom = level;
+ mLevelTo = level; // forces a ramp if it does not match target
+ }
+
+ float getCurrent() {
+ return mLevelFrom;
+ }
+
+ /**
+ * Get levels for next ramp segment.
+ *
+ * @param frames number of frames in the segment
+ * @param levelFrom pointer to starting amplitude
+ * @param levelTo pointer to ending amplitude
+ * @return true if ramp is still moving towards the target
+ */
+ bool nextSegment(int32_t frames, float *levelFrom, float *levelTo);
+
+private:
+
+ bool isRamping();
+
+ std::atomic<float> mTarget;
+
+ int32_t mLengthInFrames = 48000 / 50; // 20 msec at 48000 Hz
+ int32_t mRemaining = 0;
+ float mLevelFrom = 0.0f;
+ float mLevelTo = 0.0f;
+};
+
+
+#endif //AAUDIO_LINEAR_RAMP_H
diff --git a/media/libaaudio/tests/Android.mk b/media/libaaudio/tests/Android.mk
index 06c9364..01360b1 100644
--- a/media/libaaudio/tests/Android.mk
+++ b/media/libaaudio/tests/Android.mk
@@ -35,3 +35,15 @@
LOCAL_STATIC_LIBRARIES := libaaudio
LOCAL_MODULE := test_block_adapter
include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src
+LOCAL_SRC_FILES:= test_linear_ramp.cpp
+LOCAL_SHARED_LIBRARIES := libaudioclient libaudioutils libbinder \
+ libcutils liblog libmedia libutils
+LOCAL_STATIC_LIBRARIES := libaaudio
+LOCAL_MODULE := test_linear_ramp
+include $(BUILD_NATIVE_TEST)
diff --git a/media/libaaudio/tests/test_linear_ramp.cpp b/media/libaaudio/tests/test_linear_ramp.cpp
new file mode 100644
index 0000000..5c53982
--- /dev/null
+++ b/media/libaaudio/tests/test_linear_ramp.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "utility/AAudioUtilities.h"
+#include "utility/LinearRamp.h"
+
+
+TEST(test_linear_ramp, linear_ramp_segments) {
+ LinearRamp ramp;
+ const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+ float destination[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+
+ float levelFrom = -1.0f;
+ float levelTo = -1.0f;
+ ramp.setLengthInFrames(8);
+ ramp.setTarget(8.0f);
+
+ ASSERT_EQ(8, ramp.getLengthInFrames());
+
+ bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(0.0f, levelFrom);
+ ASSERT_EQ(4.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(0.0f, destination[0]);
+ ASSERT_EQ(1.0f, destination[1]);
+ ASSERT_EQ(2.0f, destination[2]);
+ ASSERT_EQ(3.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(4.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(4.0f, destination[0]);
+ ASSERT_EQ(5.0f, destination[1]);
+ ASSERT_EQ(6.0f, destination[2]);
+ ASSERT_EQ(7.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(0, ramping);
+ ASSERT_EQ(8.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(8.0f, destination[0]);
+ ASSERT_EQ(8.0f, destination[1]);
+ ASSERT_EQ(8.0f, destination[2]);
+ ASSERT_EQ(8.0f, destination[3]);
+
+};
+
+
+TEST(test_linear_ramp, linear_ramp_forced) {
+ LinearRamp ramp;
+ const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+ float destination[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+
+ float levelFrom = -1.0f;
+ float levelTo = -1.0f;
+ ramp.setLengthInFrames(4);
+ ramp.setTarget(8.0f);
+ ramp.forceCurrent(4.0f);
+ ASSERT_EQ(4.0f, ramp.getCurrent());
+
+ bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(4.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(4.0f, destination[0]);
+ ASSERT_EQ(5.0f, destination[1]);
+ ASSERT_EQ(6.0f, destination[2]);
+ ASSERT_EQ(7.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(0, ramping);
+ ASSERT_EQ(8.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(8.0f, destination[0]);
+ ASSERT_EQ(8.0f, destination[1]);
+ ASSERT_EQ(8.0f, destination[2]);
+ ASSERT_EQ(8.0f, destination[3]);
+
+};
+
diff --git a/media/libaudiohal/Android.mk b/media/libaudiohal/Android.mk
index 68a1f7b..e592169 100644
--- a/media/libaudiohal/Android.mk
+++ b/media/libaudiohal/Android.mk
@@ -5,30 +5,37 @@
LOCAL_SHARED_LIBRARIES := \
libcutils \
liblog \
- libutils
+ libutils \
+ libhardware
+
+LOCAL_SRC_FILES := \
+ DeviceHalLocal.cpp \
+ DevicesFactoryHalHybrid.cpp \
+ DevicesFactoryHalLocal.cpp \
+ StreamHalLocal.cpp
+
+LOCAL_CFLAGS := -Wall -Werror
ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL), true)
# Use audiohal directly w/o hwbinder middleware.
# This is for performance comparison and debugging only.
-LOCAL_SRC_FILES := \
- DeviceHalLocal.cpp \
- DevicesFactoryHalLocal.cpp \
+LOCAL_SRC_FILES += \
EffectBufferHalLocal.cpp \
- EffectHalLocal.cpp \
EffectsFactoryHalLocal.cpp \
- StreamHalLocal.cpp
+ EffectHalLocal.cpp
LOCAL_SHARED_LIBRARIES += \
- libeffects \
- libhardware
+ libeffects
+
+LOCAL_CFLAGS += -DUSE_LEGACY_LOCAL_AUDIO_HAL
else # if !USE_LEGACY_LOCAL_AUDIO_HAL
-LOCAL_SRC_FILES := \
+LOCAL_SRC_FILES += \
ConversionHelperHidl.cpp \
- HalDeathHandlerHidl.cpp \
+ HalDeathHandlerHidl.cpp \
DeviceHalHidl.cpp \
DevicesFactoryHalHidl.cpp \
EffectBufferHalHidl.cpp \
@@ -60,6 +67,4 @@
LOCAL_MODULE := libaudiohal
-LOCAL_CFLAGS := -Wall -Werror
-
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.cpp b/media/libaudiohal/DevicesFactoryHalHidl.cpp
index fc2645e..31da263 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/DevicesFactoryHalHidl.cpp
@@ -33,11 +33,6 @@
namespace android {
-// static
-sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
- return new DevicesFactoryHalHidl();
-}
-
DevicesFactoryHalHidl::DevicesFactoryHalHidl() {
mDevicesFactory = IDevicesFactory::getService();
if (mDevicesFactory != 0) {
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.h b/media/libaudiohal/DevicesFactoryHalHidl.h
index a26dec1..e2f1ad1 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/DevicesFactoryHalHidl.h
@@ -36,7 +36,7 @@
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
private:
- friend class DevicesFactoryHalInterface;
+ friend class DevicesFactoryHalHybrid;
sp<IDevicesFactory> mDevicesFactory;
diff --git a/media/libaudiohal/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
new file mode 100644
index 0000000..454b03b
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DevicesFactoryHalHybrid"
+//#define LOG_NDEBUG 0
+
+#include "DevicesFactoryHalHybrid.h"
+#include "DevicesFactoryHalLocal.h"
+#ifndef USE_LEGACY_LOCAL_AUDIO_HAL
+#include "DevicesFactoryHalHidl.h"
+#endif
+
+namespace android {
+
+// static
+sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+ return new DevicesFactoryHalHybrid();
+}
+
+DevicesFactoryHalHybrid::DevicesFactoryHalHybrid()
+ : mLocalFactory(new DevicesFactoryHalLocal()),
+ mHidlFactory(
+#ifdef USE_LEGACY_LOCAL_AUDIO_HAL
+ nullptr
+#else
+ new DevicesFactoryHalHidl()
+#endif
+ ) {
+}
+
+DevicesFactoryHalHybrid::~DevicesFactoryHalHybrid() {
+}
+
+status_t DevicesFactoryHalHybrid::openDevice(const char *name, sp<DeviceHalInterface> *device) {
+ if (mHidlFactory != 0 && strcmp(AUDIO_HARDWARE_MODULE_ID_A2DP, name) != 0) {
+ return mHidlFactory->openDevice(name, device);
+ }
+ return mLocalFactory->openDevice(name, device);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/DevicesFactoryHalHybrid.h b/media/libaudiohal/DevicesFactoryHalHybrid.h
new file mode 100644
index 0000000..abd57d6
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalHybrid.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
+#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
+
+#include <media/audiohal/DevicesFactoryHalInterface.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
+{
+ public:
+ // Opens a device with the specified name. To close the device, it is
+ // necessary to release references to the returned object.
+ virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
+
+ private:
+ friend class DevicesFactoryHalInterface;
+
+ // Can not be constructed directly by clients.
+ DevicesFactoryHalHybrid();
+
+ virtual ~DevicesFactoryHalHybrid();
+
+ sp<DevicesFactoryHalInterface> mLocalFactory;
+ sp<DevicesFactoryHalInterface> mHidlFactory;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.cpp b/media/libaudiohal/DevicesFactoryHalLocal.cpp
index cd9a9e7..13a9acd 100644
--- a/media/libaudiohal/DevicesFactoryHalLocal.cpp
+++ b/media/libaudiohal/DevicesFactoryHalLocal.cpp
@@ -27,11 +27,6 @@
namespace android {
-// static
-sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
- return new DevicesFactoryHalLocal();
-}
-
static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
{
const hw_module_t *mod;
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.h b/media/libaudiohal/DevicesFactoryHalLocal.h
index 58ce4ff..b9d18ab 100644
--- a/media/libaudiohal/DevicesFactoryHalLocal.h
+++ b/media/libaudiohal/DevicesFactoryHalLocal.h
@@ -33,7 +33,7 @@
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
private:
- friend class DevicesFactoryHalInterface;
+ friend class DevicesFactoryHalHybrid;
// Can not be constructed directly by clients.
DevicesFactoryHalLocal() {}
diff --git a/media/libaudiohal/EffectBufferHalHidl.cpp b/media/libaudiohal/EffectBufferHalHidl.cpp
index d6a41a2..8b5201b 100644
--- a/media/libaudiohal/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/EffectBufferHalHidl.cpp
@@ -47,7 +47,7 @@
status_t EffectBufferHalInterface::mirror(
void* external, size_t size, sp<EffectBufferHalInterface>* buffer) {
sp<EffectBufferHalInterface> tempBuffer = new EffectBufferHalHidl(size);
- status_t result = reinterpret_cast<EffectBufferHalHidl*>(tempBuffer.get())->init();
+ status_t result = static_cast<EffectBufferHalHidl*>(tempBuffer.get())->init();
if (result == OK) {
tempBuffer->setExternalData(external);
*buffer = tempBuffer;
@@ -56,7 +56,8 @@
}
EffectBufferHalHidl::EffectBufferHalHidl(size_t size)
- : mBufferSize(size), mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
+ : mBufferSize(size), mFrameCountChanged(false),
+ mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
mHidlBuffer.id = makeUniqueId();
mHidlBuffer.frameCount = 0;
}
@@ -107,6 +108,13 @@
void EffectBufferHalHidl::setFrameCount(size_t frameCount) {
mHidlBuffer.frameCount = frameCount;
mAudioBuffer.frameCount = frameCount;
+ mFrameCountChanged = true;
+}
+
+bool EffectBufferHalHidl::checkFrameCountChange() {
+ bool result = mFrameCountChanged;
+ mFrameCountChanged = false;
+ return result;
}
void EffectBufferHalHidl::setExternalData(void* external) {
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
index 6e9fd0b..66a81c2 100644
--- a/media/libaudiohal/EffectBufferHalHidl.h
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -37,6 +37,7 @@
virtual void setExternalData(void* external);
virtual void setFrameCount(size_t frameCount);
+ virtual bool checkFrameCountChange();
virtual void update();
virtual void commit();
@@ -51,6 +52,7 @@
static uint64_t makeUniqueId();
const size_t mBufferSize;
+ bool mFrameCountChanged;
void* mExternalData;
AudioBuffer mHidlBuffer;
sp<IMemory> mMemory;
diff --git a/media/libaudiohal/EffectBufferHalLocal.cpp b/media/libaudiohal/EffectBufferHalLocal.cpp
index 9fe2c7b..7951c8e 100644
--- a/media/libaudiohal/EffectBufferHalLocal.cpp
+++ b/media/libaudiohal/EffectBufferHalLocal.cpp
@@ -39,13 +39,13 @@
EffectBufferHalLocal::EffectBufferHalLocal(size_t size)
: mOwnBuffer(new uint8_t[size]),
- mBufferSize(size),
+ mBufferSize(size), mFrameCountChanged(false),
mAudioBuffer{0, {mOwnBuffer.get()}} {
}
EffectBufferHalLocal::EffectBufferHalLocal(void* external, size_t size)
: mOwnBuffer(nullptr),
- mBufferSize(size),
+ mBufferSize(size), mFrameCountChanged(false),
mAudioBuffer{0, {external}} {
}
@@ -62,6 +62,7 @@
void EffectBufferHalLocal::setFrameCount(size_t frameCount) {
mAudioBuffer.frameCount = frameCount;
+ mFrameCountChanged = true;
}
void EffectBufferHalLocal::setExternalData(void* external) {
@@ -69,6 +70,12 @@
mAudioBuffer.raw = external;
}
+bool EffectBufferHalLocal::checkFrameCountChange() {
+ bool result = mFrameCountChanged;
+ mFrameCountChanged = false;
+ return result;
+}
+
void EffectBufferHalLocal::update() {
}
diff --git a/media/libaudiohal/EffectBufferHalLocal.h b/media/libaudiohal/EffectBufferHalLocal.h
index 202d878..d2b624b 100644
--- a/media/libaudiohal/EffectBufferHalLocal.h
+++ b/media/libaudiohal/EffectBufferHalLocal.h
@@ -32,6 +32,7 @@
virtual void setExternalData(void* external);
virtual void setFrameCount(size_t frameCount);
+ virtual bool checkFrameCountChange();
virtual void update();
virtual void commit();
@@ -43,6 +44,7 @@
std::unique_ptr<uint8_t[]> mOwnBuffer;
const size_t mBufferSize;
+ bool mFrameCountChanged;
audio_buffer_t mAudioBuffer;
// Can not be constructed directly by clients.
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 0babfda..b49b975 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -168,13 +168,20 @@
return OK;
}
+bool EffectHalHidl::needToResetBuffers() {
+ if (mBuffersChanged) return true;
+ bool inBufferFrameCountUpdated = mInBuffer->checkFrameCountChange();
+ bool outBufferFrameCountUpdated = mOutBuffer->checkFrameCountChange();
+ return inBufferFrameCountUpdated || outBufferFrameCountUpdated;
+}
+
status_t EffectHalHidl::processImpl(uint32_t mqFlag) {
if (mEffect == 0 || mInBuffer == 0 || mOutBuffer == 0) return NO_INIT;
status_t status;
if (!mStatusMQ && (status = prepareForProcessing()) != OK) {
return status;
}
- if (mBuffersChanged && (status = setProcessBuffers()) != OK) {
+ if (needToResetBuffers() && (status = setProcessBuffers()) != OK) {
return status;
}
// The data is already in the buffers, just need to flush it and wake up the server side.
@@ -202,8 +209,8 @@
status_t EffectHalHidl::setProcessBuffers() {
Return<Result> ret = mEffect->setProcessBuffers(
- reinterpret_cast<EffectBufferHalHidl*>(mInBuffer.get())->hidlBuffer(),
- reinterpret_cast<EffectBufferHalHidl*>(mOutBuffer.get())->hidlBuffer());
+ static_cast<EffectBufferHalHidl*>(mInBuffer.get())->hidlBuffer(),
+ static_cast<EffectBufferHalHidl*>(mOutBuffer.get())->hidlBuffer());
if (ret.isOk() && ret == Result::OK) {
mBuffersChanged = false;
return OK;
diff --git a/media/libaudiohal/EffectHalHidl.h b/media/libaudiohal/EffectHalHidl.h
index c8db36f..6ffdaf1 100644
--- a/media/libaudiohal/EffectHalHidl.h
+++ b/media/libaudiohal/EffectHalHidl.h
@@ -58,6 +58,9 @@
// Free resources on the remote side.
virtual status_t close();
+ // Whether it's a local implementation.
+ virtual bool isLocal() const { return false; }
+
uint64_t effectId() const { return mEffectId; }
static void effectDescriptorToHal(
@@ -92,6 +95,7 @@
status_t getConfigImpl(uint32_t cmdCode, uint32_t *replySize, void *pReplyData);
status_t prepareForProcessing();
+ bool needToResetBuffers();
status_t processImpl(uint32_t mqFlag);
status_t setConfigImpl(
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
diff --git a/media/libaudiohal/EffectHalLocal.h b/media/libaudiohal/EffectHalLocal.h
index b499462..693fb50 100644
--- a/media/libaudiohal/EffectHalLocal.h
+++ b/media/libaudiohal/EffectHalLocal.h
@@ -48,6 +48,9 @@
// Free resources on the remote side.
virtual status_t close();
+ // Whether it's a local implementation.
+ virtual bool isLocal() const { return true; }
+
effect_handle_t handle() const { return mHandle; }
private:
diff --git a/media/libaudiohal/StreamHalLocal.cpp b/media/libaudiohal/StreamHalLocal.cpp
index b25e518..05800a0 100644
--- a/media/libaudiohal/StreamHalLocal.cpp
+++ b/media/libaudiohal/StreamHalLocal.cpp
@@ -79,11 +79,13 @@
}
status_t StreamHalLocal::addEffect(sp<EffectHalInterface> effect) {
+ LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be added for a local stream");
return mStream->add_audio_effect(mStream,
static_cast<EffectHalLocal*>(effect.get())->handle());
}
status_t StreamHalLocal::removeEffect(sp<EffectHalInterface> effect) {
+ LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be removed for a local stream");
return mStream->remove_audio_effect(mStream,
static_cast<EffectHalLocal*>(effect.get())->handle());
}
@@ -162,7 +164,7 @@
// correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
// already running, because the destructor is invoked after the refcount has been atomically
// decremented.
- wp<StreamOutHalLocal> weakSelf(reinterpret_cast<StreamOutHalLocal*>(cookie));
+ wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
sp<StreamOutHalLocal> self = weakSelf.promote();
if (self == 0) return 0;
sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
diff --git a/media/libaudiohal/include/EffectBufferHalInterface.h b/media/libaudiohal/include/EffectBufferHalInterface.h
index 6fa7940..e862f6e 100644
--- a/media/libaudiohal/include/EffectBufferHalInterface.h
+++ b/media/libaudiohal/include/EffectBufferHalInterface.h
@@ -39,6 +39,8 @@
virtual void setExternalData(void* external) = 0;
virtual void setFrameCount(size_t frameCount) = 0;
+ virtual bool checkFrameCountChange() = 0; // returns whether frame count has been updated
+ // since the last call to this method
virtual void update() = 0; // copies data from the external buffer, noop for allocated buffers
virtual void commit() = 0; // copies data to the external buffer, noop for allocated buffers
diff --git a/media/libaudiohal/include/EffectHalInterface.h b/media/libaudiohal/include/EffectHalInterface.h
index 7f9a6fd..92622aa 100644
--- a/media/libaudiohal/include/EffectHalInterface.h
+++ b/media/libaudiohal/include/EffectHalInterface.h
@@ -52,6 +52,9 @@
// Free resources on the remote side.
virtual status_t close() = 0;
+ // Whether it's a local implementation.
+ virtual bool isLocal() const = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
EffectHalInterface() {}
diff --git a/media/libmedia/include/media/IMediaExtractor.h b/media/libmedia/include/media/IMediaExtractor.h
index cf1b9fb..ab40f53 100644
--- a/media/libmedia/include/media/IMediaExtractor.h
+++ b/media/libmedia/include/media/IMediaExtractor.h
@@ -34,6 +34,9 @@
DECLARE_META_INTERFACE(MediaExtractor);
virtual size_t countTracks() = 0;
+ // This function could return NULL IMediaSource even when index is within the
+ // track count returned by countTracks, since it's possible the track is malformed
+ // and it's not detected during countTracks call.
virtual sp<IMediaSource> getTrack(size_t index) = 0;
enum GetTrackMetaDataFlags {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index d048777..0d4c730 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -202,7 +202,8 @@
mPaused(false),
mPausedByClient(true),
mPausedForBuffering(false),
- mIsDrmProtected(false) {
+ mIsDrmProtected(false),
+ mDataSourceType(DATA_SOURCE_TYPE_NONE) {
clearFlushComplete();
}
@@ -225,6 +226,7 @@
msg->setObject("source", new StreamingSource(notify, source));
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_STREAM;
}
static bool IsHTTPLiveURL(const char *url) {
@@ -258,10 +260,12 @@
if (IsHTTPLiveURL(url)) {
source = new HTTPLiveSource(notify, httpService, url, headers);
ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
} else if (!strncasecmp(url, "rtsp://", 7)) {
source = new RTSPSource(
notify, httpService, url, headers, mUIDValid, mUID);
ALOGV("setDataSourceAsync RTSPSource %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_RTSP;
} else if ((!strncasecmp(url, "http://", 7)
|| !strncasecmp(url, "https://", 8))
&& ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
@@ -269,6 +273,7 @@
source = new RTSPSource(
notify, httpService, url, headers, mUIDValid, mUID, true);
ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_RTSP;
} else {
ALOGV("setDataSourceAsync GenericSource %s", url);
@@ -282,6 +287,9 @@
} else {
ALOGE("Failed to set data source!");
}
+
+ // regardless of success/failure
+ mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
}
msg->setObject("source", source);
msg->post();
@@ -307,6 +315,7 @@
msg->setObject("source", source);
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
}
void NuPlayer::setDataSourceAsync(const sp<DataSource> &dataSource) {
@@ -323,6 +332,7 @@
msg->setObject("source", source);
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_MEDIA;
}
status_t NuPlayer::getDefaultBufferingSettings(
@@ -2651,6 +2661,32 @@
}
}
+const char *NuPlayer::getDataSourceType() {
+ switch (mDataSourceType) {
+ case DATA_SOURCE_TYPE_HTTP_LIVE:
+ return "HTTPLive";
+
+ case DATA_SOURCE_TYPE_RTSP:
+ return "RTSP";
+
+ case DATA_SOURCE_TYPE_GENERIC_URL:
+ return "GenURL";
+
+ case DATA_SOURCE_TYPE_GENERIC_FD:
+ return "GenFD";
+
+ case DATA_SOURCE_TYPE_MEDIA:
+ return "Media";
+
+ case DATA_SOURCE_TYPE_STREAM:
+ return "Stream";
+
+ case DATA_SOURCE_TYPE_NONE:
+ default:
+ return "None";
+ }
+}
+
// Modular DRM begin
status_t NuPlayer::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
{
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index d542749..c69835f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -93,6 +93,8 @@
status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
status_t releaseDrm();
+ const char *getDataSourceType();
+
protected:
virtual ~NuPlayer();
@@ -236,6 +238,18 @@
sp<ICrypto> mCrypto;
bool mIsDrmProtected;
+ typedef enum {
+ DATA_SOURCE_TYPE_NONE,
+ DATA_SOURCE_TYPE_HTTP_LIVE,
+ DATA_SOURCE_TYPE_RTSP,
+ DATA_SOURCE_TYPE_GENERIC_URL,
+ DATA_SOURCE_TYPE_GENERIC_FD,
+ DATA_SOURCE_TYPE_MEDIA,
+ DATA_SOURCE_TYPE_STREAM,
+ } DATA_SOURCE_TYPE;
+
+ std::atomic<DATA_SOURCE_TYPE> mDataSourceType;
+
inline const sp<DecoderBase> &getDecoder(bool audio) {
return audio ? mAudioDecoder : mVideoDecoder;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 01008b4..0c06976 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -53,6 +53,7 @@
static const char *kPlayerPlaying = "android.media.mediaplayer.playingMs";
static const char *kPlayerError = "android.media.mediaplayer.err";
static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
+static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
NuPlayerDriver::NuPlayerDriver(pid_t pid)
@@ -570,6 +571,8 @@
mAnalyticsItem->setInt64(kPlayerDuration, duration_ms);
mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs+500)/1000 );
+
+ mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
}
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index e3ca516..51f1ba3 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -297,6 +297,10 @@
sp<IMediaSource> source = mImpl->getTrack(index);
+ if (source == nullptr) {
+ return ERROR_MALFORMED;
+ }
+
status_t ret = source->start();
if (ret != OK) {
return ret;
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 95fdf36..6d28d1b 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -31,12 +31,10 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
-AImage::AImage(AImageReader* reader, int32_t format, uint64_t usage0, uint64_t usage1,
- BufferItem* buffer, int64_t timestamp,
- int32_t width, int32_t height, int32_t numPlanes) :
- mReader(reader), mFormat(format), mUsage0(usage0), mUsage1(usage1),
- mBuffer(buffer), mLockedBuffer(nullptr), mTimestamp(timestamp),
- mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+AImage::AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
+ int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
+ mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
+ mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
}
// Can only be called by free() with mLock hold
@@ -178,9 +176,9 @@
return AMEDIA_ERROR_INVALID_OBJECT;
}
- if ((mUsage0 & AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN) == 0) {
+ if ((mUsage & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN) == 0) {
ALOGE("%s: AImage %p does not have any software read usage bits set, usage=%" PRIu64 "",
- __FUNCTION__, this, mUsage0);
+ __FUNCTION__, this, mUsage);
return AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE;
}
@@ -191,13 +189,10 @@
auto lockedBuffer = std::make_unique<CpuConsumer::LockedBuffer>();
- uint64_t producerUsage;
- uint64_t consumerUsage;
- android_hardware_HardwareBuffer_convertToGrallocUsageBits(
- &producerUsage, &consumerUsage, mUsage0, mUsage1);
+ uint64_t grallocUsage = android_hardware_HardwareBuffer_convertToGrallocUsageBits(mUsage);
status_t ret =
- lockImageFromBuffer(mBuffer, consumerUsage, mBuffer->mFence->dup(), lockedBuffer.get());
+ lockImageFromBuffer(mBuffer, grallocUsage, mBuffer->mFence->dup(), lockedBuffer.get());
if (ret != OK) {
ALOGE("%s: AImage %p failed to lock, error=%d", __FUNCTION__, this, ret);
return AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE;
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 1fcb495..e9073d5 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -32,9 +32,8 @@
// TODO: this only supports ImageReader
struct AImage {
- AImage(AImageReader* reader, int32_t format, uint64_t usage0, uint64_t usage1,
- BufferItem* buffer, int64_t timestamp,
- int32_t width, int32_t height, int32_t numPlanes);
+ AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
+ int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes);
// free all resources while keeping object alive. Caller must obtain reader lock
void close() { close(-1); }
@@ -75,8 +74,7 @@
// When reader is close, AImage will only accept close API call
wp<AImageReader> mReader;
const int32_t mFormat;
- const uint64_t mUsage0; // AHARDWAREBUFFER_USAGE0* flags.
- const uint64_t mUsage1; // AHARDWAREBUFFER_USAGE1* flags.
+ const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
BufferItem* mBuffer;
std::unique_ptr<CpuConsumer::LockedBuffer> mLockedBuffer;
const int64_t mTimestamp;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c449611..5d1a20b 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -238,14 +238,12 @@
AImageReader::AImageReader(int32_t width,
int32_t height,
int32_t format,
- uint64_t usage0,
- uint64_t usage1,
+ uint64_t usage,
int32_t maxImages)
: mWidth(width),
mHeight(height),
mFormat(format),
- mUsage0(usage0),
- mUsage1(usage1),
+ mUsage(usage),
mMaxImages(maxImages),
mNumPlanes(getNumPlanesForFormat(format)),
mFrameListener(new FrameListener(this)),
@@ -256,20 +254,14 @@
PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
mHalFormat = android_view_Surface_mapPublicFormatToHalFormat(publicFormat);
mHalDataSpace = android_view_Surface_mapPublicFormatToHalDataspace(publicFormat);
-
- uint64_t producerUsage;
- uint64_t consumerUsage;
- android_hardware_HardwareBuffer_convertToGrallocUsageBits(
- &producerUsage, &consumerUsage, mUsage0, mUsage1);
- // Strip out producerUsage here.
- mHalUsage = android_convertGralloc1To0Usage(0, consumerUsage);
+ mHalUsage = android_hardware_HardwareBuffer_convertToGrallocUsageBits(mUsage);
sp<IGraphicBufferProducer> gbProducer;
sp<IGraphicBufferConsumer> gbConsumer;
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
- String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "u%" PRIu64 "m%d-%d-%d",
- mWidth, mHeight, mFormat, mUsage0, mUsage1, mMaxImages, getpid(),
+ String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "m%d-%d-%d",
+ mWidth, mHeight, mFormat, mUsage, mMaxImages, getpid(),
createProcessUniqueId());
mBufferItemConsumer =
@@ -445,10 +437,10 @@
}
if (mHalFormat == HAL_PIXEL_FORMAT_BLOB) {
- *image = new AImage(this, mFormat, mUsage0, mUsage1, buffer, buffer->mTimestamp,
+ *image = new AImage(this, mFormat, mUsage, buffer, buffer->mTimestamp,
readerWidth, readerHeight, mNumPlanes);
} else {
- *image = new AImage(this, mFormat, mUsage0, mUsage1, buffer, buffer->mTimestamp,
+ *image = new AImage(this, mFormat, mUsage, buffer, buffer->mTimestamp,
bufferWidth, bufferHeight, mNumPlanes);
}
mAcquiredImages.push_back(*image);
@@ -587,12 +579,12 @@
/*out*/AImageReader** reader) {
ALOGV("%s", __FUNCTION__);
return AImageReader_newWithUsage(
- width, height, format, AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN, 0, maxImages, reader);
+ width, height, format, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, maxImages, reader);
}
EXPORT
media_status_t AImageReader_newWithUsage(
- int32_t width, int32_t height, int32_t format, uint64_t usage0, uint64_t usage1,
+ int32_t width, int32_t height, int32_t format, uint64_t usage,
int32_t maxImages, /*out*/ AImageReader** reader) {
ALOGV("%s", __FUNCTION__);
@@ -626,7 +618,7 @@
}
AImageReader* tmpReader = new AImageReader(
- width, height, format, usage0, usage1, maxImages);
+ width, height, format, usage, maxImages);
if (tmpReader == nullptr) {
ALOGE("%s: AImageReader allocation failed", __FUNCTION__);
return AMEDIA_ERROR_UNKNOWN;
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index a233ec8..35af169 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -55,8 +55,7 @@
AImageReader(int32_t width,
int32_t height,
int32_t format,
- uint64_t usage0,
- uint64_t usage1,
+ uint64_t usage,
int32_t maxImages);
~AImageReader();
@@ -117,8 +116,7 @@
const int32_t mWidth;
const int32_t mHeight;
const int32_t mFormat;
- const uint64_t mUsage0; // AHARDWAREBUFFER_USAGE0* flags.
- const uint64_t mUsage1; // AHARDWAREBUFFER_USAGE1* flags.
+ const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
const int32_t mMaxImages;
// TODO(jwcai) Seems completely unused in AImageReader class.
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 3665875..8d018d1 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8296,22 +8296,24 @@
mEffectChains[0]->setVolume_l(&vol, &vol);
volume = (float)vol / (1 << 24);
}
-
- mOutput->stream->setVolume(volume, volume);
-
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback != 0) {
- int channelCount;
- if (isOutput()) {
- channelCount = audio_channel_count_from_out_mask(mChannelMask);
+ // Try to use HW volume control and fall back to SW control if not implemented
+ if (mOutput->stream->setVolume(volume, volume) != NO_ERROR) {
+ sp<MmapStreamCallback> callback = mCallback.promote();
+ if (callback != 0) {
+ int channelCount;
+ if (isOutput()) {
+ channelCount = audio_channel_count_from_out_mask(mChannelMask);
+ } else {
+ channelCount = audio_channel_count_from_in_mask(mChannelMask);
+ }
+ Vector<float> values;
+ for (int i = 0; i < channelCount; i++) {
+ values.add(volume);
+ }
+ callback->onVolumeChanged(mChannelMask, values);
} else {
- channelCount = audio_channel_count_from_in_mask(mChannelMask);
+ ALOGW("Could not set MMAP stream volume: no volume callback!");
}
- Vector<float> values;
- for (int i = 0; i < channelCount; i++) {
- values.add(volume);
- }
- callback->onVolumeChanged(mChannelMask, values);
}
}
}
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 733a78e..6e21126 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -41,11 +41,7 @@
{
SharedParameters::Lock l(client->getParameters());
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- mUsePartialResult = (mNumPartialResults > 1);
- } else {
- mUsePartialResult = l.mParameters.quirks.partialResults;
- }
+ mUsePartialResult = (mNumPartialResults > 1);
// Initialize starting 3A state
m3aState.afTriggerId = l.mParameters.afTriggerCounter;
@@ -76,16 +72,7 @@
bool isPartialResult = false;
if (mUsePartialResult) {
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
- } else {
- camera_metadata_entry_t entry;
- entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
- if (entry.count > 0 &&
- entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- isPartialResult = true;
- }
- }
+ isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
}
if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
@@ -291,16 +278,8 @@
gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
&pendingState.awbState, frameNumber, cameraId);
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
- pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
- } else {
- gotAllStates &= updatePendingState<int32_t>(metadata,
- ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);
-
- gotAllStates &= updatePendingState<int32_t>(metadata,
- ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
- }
+ pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
+ pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
if (!gotAllStates) {
// If not all states are received, put the pending state to mPending3AStates.
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 1c78a08..a305bc7 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -2854,32 +2854,14 @@
}
sizes->clear();
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i=0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
- sc.width <= limit.width && sc.height <= limit.height) {
- Size sz = {sc.width, sc.height};
- sizes->push(sz);
- }
- }
- } else {
- const size_t SIZE_COUNT = sizeof(Size) / sizeof(int);
- camera_metadata_ro_entry_t availableProcessedSizes =
- staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT);
- if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE;
-
- Size filteredSize;
- for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) {
- filteredSize.width = availableProcessedSizes.data.i32[i];
- filteredSize.height = availableProcessedSizes.data.i32[i+1];
- // Need skip the preview sizes that are too large.
- if (filteredSize.width <= limit.width &&
- filteredSize.height <= limit.height) {
- sizes->push(filteredSize);
- }
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i=0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+ sc.width <= limit.width && sc.height <= limit.height) {
+ Size sz = {sc.width, sc.height};
+ sizes->push(sz);
}
}
@@ -2934,10 +2916,6 @@
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_IS_INPUT_OFFSET = 3;
Vector<StreamConfiguration> scs;
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("StreamConfiguration is only valid after device HAL 3.2!");
- return scs;
- }
camera_metadata_ro_entry_t availableStreamConfigs =
staticInfo(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
@@ -2953,37 +2931,10 @@
}
int64_t Parameters::getJpegStreamMinFrameDurationNs(Parameters::Size size) {
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
- } else {
- Vector<Size> availableJpegSizes = getAvailableJpegSizes();
- size_t streamIdx = availableJpegSizes.size();
- for (size_t i = 0; i < availableJpegSizes.size(); i++) {
- if (availableJpegSizes[i].width == size.width &&
- availableJpegSizes[i].height == size.height) {
- streamIdx = i;
- break;
- }
- }
- if (streamIdx != availableJpegSizes.size()) {
- camera_metadata_ro_entry_t jpegMinDurations =
- staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS);
- if (streamIdx < jpegMinDurations.count) {
- return jpegMinDurations.data.i64[streamIdx];
- }
- }
- }
- ALOGE("%s: cannot find min frame duration for jpeg size %dx%d",
- __FUNCTION__, size.width, size.height);
- return -1;
+ return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
}
int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("Min frame duration for HAL 3.1 or lower is not supported");
- return -1;
- }
-
const int STREAM_DURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
const int STREAM_WIDTH_OFFSET = 1;
@@ -3005,11 +2956,6 @@
}
bool Parameters::isFpsSupported(const Vector<Size> &sizes, int format, int32_t fps) {
- // Skip the check for older HAL version, as the min duration is not supported.
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- return true;
- }
-
// Get min frame duration for each size and check if the given fps range can be supported.
for (size_t i = 0 ; i < sizes.size(); i++) {
int64_t minFrameDuration = getMinFrameDurationNs(sizes[i], format);
@@ -3030,48 +2976,29 @@
SortedVector<int32_t> Parameters::getAvailableOutputFormats() {
SortedVector<int32_t> outputFormats; // Non-duplicated output formats
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i = 0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
- outputFormats.add(sc.format);
- }
- }
- } else {
- camera_metadata_ro_entry_t availableFormats = staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
- for (size_t i = 0; i < availableFormats.count; i++) {
- outputFormats.add(availableFormats.data.i32[i]);
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i = 0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
+ outputFormats.add(sc.format);
}
}
+
return outputFormats;
}
Vector<Parameters::Size> Parameters::getAvailableJpegSizes() {
Vector<Parameters::Size> jpegSizes;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i = 0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- sc.format == HAL_PIXEL_FORMAT_BLOB) {
- Size sz = {sc.width, sc.height};
- jpegSizes.add(sz);
- }
- }
- } else {
- const int JPEG_SIZE_ENTRY_COUNT = 2;
- const int WIDTH_OFFSET = 0;
- const int HEIGHT_OFFSET = 1;
- camera_metadata_ro_entry_t availableJpegSizes =
- staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- for (size_t i = 0; i < availableJpegSizes.count; i+= JPEG_SIZE_ENTRY_COUNT) {
- int width = availableJpegSizes.data.i32[i + WIDTH_OFFSET];
- int height = availableJpegSizes.data.i32[i + HEIGHT_OFFSET];
- Size sz = {width, height};
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i = 0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ sc.format == HAL_PIXEL_FORMAT_BLOB) {
+ Size sz = {sc.width, sc.height};
jpegSizes.add(sz);
}
}
+
return jpegSizes;
}
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index bf92a2b..d79e430 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -114,16 +114,11 @@
}
// Use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG for ZSL streaming case.
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
- if (params.useZeroShutterLag() && !params.recordingHint) {
- res = device->createDefaultRequest(CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG,
- &mPreviewRequest);
- } else {
- res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
- &mPreviewRequest);
- }
+ if (params.useZeroShutterLag() && !params.recordingHint) {
+ res = device->createDefaultRequest(
+ CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG, &mPreviewRequest);
} else {
- res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
+ res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
&mPreviewRequest);
}
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 4f788ae..44540bb 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -307,11 +307,6 @@
virtual status_t prepare(int maxCount, int streamId) = 0;
/**
- * Get the HAL device version.
- */
- virtual uint32_t getDeviceVersion() = 0;
-
- /**
* Set the deferred consumer surface and finish the rest of the stream configuration.
*/
virtual status_t setConsumerSurfaces(int streamId,
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 9e78f88..41c953b 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -32,8 +32,7 @@
mDevice(device),
mNumPartialResults(1) {
sp<CameraDeviceBase> cameraDevice = device.promote();
- if (cameraDevice != 0 &&
- cameraDevice->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
+ if (cameraDevice != 0) {
CameraMetadata staticInfo = cameraDevice->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (entry.count > 0) {
@@ -171,18 +170,8 @@
camera_metadata_ro_entry_t entry;
// Check if this result is partial.
- bool isPartialResult = false;
- if (device->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- isPartialResult = result.mResultExtras.partialResultCount < mNumPartialResults;
- } else {
- entry = result.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
- if (entry.count != 0 &&
- entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- ALOGV("%s: Camera %s: This is a partial result",
- __FUNCTION__, device->getId().string());
- isPartialResult = true;
- }
- }
+ bool isPartialResult =
+ result.mResultExtras.partialResultCount < mNumPartialResults;
// TODO: instead of getting requestID from CameraMetadata, we should get it
// from CaptureResultExtras. This will require changing Camera2Device.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b64488c..fb303bf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -163,7 +163,6 @@
/** Everything is good to go */
- mDeviceVersion = device->common.version;
mDeviceInfo = info.static_camera_characteristics;
mInterface = std::make_unique<HalInterface>(device);
@@ -231,9 +230,6 @@
// Don't use the queue onwards.
}
- // TODO: camera service will absorb 3_2/3_3/3_4 differences in the future
- // for now use 3_4 to keep legacy devices working
- mDeviceVersion = CAMERA_DEVICE_API_VERSION_3_4;
mInterface = std::make_unique<HalInterface>(session, queue);
std::string providerType;
mVendorTagId = manager->getProviderTagIdLocked(mId.string());
@@ -262,17 +258,8 @@
mTagMonitor.initialize(mVendorTagId);
- bool aeLockAvailable = false;
- camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find(
- ANDROID_CONTROL_AE_LOCK_AVAILABLE);
- if (aeLockAvailableEntry.count > 0) {
- aeLockAvailable = (aeLockAvailableEntry.data.u8[0] ==
- ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
- }
-
/** Start up request queue thread */
- mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get(), mDeviceVersion,
- aeLockAvailable);
+ mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get());
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -284,13 +271,6 @@
mPreparerThread = new PreparerThread();
- // Determine whether we need to derive sensitivity boost values for older devices.
- // If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control
- // be listed (as the default value 100)
- if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
- mDerivePostRawSensKey = true;
- }
-
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNextStreamId = 0;
mDummyStreamId = NO_STREAM;
@@ -306,19 +286,11 @@
}
// Will the HAL be sending in early partial result metadata?
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_entry partialResultsCount =
- mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
- if (partialResultsCount.count > 0) {
- mNumPartialResults = partialResultsCount.data.i32[0];
- mUsePartialResult = (mNumPartialResults > 1);
- }
- } else {
- camera_metadata_entry partialResultsQuirk =
- mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
- if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
- mUsePartialResult = true;
- }
+ camera_metadata_entry partialResultsCount =
+ mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
+ if (partialResultsCount.count > 0) {
+ mNumPartialResults = partialResultsCount.data.i32[0];
+ mUsePartialResult = (mNumPartialResults > 1);
}
camera_metadata_entry configs =
@@ -434,48 +406,32 @@
Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
int32_t maxJpegWidth = 0, maxJpegHeight = 0;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- const int STREAM_CONFIGURATION_SIZE = 4;
- const int STREAM_FORMAT_OFFSET = 0;
- const int STREAM_WIDTH_OFFSET = 1;
- const int STREAM_HEIGHT_OFFSET = 2;
- const int STREAM_IS_INPUT_OFFSET = 3;
- camera_metadata_ro_entry_t availableStreamConfigs =
- mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- if (availableStreamConfigs.count == 0 ||
- availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
- return Size(0, 0);
- }
+ const int STREAM_CONFIGURATION_SIZE = 4;
+ const int STREAM_FORMAT_OFFSET = 0;
+ const int STREAM_WIDTH_OFFSET = 1;
+ const int STREAM_HEIGHT_OFFSET = 2;
+ const int STREAM_IS_INPUT_OFFSET = 3;
+ camera_metadata_ro_entry_t availableStreamConfigs =
+ mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ if (availableStreamConfigs.count == 0 ||
+ availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+ return Size(0, 0);
+ }
- // Get max jpeg size (area-wise).
- for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
- int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
- int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
- int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
- int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
- && format == HAL_PIXEL_FORMAT_BLOB &&
- (width * height > maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = width;
- maxJpegHeight = height;
- }
- }
- } else {
- camera_metadata_ro_entry availableJpegSizes =
- mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- if (availableJpegSizes.count == 0 || availableJpegSizes.count % 2 != 0) {
- return Size(0, 0);
- }
-
- // Get max jpeg size (area-wise).
- for (size_t i = 0; i < availableJpegSizes.count; i += 2) {
- if ((availableJpegSizes.data.i32[i] * availableJpegSizes.data.i32[i + 1])
- > (maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = availableJpegSizes.data.i32[i];
- maxJpegHeight = availableJpegSizes.data.i32[i + 1];
- }
+ // Get max jpeg size (area-wise).
+ for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
+ int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+ && format == HAL_PIXEL_FORMAT_BLOB &&
+ (width * height > maxJpegWidth * maxJpegHeight)) {
+ maxJpegWidth = width;
+ maxJpegHeight = height;
}
}
+
return Size(maxJpegWidth, maxJpegHeight);
}
@@ -497,31 +453,6 @@
return measured;
}
-/**
- * Map Android N dataspace definitions back to Android M definitions, for
- * use with HALv3.3 or older.
- *
- * Only map where correspondences exist, and otherwise preserve the value.
- */
-android_dataspace Camera3Device::mapToLegacyDataspace(android_dataspace dataSpace) {
- switch (dataSpace) {
- case HAL_DATASPACE_V0_SRGB_LINEAR:
- return HAL_DATASPACE_SRGB_LINEAR;
- case HAL_DATASPACE_V0_SRGB:
- return HAL_DATASPACE_SRGB;
- case HAL_DATASPACE_V0_JFIF:
- return HAL_DATASPACE_JFIF;
- case HAL_DATASPACE_V0_BT601_625:
- return HAL_DATASPACE_BT601_625;
- case HAL_DATASPACE_V0_BT601_525:
- return HAL_DATASPACE_BT601_525;
- case HAL_DATASPACE_V0_BT709:
- return HAL_DATASPACE_BT709;
- default:
- return dataSpace;
- }
-}
-
hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
int frameworkFormat) {
return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
@@ -1365,32 +1296,17 @@
assert(mStatus != STATUS_ACTIVE);
sp<Camera3OutputStream> newStream;
- // Overwrite stream set id to invalid for HAL3.2 or lower, as buffer manager does support
- // such devices.
- if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2) {
- streamSetId = CAMERA3_STREAM_SET_ID_INVALID;
- }
if (consumers.size() == 0 && !hasDeferredConsumer) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return BAD_VALUE;
}
- // HAL3.1 doesn't support deferred consumer stream creation as it requires buffer registration
- // which requires a consumer surface to be available.
- if (hasDeferredConsumer && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("HAL3.1 doesn't support deferred consumer stream creation");
- return BAD_VALUE;
- }
if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
return BAD_VALUE;
}
- // Use legacy dataspace values for older HALs
- if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) {
- dataSpace = mapToLegacyDataspace(dataSpace);
- }
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t blobBufferSize;
if (dataSpace != HAL_DATASPACE_DEPTH) {
@@ -1433,15 +1349,7 @@
}
newStream->setStatusTracker(mStatusTracker);
- /**
- * Camera3 Buffer manager is only supported by HAL3.3 onwards, as the older HALs ( < HAL3.2)
- * requires buffers to be statically allocated for internal static buffer registration, while
- * the buffers provided by buffer manager are really dynamically allocated. For HAL3.2, because
- * not all HAL implementation supports dynamic buffer registeration, exlude it as well.
- */
- if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
- newStream->setBufferManager(mBufferManager);
- }
+ newStream->setBufferManager(mBufferManager);
res = mOutputStreams.add(mNextStreamId, newStream);
if (res < 0) {
@@ -1659,15 +1567,6 @@
set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
mRequestTemplateCache[templateId].acquire(rawRequest);
- // Derive some new keys for backward compatibility
- if (mDerivePostRawSensKey && !mRequestTemplateCache[templateId].exists(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
- int32_t defaultBoost[1] = {100};
- mRequestTemplateCache[templateId].update(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
- defaultBoost, 1);
- }
-
*request = mRequestTemplateCache[templateId];
return OK;
}
@@ -1919,15 +1818,7 @@
mRequestThread->clear(/*out*/frameNumber);
}
- status_t res;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
- res = mRequestThread->flush();
- } else {
- Mutex::Autolock l(mLock);
- res = waitUntilDrainedLocked();
- }
-
- return res;
+ return mRequestThread->flush();
}
status_t Camera3Device::prepare(int streamId) {
@@ -1968,14 +1859,6 @@
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- // Teardown can only be accomplished on devices that don't require register_stream_buffers,
- // since we cannot call register_stream_buffers except right after configure_streams.
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("%s: Unable to tear down streams on device HAL v%x",
- __FUNCTION__, mDeviceVersion);
- return NO_INIT;
- }
-
sp<Camera3StreamInterface> stream;
ssize_t outputStreamIdx = mOutputStreams.indexOfKey(streamId);
if (outputStreamIdx == NAME_NOT_FOUND) {
@@ -2013,12 +1896,6 @@
return OK;
}
-uint32_t Camera3Device::getDeviceVersion() {
- ATRACE_CALL();
- Mutex::Autolock il(mInterfaceLock);
- return mDeviceVersion;
-}
-
/**
* Methods called by subclasses
*/
@@ -2511,14 +2388,13 @@
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride,
bool hasAppCallback) {
ATRACE_CALL();
Mutex::Autolock l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
- aeTriggerCancelOverride, hasAppCallback));
+ hasAppCallback));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2603,8 +2479,8 @@
}
}
-void Camera3Device::insertResultLocked(CaptureResult *result, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+void Camera3Device::insertResultLocked(CaptureResult *result,
+ uint32_t frameNumber) {
if (result == nullptr) return;
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
@@ -2623,8 +2499,6 @@
return;
}
- overrideResultForPrecaptureCancel(&result->mMetadata, aeTriggerCancelOverride);
-
// Valid result, insert into queue
List<CaptureResult>::iterator queuedResult =
mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
@@ -2639,15 +2513,14 @@
void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
Mutex::Autolock l(mOutputLock);
CaptureResult captureResult;
captureResult.mResultExtras = resultExtras;
captureResult.mMetadata = partialResult;
- insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+ insertResultLocked(&captureResult, frameNumber);
}
@@ -2655,8 +2528,7 @@
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
- bool reprocess,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ bool reprocess) {
if (pendingMetadata.isEmpty())
return;
@@ -2690,15 +2562,6 @@
captureResult.mMetadata.append(collectedPartialResult);
}
- // Derive some new keys for backward compaibility
- if (mDerivePostRawSensKey && !captureResult.mMetadata.exists(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
- int32_t defaultBoost[1] = {100};
- captureResult.mMetadata.update(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
- defaultBoost, 1);
- }
-
captureResult.mMetadata.sort();
// Check that there's a timestamp in the result metadata
@@ -2712,7 +2575,7 @@
mTagMonitor.monitorMetadata(TagMonitor::RESULT,
frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
- insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+ insertResultLocked(&captureResult, frameNumber);
}
/**
@@ -2732,10 +2595,7 @@
return;
}
- // For HAL3.2 or above, If HAL doesn't support partial, it must always set
- // partial_result to 1 when metadata is included in this result.
if (!mUsePartialResult &&
- mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 &&
result->result != NULL &&
result->partial_result != 1) {
SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
@@ -2780,39 +2640,21 @@
// Check if this result carries only partial metadata
if (mUsePartialResult && result->result != NULL) {
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
- " the range of [1, %d] when metadata is included in the result",
- frameNumber, result->partial_result, mNumPartialResults);
- return;
- }
- isPartialResult = (result->partial_result < mNumPartialResults);
- if (isPartialResult) {
- request.collectedPartialResult.append(result->result);
- }
- } else {
- camera_metadata_ro_entry_t partialResultEntry;
- res = find_camera_metadata_ro_entry(result->result,
- ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
- if (res != NAME_NOT_FOUND &&
- partialResultEntry.count > 0 &&
- partialResultEntry.data.u8[0] ==
- ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- // A partial result. Flag this as such, and collect this
- // set of metadata into the in-flight entry.
- isPartialResult = true;
- request.collectedPartialResult.append(
- result->result);
- request.collectedPartialResult.erase(
- ANDROID_QUIRKS_PARTIAL_RESULT);
- }
+ if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
+ " the range of [1, %d] when metadata is included in the result",
+ frameNumber, result->partial_result, mNumPartialResults);
+ return;
+ }
+ isPartialResult = (result->partial_result < mNumPartialResults);
+ if (isPartialResult) {
+ request.collectedPartialResult.append(result->result);
}
if (isPartialResult && request.hasCallback) {
// Send partial capture result
- sendPartialCaptureResult(result->result, request.resultExtras, frameNumber,
- request.aeTriggerCancelOverride);
+ sendPartialCaptureResult(result->result, request.resultExtras,
+ frameNumber);
}
}
@@ -2877,8 +2719,8 @@
CameraMetadata metadata;
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
- collectedPartialResult, frameNumber, hasInputBufferInRequest,
- request.aeTriggerCancelOverride);
+ collectedPartialResult, frameNumber,
+ hasInputBufferInRequest);
}
}
@@ -3058,7 +2900,7 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.aeTriggerCancelOverride);
+ r.hasInputBuffer);
}
returnOutputBuffers(r.pendingOutputBuffers.array(),
r.pendingOutputBuffers.size(), r.shutterTimestamp);
@@ -3606,14 +3448,11 @@
Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
sp<StatusTracker> statusTracker,
- HalInterface* interface,
- uint32_t deviceVersion,
- bool aeLockAvailable) :
+ HalInterface* interface) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
mInterface(interface),
- mDeviceVersion(deviceVersion),
mListener(nullptr),
mId(getId(parent)),
mReconfigured(false),
@@ -3625,7 +3464,6 @@
mCurrentPreCaptureTriggerId(0),
mRepeatingLastFrameNumber(
hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
- mAeLockAvailable(aeLockAvailable),
mPrepareVideoStream(false) {
mStatusId = statusTracker->addComponent();
}
@@ -3820,11 +3658,7 @@
ATRACE_CALL();
Mutex::Autolock l(mFlushLock);
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
- return mInterface->flush();
- }
-
- return -ENOTSUP;
+ return mInterface->flush();
}
void Camera3Device::RequestThread::setPaused(bool paused) {
@@ -3857,65 +3691,6 @@
mRequestSignal.signal();
}
-
-/**
- * For devices <= CAMERA_DEVICE_API_VERSION_3_2, AE_PRECAPTURE_TRIGGER_CANCEL is not supported so
- * we need to override AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE and AE_LOCK_OFF
- * to AE_LOCK_ON to start cancelling AE precapture. If AE lock is not available, it still overrides
- * AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE but doesn't add AE_LOCK_ON to the
- * request.
- */
-void Camera3Device::RequestThread::handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request) {
- request->mAeTriggerCancelOverride.applyAeLock = false;
- request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = false;
-
- if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
- return;
- }
-
- camera_metadata_entry_t aePrecaptureTrigger =
- request->mSettings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
- if (aePrecaptureTrigger.count > 0 &&
- aePrecaptureTrigger.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL) {
- // Always override CANCEL to IDLE
- uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
- request->mSettings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
- request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = true;
- request->mAeTriggerCancelOverride.aePrecaptureTrigger =
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
-
- if (mAeLockAvailable == true) {
- camera_metadata_entry_t aeLock = request->mSettings.find(ANDROID_CONTROL_AE_LOCK);
- if (aeLock.count == 0 || aeLock.data.u8[0] == ANDROID_CONTROL_AE_LOCK_OFF) {
- uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON;
- request->mSettings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
- request->mAeTriggerCancelOverride.applyAeLock = true;
- request->mAeTriggerCancelOverride.aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
- }
- }
- }
-}
-
-/**
- * Override result metadata for cancelling AE precapture trigger applied in
- * handleAePrecaptureCancelRequest().
- */
-void Camera3Device::overrideResultForPrecaptureCancel(
- CameraMetadata *result, const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
- if (aeTriggerCancelOverride.applyAeLock) {
- // Only devices <= v3.2 should have this override
- assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
- result->update(ANDROID_CONTROL_AE_LOCK, &aeTriggerCancelOverride.aeLock, 1);
- }
-
- if (aeTriggerCancelOverride.applyAePrecaptureTrigger) {
- // Only devices <= v3.2 should have this override
- assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
- result->update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- &aeTriggerCancelOverride.aePrecaptureTrigger, 1);
- }
-}
-
void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
bool surfaceAbandoned = false;
int64_t lastFrameNumber = 0;
@@ -4283,7 +4058,6 @@
res = parent->registerInFlight(halRequest->frame_number,
totalNumBuffers, captureRequest->mResultExtras,
/*hasInput*/halRequest->input_buffer != NULL,
- captureRequest->mAeTriggerCancelOverride,
hasCallback);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
", burstId = %" PRId32 ".",
@@ -4542,8 +4316,6 @@
}
}
- handleAePrecaptureCancelRequest(nextRequest);
-
return nextRequest;
}
@@ -4629,9 +4401,7 @@
request->mResultExtras.afTriggerId = triggerId;
mCurrentAfTriggerId = triggerId;
}
- if (parent->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- continue; // Trigger ID tag is deprecated since device HAL 3.2
- }
+ continue;
}
camera_metadata_entry entry = metadata.find(tag);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 8b76a97..c9876a6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -166,8 +166,6 @@
status_t prepare(int maxCount, int streamId) override;
- uint32_t getDeviceVersion() override;
-
ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
ssize_t getPointCloudBufferSize() const;
ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
@@ -342,12 +340,6 @@
CameraMetadata mRequestTemplateCache[CAMERA3_TEMPLATE_COUNT];
- uint32_t mDeviceVersion;
-
- // whether Camera3Device should derive ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST for
- // backward compatibility. Should not be changed after initialization.
- bool mDerivePostRawSensKey = false;
-
struct Size {
uint32_t width;
uint32_t height;
@@ -405,13 +397,6 @@
// words, camera device shouldn't be open during CPU suspend.
nsecs_t mTimestampOffset;
- typedef struct AeTriggerCancelOverride {
- bool applyAeLock;
- uint8_t aeLock;
- bool applyAePrecaptureTrigger;
- uint8_t aePrecaptureTrigger;
- } AeTriggerCancelOverride_t;
-
class CaptureRequest : public LightRefBase<CaptureRequest> {
public:
CameraMetadata mSettings;
@@ -421,9 +406,6 @@
mOutputStreams;
SurfaceMap mOutputSurfaces;
CaptureResultExtras mResultExtras;
- // Used to cancel AE precapture trigger for devices doesn't support
- // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- AeTriggerCancelOverride_t mAeTriggerCancelOverride;
// The number of requests that should be submitted to HAL at a time.
// For example, if batch size is 8, this request and the following 7
// requests will be submitted to HAL at a time. The batch size for
@@ -599,11 +581,6 @@
static nsecs_t getMonoToBoottimeOffset();
/**
- * Helper function to map between legacy and new dataspace enums
- */
- static android_dataspace mapToLegacyDataspace(android_dataspace dataSpace);
-
- /**
* Helper functions to map between framework and HIDL values
*/
static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
@@ -648,9 +625,7 @@
RequestThread(wp<Camera3Device> parent,
sp<camera3::StatusTracker> statusTracker,
- HalInterface* interface,
- uint32_t deviceVersion,
- bool aeLockAvailable);
+ HalInterface* interface);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -784,9 +759,6 @@
// If the input request is in mRepeatingRequests. Must be called with mRequestLock hold
bool isRepeatingRequestLocked(const sp<CaptureRequest>&);
- // Handle AE precapture trigger cancel for devices <= CAMERA_DEVICE_API_VERSION_3_2.
- void handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request);
-
// Clear repeating requests. Must be called with mRequestLock held.
status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
@@ -799,7 +771,6 @@
wp<Camera3Device> mParent;
wp<camera3::StatusTracker> mStatusTracker;
HalInterface* mInterface;
- uint32_t mDeviceVersion;
wp<NotificationListener> mListener;
@@ -849,9 +820,6 @@
int64_t mRepeatingLastFrameNumber;
- // Whether the device supports AE lock
- bool mAeLockAvailable;
-
// Flag indicating if we should prepare video stream for video requests.
bool mPrepareVideoStream;
};
@@ -892,10 +860,6 @@
// the shutter event.
Vector<camera3_stream_buffer_t> pendingOutputBuffers;
- // Used to cancel AE precapture trigger for devices doesn't support
- // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- AeTriggerCancelOverride_t aeTriggerCancelOverride;
-
// Whether this inflight request's shutter and result callback are to be
// called. The policy is that if the request is the last one in the constrained
// high speed recording request list, this flag will be true. If the request list
@@ -910,12 +874,11 @@
haveResultMetadata(false),
numBuffersLeft(0),
hasInputBuffer(false),
- aeTriggerCancelOverride({false, 0, false, 0}),
hasCallback(true) {
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- AeTriggerCancelOverride aeTriggerCancelOverride, bool hasAppCallback) :
+ bool hasAppCallback) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -923,7 +886,6 @@
numBuffersLeft(numBuffers),
resultExtras(extras),
hasInputBuffer(hasInput),
- aeTriggerCancelOverride(aeTriggerCancelOverride),
hasCallback(hasAppCallback) {
}
};
@@ -937,14 +899,7 @@
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride, bool callback);
-
- /**
- * Override result metadata for cancelling AE precapture trigger applied in
- * handleAePrecaptureCancelRequest().
- */
- void overrideResultForPrecaptureCancel(CameraMetadata* result,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ bool callback);
/**
* Tracking for idle detection
@@ -1039,21 +994,19 @@
// Send a partial capture result.
void sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber);
// Send a total capture result given the pending metadata and result extras,
// partial results, and the frame number to the result queue.
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ bool reprocess);
// Insert the result to the result queue after updating frame number and overriding AE
// trigger cancel.
// mOutputLock must be held when calling this function.
- void insertResultLocked(CaptureResult *result, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
/**** Scope for mInFlightLock ****/
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 2b1a899..b45ef77 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -663,89 +663,6 @@
}
}
-status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) {
- ATRACE_CALL();
-
- /**
- * >= CAMERA_DEVICE_API_VERSION_3_2:
- *
- * camera3_device_t->ops->register_stream_buffers() is not called and must
- * be NULL.
- */
- if (hal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGV("%s: register_stream_buffers unused as of HAL3.2", __FUNCTION__);
-
- if (hal3Device->ops->register_stream_buffers != NULL) {
- ALOGE("%s: register_stream_buffers is deprecated in HAL3.2; "
- "must be set to NULL in camera3_device::ops", __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- return OK;
- }
-
- ALOGV("%s: register_stream_buffers using deprecated code path", __FUNCTION__);
-
- status_t res;
-
- size_t bufferCount = getBufferCountLocked();
-
- Vector<buffer_handle_t*> buffers;
- buffers.insertAt(/*prototype_item*/NULL, /*index*/0, bufferCount);
-
- camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set();
- bufferSet.stream = this;
- bufferSet.num_buffers = bufferCount;
- bufferSet.buffers = buffers.editArray();
-
- Vector<camera3_stream_buffer_t> streamBuffers;
- streamBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
-
- // Register all buffers with the HAL. This means getting all the buffers
- // from the stream, providing them to the HAL with the
- // register_stream_buffers() method, and then returning them back to the
- // stream in the error state, since they won't have valid data.
- //
- // Only registered buffers can be sent to the HAL.
-
- uint32_t bufferIdx = 0;
- for (; bufferIdx < bufferCount; bufferIdx++) {
- res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) );
- if (res != OK) {
- ALOGE("%s: Unable to get buffer %d for registration with HAL",
- __FUNCTION__, bufferIdx);
- // Skip registering, go straight to cleanup
- break;
- }
-
- sp<Fence> fence = new Fence(streamBuffers[bufferIdx].acquire_fence);
- fence->waitForever("Camera3Stream::registerBuffers");
-
- buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer;
- }
- if (bufferIdx == bufferCount) {
- // Got all buffers, register with HAL
- ALOGV("%s: Registering %zu buffers with camera HAL",
- __FUNCTION__, bufferCount);
- ATRACE_BEGIN("camera3->register_stream_buffers");
- res = hal3Device->ops->register_stream_buffers(hal3Device,
- &bufferSet);
- ATRACE_END();
- }
-
- // Return all valid buffers to stream, in ERROR state to indicate
- // they weren't filled.
- for (size_t i = 0; i < bufferIdx; i++) {
- streamBuffers.editItemAt(i).release_fence = -1;
- streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
- returnBufferLocked(streamBuffers[i], 0);
- }
-
- mPrepared = true;
-
- return res;
-}
-
status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *,
const std::vector<size_t>&) {
ALOGE("%s: This type of stream does not support output", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 27ef86d..9cdc1b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -474,9 +474,6 @@
Condition mInputBufferReturnedSignal;
static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
- // Gets all buffers from endpoint and registers them with the HAL.
- status_t registerBuffersLocked(camera3_device *hal3Device);
-
void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
bool acquired, bool output);
List<wp<Camera3StreamBufferListener> > mBufferListenerList;
diff --git a/services/mediadrm/Android.mk b/services/mediadrm/Android.mk
index 1d5fa07..fa3a02b 100644
--- a/services/mediadrm/Android.mk
+++ b/services/mediadrm/Android.mk
@@ -43,8 +43,9 @@
# TODO: Some legacy DRM plugins only support 32-bit. They need to be migrated to
# 64-bit. (b/18948909) Once all of a device's legacy DRM plugins support 64-bit,
-# that device can turn on ENABLE_MEDIADRM_64 to build this service as 64-bit.
-ifneq ($(ENABLE_MEDIADRM_64), true)
+# that device can turn on TARGET_ENABLE_MEDIADRM_64 to build this service as
+# 64-bit.
+ifneq ($(TARGET_ENABLE_MEDIADRM_64), true)
LOCAL_32_BIT_ONLY := true
endif
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b197798..d3e182a 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -46,6 +46,7 @@
// Use 2 for "double buffered"
#define BUFFER_SIZE_IN_BURSTS 2
+#define BURSTS_PER_MIX_LOOP 1
// The mStreamInternal will use a service interface that does not go through Binder.
AAudioServiceEndpoint::AAudioServiceEndpoint(AAudioService &audioService)