Merge "audio policy: fix mute delay when setting output device"
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 2c48bbf..79db323 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -154,8 +154,9 @@
* streamType: Select the type of audio stream this track is attached to
* (e.g. AUDIO_STREAM_MUSIC).
* sampleRate: Data source sampling rate in Hz.
- * format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
- * 16 bits per sample).
+ * format: Audio format. For mixed tracks, any PCM format supported by the server is OK,
+ * or AUDIO_FORMAT_PCM_8_BIT, which is handled on the client side. For direct
+ * and offloaded tracks, the possible formats depend on the output sink.
* channelMask: Channel mask, such that audio_is_output_channel(channelMask) is true.
* frameCount: Minimum size of track PCM buffer in frames. This defines the
* application's contribution to the
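For context on the format parameter documented above, here is a minimal, hypothetical sketch of creating a mixed (non-direct) track with a server-supported PCM format; the constructor arguments are abbreviated and the exact AudioTrack constructor/set() signature should be taken from AudioTrack.h itself, not from this sketch:

    // Hypothetical sketch only -- not part of this change.
    sp<AudioTrack> track = new AudioTrack(
            AUDIO_STREAM_MUSIC,         // streamType
            44100,                      // sampleRate in Hz
            AUDIO_FORMAT_PCM_16_BIT,    // format: mixed tracks take any server-supported PCM
            AUDIO_CHANNEL_OUT_STEREO,   // channelMask
            0 /* frameCount: 0 lets AudioTrack pick the minimum */);
    if (track->initCheck() != NO_ERROR) {
        // creation failed; check format/flags against the output
    }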
diff --git a/include/ndk/NdkMediaCodec.h b/include/ndk/NdkMediaCodec.h
index 73ece1b..2af88d0b 100644
--- a/include/ndk/NdkMediaCodec.h
+++ b/include/ndk/NdkMediaCodec.h
@@ -49,6 +49,7 @@
enum {
AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM = 4,
+ AMEDIACODEC_CONFIGURE_FLAG_ENCODE = 1,
AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED = -3,
AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED = -2,
AMEDIACODEC_INFO_TRY_AGAIN_LATER = -1
@@ -56,20 +57,22 @@
/**
- * Create decoder by name. Use this if you know the exact codec you want to use.
+ * Create codec by name. Use this if you know the exact codec you want to use.
+ * When configuring, you will need to specify whether to use the codec as an
+ * encoder or decoder.
*/
-AMediaCodec* AMediaCodec_createByCodecName(const char *name);
+AMediaCodec* AMediaCodec_createCodecByName(const char *name);
/**
* Create codec by mime type. Most applications will use this, specifying a
* mime type obtained from media extractor.
*/
-AMediaCodec* AMediaCodec_createByCodecType(const char *mime_type);
+AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type);
/**
* Create encoder by name.
*/
-AMediaCodec* AMediaCodec_createEncoderByName(const char *name);
+AMediaCodec* AMediaCodec_createEncoderByType(const char *mime_type);
/**
* delete the codec and free its resources
@@ -79,7 +82,8 @@
/**
* Configure the codec. For decoding you would typically get the format from an extractor.
*/
-int AMediaCodec_configure(AMediaCodec*, const AMediaFormat* format, ANativeWindow* surface); // TODO: other args
+int AMediaCodec_configure(AMediaCodec*, const AMediaFormat* format,
+ ANativeWindow* surface, uint32_t flags); // TODO: other args
/**
* Start the codec. A codec must be configured before it can be started, and must be started
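To illustrate the renamed create functions and the new flags argument of AMediaCodec_configure(), a hedged sketch of an encoder setup follows; the AMediaFormat helpers and the literal key strings are assumed from NdkMediaFormat.h, and a real encoder would need more keys (bitrate, frame rate, color format) than shown here:

    // Sketch only: encoder path using the renamed API.
    AMediaFormat* fmt = AMediaFormat_new();
    AMediaFormat_setString(fmt, "mime", "video/avc");
    AMediaFormat_setInt32(fmt, "width", 1280);
    AMediaFormat_setInt32(fmt, "height", 720);

    AMediaCodec* enc = AMediaCodec_createEncoderByType("video/avc");
    // The flags argument tells the codec it is being configured as an encoder.
    AMediaCodec_configure(enc, fmt, NULL /* no surface */, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaCodec_start(enc);
    // ... feed input buffers, drain output buffers ...
    AMediaCodec_stop(enc);
    AMediaCodec_delete(enc);
    AMediaFormat_delete(fmt);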
diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk
index 2bb6dbe..e0ca8af 100644
--- a/media/libeffects/downmix/Android.mk
+++ b/media/libeffects/downmix/Android.mk
@@ -15,16 +15,10 @@
LOCAL_MODULE_RELATIVE_PATH := soundfx
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
-LOCAL_PRELINK_MODULE := false
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk
index 9e8cb83..ea3c59d 100644
--- a/media/libeffects/preprocessing/Android.mk
+++ b/media/libeffects/preprocessing/Android.mk
@@ -24,12 +24,7 @@
libutils \
liblog
-ifeq ($(TARGET_SIMULATOR),true)
-LOCAL_LDLIBS += -ldl
-else
LOCAL_SHARED_LIBRARIES += libdl
-endif
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index dc4f90e..aaaa3f1 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -315,12 +315,20 @@
flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
}
- if (audio_is_linear_pcm(format)) {
- mFrameSize = channelCount * audio_bytes_per_sample(format);
- mFrameSizeAF = channelCount * sizeof(int16_t);
+ if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (audio_is_linear_pcm(format)) {
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ } else {
+ mFrameSize = sizeof(uint8_t);
+ }
+ mFrameSizeAF = mFrameSize;
} else {
- mFrameSize = sizeof(uint8_t);
- mFrameSizeAF = sizeof(uint8_t);
+ ALOG_ASSERT(audio_is_linear_pcm(format));
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ mFrameSizeAF = channelCount * audio_bytes_per_sample(
+ format == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : format);
+ // createTrack will return an error if PCM format is not supported by server,
+ // so no need to check for specific PCM formats here
}
// Make copy of input parameter offloadInfo so that in the future:
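Restated as a standalone helper, the frame-size selection above reads roughly as follows; the helper name, struct, and return shape are illustrative only and do not appear in AudioTrack.cpp:

    // Illustrative restatement of the logic above, not the actual member code.
    struct FrameSizes { size_t frameSize; size_t frameSizeAF; };

    static FrameSizes computeFrameSizes(audio_output_flags_t flags,
                                        audio_format_t format, uint32_t channelCount) {
        FrameSizes fs;
        if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
            // Direct/offloaded tracks: data reaches the sink as-is, so client and
            // AudioFlinger frame sizes match (one byte per "frame" for non-PCM).
            fs.frameSize = audio_is_linear_pcm(format)
                    ? channelCount * audio_bytes_per_sample(format)
                    : sizeof(uint8_t);
            fs.frameSizeAF = fs.frameSize;
        } else {
            // Mixed tracks must be linear PCM; 8-bit data is widened to 16-bit on
            // the client side before it is handed to the server.
            fs.frameSize = channelCount * audio_bytes_per_sample(format);
            fs.frameSizeAF = channelCount * audio_bytes_per_sample(
                    format == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : format);
        }
        return fs;
    }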
@@ -931,7 +939,11 @@
// Ensure that buffer alignment matches channel count
// 8-bit data in shared memory is not currently supported by AudioFlinger
- size_t alignment = /* mFormat == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2;
+ size_t alignment = audio_bytes_per_sample(
+ mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat);
+ if (alignment & 1) {
+ alignment = 1;
+ }
if (mChannelCount > 1) {
// More than 2 channels does not require stronger alignment than stereo
alignment <<= 1;
@@ -947,7 +959,7 @@
// there's no frameCount parameter.
// But when initializing a shared buffer AudioTrack via set(),
// there _is_ a frameCount parameter. We silently ignore it.
- frameCount = mSharedBuffer->size()/mChannelCount/sizeof(int16_t);
+ frameCount = mSharedBuffer->size() / mFrameSizeAF;
} else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
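Taking the two shared-buffer hunks together, the alignment requirement now tracks the sample size of the server-side format, and the implied frame count comes from mFrameSizeAF rather than a hard-coded 16-bit stereo assumption. A worked example with purely illustrative numbers:

    // Illustrative arithmetic, not code from this change: a 32768-byte shared
    // buffer holding stereo AUDIO_FORMAT_PCM_16_BIT data.
    //   alignment    = audio_bytes_per_sample(PCM_16_BIT) = 2; channelCount > 1 doubles it to 4
    //   mFrameSizeAF = 2 channels * 2 bytes = 4 bytes per frame
    //   frameCount   = 32768 / 4 = 8192 frames
    // An 8-bit client format still maps to a 16-bit server format, so the same
    // arithmetic applies after the client-side conversion.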
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index e7f009e..9592af8 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -88,16 +88,16 @@
}
-AMediaCodec* AMediaCodec_createByCodecName(const char *name) {
+AMediaCodec* AMediaCodec_createCodecByName(const char *name) {
return createAMediaCodec(name, false, false);
}
-AMediaCodec* AMediaCodec_createByCodecType(const char *mime_type) {
+AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) {
return createAMediaCodec(mime_type, true, false);
}
-AMediaCodec* AMediaCodec_createEncoderByName(const char *name) {
- return createAMediaCodec(name, false, true);
+AMediaCodec* AMediaCodec_createEncoderByType(const char *mime_type) {
+ return createAMediaCodec(mime_type, true, true);
}
int AMediaCodec_delete(AMediaCodec *mData) {
@@ -115,7 +115,8 @@
return OK;
}
-int AMediaCodec_configure(AMediaCodec *mData, const AMediaFormat* format, ANativeWindow* window) {
+int AMediaCodec_configure(
+ AMediaCodec *mData, const AMediaFormat* format, ANativeWindow* window, uint32_t flags) {
sp<AMessage> nativeFormat;
AMediaFormat_getFormat(format, &nativeFormat);
ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
@@ -124,7 +125,7 @@
surface = (Surface*) window;
}
- return translate_error(mData->mCodec->configure(nativeFormat, surface, NULL, 0));
+ return translate_error(mData->mCodec->configure(nativeFormat, surface, NULL, flags));
}
int AMediaCodec_start(AMediaCodec *mData) {
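On the decoder side, existing callers of AMediaCodec_configure() now pass an explicit flags value; a hedged sketch of the updated call pattern, where format and window are placeholders obtained elsewhere (for example from AMediaExtractor_getTrackFormat() and the app's ANativeWindow) rather than code from this change:

    // Sketch: decoder path after the rename; flags is 0 because no encoder
    // behavior is requested.
    AMediaCodec* dec = AMediaCodec_createDecoderByType("video/avc");
    AMediaCodec_configure(dec, format, window /* may be NULL for ByteBuffer output */, 0);
    AMediaCodec_start(dec);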