Merge "Add done() to SingleStateQueue"
diff --git a/include/media/JetPlayer.h b/include/media/JetPlayer.h
index 388f767..63d1980 100644
--- a/include/media/JetPlayer.h
+++ b/include/media/JetPlayer.h
@@ -22,6 +22,7 @@
#include <libsonivox/jet.h>
#include <libsonivox/eas_types.h>
#include <media/AudioTrack.h>
+#include <media/MidiIoWrapper.h>
namespace android {
@@ -86,15 +87,13 @@
int mMaxTracks; // max number of MIDI tracks, usually 32
EAS_DATA_HANDLE mEasData;
- EAS_FILE_LOCATOR mEasJetFileLoc;
+ sp<MidiIoWrapper> mIoWrapper;
EAS_PCM* mAudioBuffer;// EAS renders the MIDI data into this buffer,
sp<AudioTrack> mAudioTrack; // and we play it in this audio track
int mTrackBufferSize;
S_JET_STATUS mJetStatus;
S_JET_STATUS mPreviousJetStatus;
- char mJetFilePath[PATH_MAX];
-
class JetPlayerThread : public Thread {
public:
JetPlayerThread(JetPlayer *player) : mPlayer(player) {
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index c412299..0fc7dcf 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -43,8 +43,6 @@
template<typename T> class SortedVector;
enum player_type {
- PV_PLAYER = 1,
- SONIVOX_PLAYER = 2,
STAGEFRIGHT_PLAYER = 3,
NU_PLAYER = 4,
// Test players are available only in the 'test' and 'eng' builds.
diff --git a/include/media/MidiIoWrapper.h b/include/media/MidiIoWrapper.h
new file mode 100644
index 0000000..e6f8cf7
--- /dev/null
+++ b/include/media/MidiIoWrapper.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MIDI_IO_WRAPPER_H_
+#define MIDI_IO_WRAPPER_H_
+
+#include <libsonivox/eas_types.h>
+
+#include "media/stagefright/DataSource.h"
+
+namespace android {
+
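+// Wraps a file path, a file descriptor range, or a DataSource behind the
+// Sonivox EAS_FILE_LOCATOR callback interface so the EAS engine can read MIDI data.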
+class MidiIoWrapper : public RefBase {
+public:
+ MidiIoWrapper(const char *path);
+ MidiIoWrapper(int fd, off64_t offset, int64_t size);
+ MidiIoWrapper(const sp<DataSource> &source);
+
+ ~MidiIoWrapper();
+
+ int readAt(void *buffer, int offset, int size);
+ int size();
+
+ EAS_FILE_LOCATOR getLocator();
+
+private:
+ int mFd;
+ off64_t mBase;
+ int64_t mLength;
+ sp<DataSource> mDataSource;
+ EAS_FILE mEasFile;
+};
+
+
+} // namespace android
+
+#endif // MIDI_IO_WRAPPER_H_
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index 8605d99..c2bbe4d 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -52,6 +52,12 @@
static sp<IMediaCodecList> getLocalInstance();
private:
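+ // Death recipient for the binder that backs the remote codec list.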
+ class BinderDeathObserver : public IBinder::DeathRecipient {
+ void binderDied(const wp<IBinder> &the_late_who __unused);
+ };
+
+ static sp<BinderDeathObserver> sBinderDeathObserver;
+
enum Section {
SECTION_TOPLEVEL,
SECTION_DECODERS,
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index e67d4d5..1f9ff45 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -36,6 +36,7 @@
extern const char *MEDIA_MIMETYPE_AUDIO_MPEG; // layer III
extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I;
extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II;
+extern const char *MEDIA_MIMETYPE_AUDIO_MIDI;
extern const char *MEDIA_MIMETYPE_AUDIO_AAC;
extern const char *MEDIA_MIMETYPE_AUDIO_QCELP;
extern const char *MEDIA_MIMETYPE_AUDIO_VORBIS;
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 3ddeb4e..6aeb919 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -766,6 +766,122 @@
return 0;
} /* end LvmBundle_process */
+
+//----------------------------------------------------------------------------
+// EqualizerUpdateActiveParams()
+//----------------------------------------------------------------------------
+// Purpose: Update ActiveParams for Equalizer
+//
+// Inputs:
+// pContext: effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+void EqualizerUpdateActiveParams(EffectContext *pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+
+ /* Get the current settings */
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerUpdateActiveParams")
+ //ALOGV("\tEqualizerUpdateActiveParams Succesfully returned from LVM_GetControlParameters\n");
+ //ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+ ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
+ }
+
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerUpdateActiveParams")
+ //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+}
+
+//----------------------------------------------------------------------------
+// LvmEffect_limitLevel()
+//----------------------------------------------------------------------------
+// Purpose: limit the overall level to a value less than 0 dB preserving
+// the overall EQ band gain and BassBoost relative levels.
+//
+// Inputs:
+// pContext: effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+void LvmEffect_limitLevel(EffectContext *pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+
+ /* Get the current settings */
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_limitLevel")
+ //ALOGV("\tLvmEffect_limitLevel Succesfully returned from LVM_GetControlParameters\n");
+ //ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+ int gainCorrection = 0;
+ //Count the energy contribution per band for EQ and BassBoost only if they are active.
+ float energyContribution = 0;
+
+ //EQ contribution
+ if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ float bandEnergy = (pContext->pBundledContext->bandGaindB[i] *
+ LimitLevel_bandEnergyContribution[i])/15.0;
+ if (bandEnergy > 0)
+ energyContribution += bandEnergy;
+ }
+ }
+
+ //BassBoost contribution
+ if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
+ float bandEnergy = (pContext->pBundledContext->BassStrengthSaved *
+ LimitLevel_bassBoostEnergyContribution)/1000.0;
+ if (bandEnergy > 0)
+ energyContribution += bandEnergy;
+ }
+
+ //Virtualizer contribution
+ if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
+ energyContribution += LimitLevel_virtualizerContribution;
+ }
+
+ //roundoff
+ int maxLevelRound = (int)(energyContribution + 0.99);
+ if (maxLevelRound + pContext->pBundledContext->volume > 0) {
+ gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
+ }
+
+ ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
+ if (ActiveParams.VC_EffectLevel < -96) {
+ ActiveParams.VC_EffectLevel = -96;
+ }
+ ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume,
+ gainCorrection, ActiveParams.VC_EffectLevel);
+
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_limitLevel")
+ //ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+ //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
+ if (pContext->pBundledContext->firstVolume == LVM_TRUE){
+ LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
+ ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
+ pContext->pBundledContext->firstVolume = LVM_FALSE;
+ }
+}
+
//----------------------------------------------------------------------------
// LvmEffect_enable()
//----------------------------------------------------------------------------
@@ -814,6 +930,7 @@
//ALOGV("\tLvmEffect_enable Succesfully called LVM_SetControlParameters\n");
//ALOGV("\tLvmEffect_enable end");
+ LvmEffect_limitLevel(pContext);
return 0;
}
@@ -864,6 +981,7 @@
//ALOGV("\tLvmEffect_disable Succesfully called LVM_SetControlParameters\n");
//ALOGV("\tLvmEffect_disable end");
+ LvmEffect_limitLevel(pContext);
return 0;
}
@@ -1099,6 +1217,8 @@
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "BassSetStrength")
//ALOGV("\tBassSetStrength Succesfully called LVM_SetControlParameters\n");
+
+ LvmEffect_limitLevel(pContext);
} /* end BassSetStrength */
//----------------------------------------------------------------------------
@@ -1159,13 +1279,14 @@
/* Virtualizer parameters */
ActiveParams.CS_EffectLevel = (int)((strength*32767)/1000);
- //ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength );
- //ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel );
+ ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength );
+ ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel );
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VirtualizerSetStrength")
//ALOGV("\tVirtualizerSetStrength Succesfully called LVM_SetControlParameters\n\n");
+ LvmEffect_limitLevel(pContext);
} /* end setStrength */
//----------------------------------------------------------------------------
@@ -1343,104 +1464,6 @@
}
//----------------------------------------------------------------------------
-// EqualizerLimitBandLevels()
-//----------------------------------------------------------------------------
-// Purpose: limit all EQ band gains to a value less than 0 dB while
-// preserving the relative band levels.
-//
-// Inputs:
-// pContext: effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-void EqualizerLimitBandLevels(EffectContext *pContext) {
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
-
- /* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerLimitBandLevels")
- //ALOGV("\tEqualizerLimitBandLevels Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tEqualizerLimitBandLevels just Got -> %d\n",
- // ActiveParams.pEQNB_BandDefinition[band].Gain);
-
- // Apply a volume correction to avoid clipping in the EQ based on 2 factors:
- // - the maximum EQ band gain: the volume correction is such that the total of volume + max
- // band gain is <= 0 dB
- // - the average gain in all bands weighted by their proximity to max gain band.
- int maxGain = 0;
- int avgGain = 0;
- int avgCount = 0;
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- if (pContext->pBundledContext->bandGaindB[i] >= maxGain) {
- int tmpMaxGain = pContext->pBundledContext->bandGaindB[i];
- int tmpAvgGain = 0;
- int tmpAvgCount = 0;
- for (int j = 0; j < FIVEBAND_NUMBANDS; j++) {
- int gain = pContext->pBundledContext->bandGaindB[j];
- // skip current band and gains < 0 dB
- if (j == i || gain < 0)
- continue;
- // no need to continue if one band not processed yet has a higher gain than current
- // max
- if (gain > tmpMaxGain) {
- // force skipping "if (tmpAvgGain >= avgGain)" below as tmpAvgGain is not
- // meaningful in this case
- tmpAvgGain = -1;
- break;
- }
-
- int weight = 1;
- if (j < (i + 2) && j > (i - 2))
- weight = 4;
- tmpAvgGain += weight * gain;
- tmpAvgCount += weight;
- }
- if (tmpAvgGain >= avgGain) {
- maxGain = tmpMaxGain;
- avgGain = tmpAvgGain;
- avgCount = tmpAvgCount;
- }
- }
- ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
- ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
- }
-
- int gainCorrection = 0;
- if (maxGain + pContext->pBundledContext->volume > 0) {
- gainCorrection = maxGain + pContext->pBundledContext->volume;
- }
- if (avgCount) {
- gainCorrection += avgGain/avgCount;
- }
-
- ALOGV("EqualizerLimitBandLevels() gainCorrection %d maxGain %d avgGain %d avgCount %d",
- gainCorrection, maxGain, avgGain, avgCount);
-
- ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
- if (ActiveParams.VC_EffectLevel < -96) {
- ActiveParams.VC_EffectLevel = -96;
- }
-
- /* Activate the initial settings */
- LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerLimitBandLevels")
- //ALOGV("\tEqualizerLimitBandLevels just Set -> %d\n",
- // ActiveParams.pEQNB_BandDefinition[band].Gain);
-
- //ALOGV("\tEqualizerLimitBandLevels just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
- if(pContext->pBundledContext->firstVolume == LVM_TRUE){
- LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
- ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
- pContext->pBundledContext->firstVolume = LVM_FALSE;
- }
-}
-
-
-//----------------------------------------------------------------------------
// EqualizerGetBandLevel()
//----------------------------------------------------------------------------
// Purpose: Retrieve the gain currently being used for the band passed in
@@ -1482,7 +1505,8 @@
pContext->pBundledContext->bandGaindB[band] = gainRounded;
pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
- EqualizerLimitBandLevels(pContext);
+ EqualizerUpdateActiveParams(pContext);
+ LvmEffect_limitLevel(pContext);
}
//----------------------------------------------------------------------------
@@ -1617,7 +1641,8 @@
EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS];
}
- EqualizerLimitBandLevels(pContext);
+ EqualizerUpdateActiveParams(pContext);
+ LvmEffect_limitLevel(pContext);
//ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n");
return;
@@ -1672,7 +1697,7 @@
pContext->pBundledContext->volume = level / 100;
}
- EqualizerLimitBandLevels(pContext);
+ LvmEffect_limitLevel(pContext);
return 0;
} /* end VolumeSetVolumeLevel */
@@ -1721,7 +1746,7 @@
pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved;
}
- EqualizerLimitBandLevels(pContext);
+ LvmEffect_limitLevel(pContext);
return 0;
} /* end setMute */
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 420f973..b3071f4 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -142,6 +142,7 @@
{1800001, 7000000},
{7000001, 1}};
+//Note: If these frequencies change, please update LimitLevel values accordingly.
static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {
60, /* Frequencies in Hz */
230,
@@ -192,6 +193,20 @@
{"Pop"},
{"Rock"}};
+/* The following tables have been computed using the actual levels measured by the output of
+ * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
+ * the actual energy that 'could' be present in the given band.
+ * If the frequency values in EQNB_5BandPresetsFrequencies change, these values might need to be
+ * updated.
+ */
+
+static const float LimitLevel_bandEnergyContribution[FIVEBAND_NUMBANDS] = {
+ 5.0, 6.5, 6.45, 4.8, 1.7 };
+
+static const float LimitLevel_bassBoostEnergyContribution = 6.7;
+
+static const float LimitLevel_virtualizerContribution = 1.9;
+
#if __cplusplus
} // extern "C"
#endif
diff --git a/media/libeffects/testlibs/Android.mk_ b/media/libeffects/testlibs/Android.mk_
index 672ebba..14c373f 100644
--- a/media/libeffects/testlibs/Android.mk_
+++ b/media/libeffects/testlibs/Android.mk_
@@ -3,24 +3,18 @@
# Test Reverb library
include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= \
+LOCAL_SRC_FILES := \
EffectReverb.c.arm \
EffectsMath.c.arm
-LOCAL_CFLAGS+= -O2
+
+LOCAL_CFLAGS := -O2
LOCAL_SHARED_LIBRARIES := \
- libcutils
+ libcutils \
+ libdl
LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libreverbtest
-
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
-ifneq ($(TARGET_SIMULATOR),true)
-LOCAL_SHARED_LIBRARIES += libdl
-endif
+LOCAL_MODULE := libreverbtest
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
@@ -33,7 +27,7 @@
# Test Equalizer library
include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= \
+LOCAL_SRC_FILES := \
EffectsMath.c.arm \
EffectEqualizer.cpp \
AudioBiquadFilter.cpp.arm \
@@ -42,21 +36,14 @@
AudioShelvingFilter.cpp.arm \
AudioEqualizer.cpp.arm
-LOCAL_CFLAGS+= -O2
+LOCAL_CFLAGS := -O2
LOCAL_SHARED_LIBRARIES := \
- libcutils
+ libcutils \
+ libdl
LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libequalizertest
-
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
-ifneq ($(TARGET_SIMULATOR),true)
-LOCAL_SHARED_LIBRARIES += libdl
-endif
+LOCAL_MODULE := libequalizertest
LOCAL_C_INCLUDES := \
$(call include-path-for, graphics corecg) \
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index b6801f5..dc82102 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -42,6 +42,7 @@
mediarecorder.cpp \
IMediaMetadataRetriever.cpp \
mediametadataretriever.cpp \
+ MidiIoWrapper.cpp \
ToneGenerator.cpp \
JetPlayer.cpp \
IOMX.cpp \
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 6bdf865..735db5c 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -335,6 +335,11 @@
((flags | AUDIO_OUTPUT_FLAG_DIRECT) & ~AUDIO_OUTPUT_FLAG_FAST);
}
+ // force direct flag if HW A/V sync requested
+ if ((flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) {
+ flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_DIRECT);
+ }
+
if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
if (audio_is_linear_pcm(format)) {
mFrameSize = channelCount * audio_bytes_per_sample(format);
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index a7568b9..4d33d40 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -261,6 +261,7 @@
size_t size;
status_t status = decode(fd, offset, length, &sampleRate, &numChannels, &format,
heap, &size);
+ ::close(fd);
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->writeInt32(sampleRate);
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index f0f1832..721d8d7 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -36,7 +36,6 @@
mPaused(false),
mMaxTracks(maxTracks),
mEasData(NULL),
- mEasJetFileLoc(NULL),
mTrackBufferSize(trackBufferSize)
{
ALOGV("JetPlayer constructor");
@@ -133,10 +132,7 @@
JET_Shutdown(mEasData);
EAS_Shutdown(mEasData);
}
- if (mEasJetFileLoc) {
- free(mEasJetFileLoc);
- mEasJetFileLoc = NULL;
- }
+ mIoWrapper.clear();
if (mAudioTrack != 0) {
mAudioTrack->stop();
mAudioTrack->flush();
@@ -327,16 +323,9 @@
Mutex::Autolock lock(mMutex);
- mEasJetFileLoc = (EAS_FILE_LOCATOR) malloc(sizeof(EAS_FILE));
- strncpy(mJetFilePath, path, sizeof(mJetFilePath));
- mJetFilePath[sizeof(mJetFilePath) - 1] = '\0';
- mEasJetFileLoc->path = mJetFilePath;
+ mIoWrapper = new MidiIoWrapper(path);
- mEasJetFileLoc->fd = 0;
- mEasJetFileLoc->length = 0;
- mEasJetFileLoc->offset = 0;
-
- EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
+ EAS_RESULT result = JET_OpenFile(mEasData, mIoWrapper->getLocator());
if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
@@ -352,13 +341,9 @@
Mutex::Autolock lock(mMutex);
- mEasJetFileLoc = (EAS_FILE_LOCATOR) malloc(sizeof(EAS_FILE));
- mEasJetFileLoc->fd = fd;
- mEasJetFileLoc->offset = offset;
- mEasJetFileLoc->length = length;
- mEasJetFileLoc->path = NULL;
+ mIoWrapper = new MidiIoWrapper(fd, offset, length);
- EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
+ EAS_RESULT result = JET_OpenFile(mEasData, mIoWrapper->getLocator());
if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
@@ -459,7 +444,6 @@
//-------------------------------------------------------------------------------------------------
void JetPlayer::dump()
{
- ALOGE("JetPlayer dump: JET file=%s", mEasJetFileLoc->path);
}
void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus)
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
new file mode 100644
index 0000000..2181111
--- /dev/null
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MidiIoWrapper"
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include "media/MidiIoWrapper.h"
+
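+// File-scope trampolines handed to the EAS engine; 'handle' is the MidiIoWrapper
+// instance provided through getLocator().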
+static int readAt(void *handle, void *buffer, int pos, int size) {
+ return ((android::MidiIoWrapper*)handle)->readAt(buffer, pos, size);
+}
+static int size(void *handle) {
+ return ((android::MidiIoWrapper*)handle)->size();
+}
+
+namespace android {
+
+MidiIoWrapper::MidiIoWrapper(const char *path) {
+ ALOGV("MidiIoWrapper(%s)", path);
+ mFd = open(path, O_RDONLY | O_LARGEFILE);
+ mBase = 0;
+ mLength = lseek(mFd, 0, SEEK_END);
+}
+
+MidiIoWrapper::MidiIoWrapper(int fd, off64_t offset, int64_t size) {
+ ALOGV("MidiIoWrapper(fd=%d)", fd);
+ mFd = dup(fd);
+ mBase = offset;
+ mLength = size;
+}
+
+MidiIoWrapper::MidiIoWrapper(const sp<DataSource> &source) {
+ mFd = -1; // no raw file descriptor in this mode
+ mBase = 0;
+ mDataSource = source;
+ off64_t l;
+ if (mDataSource->getSize(&l) == OK) {
+ mLength = l;
+ } else {
+ mLength = 0;
+ }
+}
+
+MidiIoWrapper::~MidiIoWrapper() {
+ ALOGV("~MidiIoWrapper");
+ if (mFd >= 0) {
+ close(mFd);
+ }
+}
+
+int MidiIoWrapper::readAt(void *buffer, int offset, int size) {
+ ALOGV("readAt(%p, %d, %d)", buffer, offset, size);
+
+ if (mDataSource != NULL) {
+ return mDataSource->readAt(offset, buffer, size);
+ }
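+ // Plain fd case: seek relative to the wrapped base offset and clamp the read
+ // so it does not run past the declared length.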
+ lseek(mFd, mBase + offset, SEEK_SET);
+ if (offset + size > mLength) {
+ size = mLength - offset;
+ }
+ return read(mFd, buffer, size);
+}
+
+int MidiIoWrapper::size() {
+ ALOGV("size() = %d", mLength);
+ return mLength;
+}
+
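+// Builds an EAS_FILE whose callbacks route reads and size queries back through
+// this wrapper instance.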
+EAS_FILE_LOCATOR MidiIoWrapper::getLocator() {
+ mEasFile.handle = this;
+ mEasFile.readAt = ::readAt;
+ mEasFile.size = ::size;
+ return &mEasFile;
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 2cf5710..9d8fe62 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -15,8 +15,6 @@
MediaPlayerService.cpp \
MediaRecorderClient.cpp \
MetadataRetrieverClient.cpp \
- MidiFile.cpp \
- MidiMetadataRetriever.cpp \
RemoteDisplay.cpp \
SharedLibrary.cpp \
StagefrightPlayer.cpp \
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index aeefb4c..48884b9 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -15,6 +15,7 @@
** limitations under the License.
*/
+//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayerFactory"
#include <utils/Log.h>
@@ -29,7 +30,6 @@
#include "MediaPlayerFactory.h"
-#include "MidiFile.h"
#include "TestPlayerStub.h"
#include "StagefrightPlayer.h"
#include "nuplayer/NuPlayerDriver.h"
@@ -279,75 +279,6 @@
}
};
-class SonivoxPlayerFactory : public MediaPlayerFactory::IFactory {
- public:
- virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
- const char* url,
- float curScore) {
- static const float kOurScore = 0.4;
- static const char* const FILE_EXTS[] = { ".mid",
- ".midi",
- ".smf",
- ".xmf",
- ".mxmf",
- ".imy",
- ".rtttl",
- ".rtx",
- ".ota" };
- if (kOurScore <= curScore)
- return 0.0;
-
- // use MidiFile for MIDI extensions
- int lenURL = strlen(url);
- for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
- int len = strlen(FILE_EXTS[i]);
- int start = lenURL - len;
- if (start > 0) {
- if (!strncasecmp(url + start, FILE_EXTS[i], len)) {
- return kOurScore;
- }
- }
- }
-
- return 0.0;
- }
-
- virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
- int fd,
- int64_t offset,
- int64_t length,
- float curScore) {
- static const float kOurScore = 0.8;
-
- if (kOurScore <= curScore)
- return 0.0;
-
- // Some kind of MIDI?
- EAS_DATA_HANDLE easdata;
- if (EAS_Init(&easdata) == EAS_SUCCESS) {
- EAS_FILE locator;
- locator.path = NULL;
- locator.fd = fd;
- locator.offset = offset;
- locator.length = length;
- EAS_HANDLE eashandle;
- if (EAS_OpenFile(easdata, &locator, &eashandle) == EAS_SUCCESS) {
- EAS_CloseFile(easdata, eashandle);
- EAS_Shutdown(easdata);
- return kOurScore;
- }
- EAS_Shutdown(easdata);
- }
-
- return 0.0;
- }
-
- virtual sp<MediaPlayerBase> createPlayer() {
- ALOGV(" create MidiFile");
- return new MidiFile();
- }
-};
-
class TestPlayerFactory : public MediaPlayerFactory::IFactory {
public:
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
@@ -374,7 +305,6 @@
registerFactory_l(new StagefrightPlayerFactory(), STAGEFRIGHT_PLAYER);
registerFactory_l(new NuPlayerFactory(), NU_PLAYER);
- registerFactory_l(new SonivoxPlayerFactory(), SONIVOX_PLAYER);
registerFactory_l(new TestPlayerFactory(), TEST_PLAYER);
sInitComplete = true;
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 071b894..cf6771e 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -70,7 +70,6 @@
#include "MetadataRetrieverClient.h"
#include "MediaPlayerFactory.h"
-#include "MidiFile.h"
#include "TestPlayerStub.h"
#include "StagefrightPlayer.h"
#include "nuplayer/NuPlayerDriver.h"
@@ -1398,7 +1397,6 @@
Exit:
if (player != 0) player->reset();
- ::close(fd);
return status;
}
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index fa28451..715cc0c 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -35,7 +35,6 @@
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/MediaPlayerInterface.h>
#include <private/media/VideoFrame.h>
-#include "MidiMetadataRetriever.h"
#include "MetadataRetrieverClient.h"
#include "StagefrightMetadataRetriever.h"
#include "MediaPlayerFactory.h"
@@ -90,10 +89,6 @@
p = new StagefrightMetadataRetriever;
break;
}
- case SONIVOX_PLAYER:
- ALOGV("create midi metadata retriever");
- p = new MidiMetadataRetriever();
- break;
default:
// TODO:
// support for TEST_PLAYER
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
deleted file mode 100644
index 749ef96..0000000
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ /dev/null
@@ -1,560 +0,0 @@
-/* MidiFile.cpp
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MidiFile"
-#include "utils/Log.h"
-
-#include <stdio.h>
-#include <assert.h>
-#include <limits.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sched.h>
-#include <utils/threads.h>
-#include <libsonivox/eas_reverb.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <unistd.h>
-
-#include <system/audio.h>
-
-#include "MidiFile.h"
-
-// ----------------------------------------------------------------------------
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-// The midi engine buffers are a bit small (128 frames), so we batch them up
-static const int NUM_BUFFERS = 4;
-
-// TODO: Determine appropriate return codes
-static status_t ERROR_NOT_OPEN = -1;
-static status_t ERROR_OPEN_FAILED = -2;
-static status_t ERROR_EAS_FAILURE = -3;
-static status_t ERROR_ALLOCATE_FAILED = -4;
-
-static const S_EAS_LIB_CONFIG* pLibConfig = NULL;
-
-MidiFile::MidiFile() :
- mEasData(NULL), mEasHandle(NULL), mAudioBuffer(NULL),
- mPlayTime(-1), mDuration(-1), mState(EAS_STATE_ERROR),
- mStreamType(AUDIO_STREAM_MUSIC), mLoop(false), mExit(false),
- mPaused(false), mRender(false), mTid(-1)
-{
- ALOGV("constructor");
-
- mFileLocator.path = NULL;
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
-
- // get the library configuration and do sanity check
- if (pLibConfig == NULL)
- pLibConfig = EAS_Config();
- if ((pLibConfig == NULL) || (LIB_VERSION != pLibConfig->libVersion)) {
- ALOGE("EAS library/header mismatch");
- goto Failed;
- }
-
- // initialize EAS library
- if (EAS_Init(&mEasData) != EAS_SUCCESS) {
- ALOGE("EAS_Init failed");
- goto Failed;
- }
-
- // select reverb preset and enable
- EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_PRESET, EAS_PARAM_REVERB_CHAMBER);
- EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_BYPASS, EAS_FALSE);
-
- // create playback thread
- {
- Mutex::Autolock l(mMutex);
- mThread = new MidiFileThread(this);
- mThread->run("midithread", ANDROID_PRIORITY_AUDIO);
- mCondition.wait(mMutex);
- ALOGV("thread started");
- }
-
- // indicate success
- if (mTid > 0) {
- ALOGV(" render thread(%d) started", mTid);
- mState = EAS_STATE_READY;
- }
-
-Failed:
- return;
-}
-
-status_t MidiFile::initCheck()
-{
- if (mState == EAS_STATE_ERROR) return ERROR_EAS_FAILURE;
- return NO_ERROR;
-}
-
-MidiFile::~MidiFile() {
- ALOGV("MidiFile destructor");
- release();
-}
-
-status_t MidiFile::setDataSource(
- const sp<IMediaHTTPService> & /*httpService*/,
- const char* path,
- const KeyedVector<String8, String8> *) {
- ALOGV("MidiFile::setDataSource url=%s", path);
- Mutex::Autolock lock(mMutex);
-
- // file still open?
- if (mEasHandle) {
- reset_nosync();
- }
-
- // open file and set paused state
- mFileLocator.path = strdup(path);
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
- EAS_RESULT result = EAS_OpenFile(mEasData, &mFileLocator, &mEasHandle);
- if (result == EAS_SUCCESS) {
- updateState();
- }
-
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_OpenFile failed: [%d]", (int)result);
- mState = EAS_STATE_ERROR;
- return ERROR_OPEN_FAILED;
- }
-
- mState = EAS_STATE_OPEN;
- mPlayTime = 0;
- return NO_ERROR;
-}
-
-status_t MidiFile::setDataSource(int fd, int64_t offset, int64_t length)
-{
- ALOGV("MidiFile::setDataSource fd=%d", fd);
- Mutex::Autolock lock(mMutex);
-
- // file still open?
- if (mEasHandle) {
- reset_nosync();
- }
-
- // open file and set paused state
- mFileLocator.fd = dup(fd);
- mFileLocator.offset = offset;
- mFileLocator.length = length;
- EAS_RESULT result = EAS_OpenFile(mEasData, &mFileLocator, &mEasHandle);
- updateState();
-
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_OpenFile failed: [%d]", (int)result);
- mState = EAS_STATE_ERROR;
- return ERROR_OPEN_FAILED;
- }
-
- mState = EAS_STATE_OPEN;
- mPlayTime = 0;
- return NO_ERROR;
-}
-
-status_t MidiFile::prepare()
-{
- ALOGV("MidiFile::prepare");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- EAS_RESULT result;
- if ((result = EAS_Prepare(mEasData, mEasHandle)) != EAS_SUCCESS) {
- ALOGE("EAS_Prepare failed: [%ld]", result);
- return ERROR_EAS_FAILURE;
- }
- updateState();
- return NO_ERROR;
-}
-
-status_t MidiFile::prepareAsync()
-{
- ALOGV("MidiFile::prepareAsync");
- status_t ret = prepare();
-
- // don't hold lock during callback
- if (ret == NO_ERROR) {
- sendEvent(MEDIA_PREPARED);
- } else {
- sendEvent(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ret);
- }
- return ret;
-}
-
-status_t MidiFile::start()
-{
- ALOGV("MidiFile::start");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
-
- // resuming after pause?
- if (mPaused) {
- if (EAS_Resume(mEasData, mEasHandle) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- mPaused = false;
- updateState();
- }
-
- mRender = true;
- if (mState == EAS_STATE_PLAY) {
- sendEvent(MEDIA_STARTED);
- }
-
- // wake up render thread
- ALOGV(" wakeup render thread");
- mCondition.signal();
- return NO_ERROR;
-}
-
-status_t MidiFile::stop()
-{
- ALOGV("MidiFile::stop");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- if (!mPaused && (mState != EAS_STATE_STOPPED)) {
- EAS_RESULT result = EAS_Pause(mEasData, mEasHandle);
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_Pause returned error %ld", result);
- return ERROR_EAS_FAILURE;
- }
- }
- mPaused = false;
- sendEvent(MEDIA_STOPPED);
- return NO_ERROR;
-}
-
-status_t MidiFile::seekTo(int position)
-{
- ALOGV("MidiFile::seekTo %d", position);
- // hold lock during EAS calls
- {
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- EAS_RESULT result;
- if ((result = EAS_Locate(mEasData, mEasHandle, position, false))
- != EAS_SUCCESS)
- {
- ALOGE("EAS_Locate returned %ld", result);
- return ERROR_EAS_FAILURE;
- }
- EAS_GetLocation(mEasData, mEasHandle, &mPlayTime);
- }
- sendEvent(MEDIA_SEEK_COMPLETE);
- return NO_ERROR;
-}
-
-status_t MidiFile::pause()
-{
- ALOGV("MidiFile::pause");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- if ((mState == EAS_STATE_PAUSING) || (mState == EAS_STATE_PAUSED)) return NO_ERROR;
- if (EAS_Pause(mEasData, mEasHandle) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- mPaused = true;
- sendEvent(MEDIA_PAUSED);
- return NO_ERROR;
-}
-
-bool MidiFile::isPlaying()
-{
- ALOGV("MidiFile::isPlaying, mState=%d", int(mState));
- if (!mEasHandle || mPaused) return false;
- return (mState == EAS_STATE_PLAY);
-}
-
-status_t MidiFile::getCurrentPosition(int* position)
-{
- ALOGV("MidiFile::getCurrentPosition");
- if (!mEasHandle) {
- ALOGE("getCurrentPosition(): file not open");
- return ERROR_NOT_OPEN;
- }
- if (mPlayTime < 0) {
- ALOGE("getCurrentPosition(): mPlayTime = %ld", mPlayTime);
- return ERROR_EAS_FAILURE;
- }
- *position = mPlayTime;
- return NO_ERROR;
-}
-
-status_t MidiFile::getDuration(int* duration)
-{
-
- ALOGV("MidiFile::getDuration");
- {
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) return ERROR_NOT_OPEN;
- *duration = mDuration;
- }
-
- // if no duration cached, get the duration
- // don't need a lock here because we spin up a new engine
- if (*duration < 0) {
- EAS_I32 temp;
- EAS_DATA_HANDLE easData = NULL;
- EAS_HANDLE easHandle = NULL;
- EAS_RESULT result = EAS_Init(&easData);
- if (result == EAS_SUCCESS) {
- result = EAS_OpenFile(easData, &mFileLocator, &easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_Prepare(easData, easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_ParseMetaData(easData, easHandle, &temp);
- }
- if (easHandle) {
- EAS_CloseFile(easData, easHandle);
- }
- if (easData) {
- EAS_Shutdown(easData);
- }
-
- if (result != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
-
- // cache successful result
- mDuration = *duration = int(temp);
- }
-
- return NO_ERROR;
-}
-
-status_t MidiFile::release()
-{
- ALOGV("MidiFile::release");
- Mutex::Autolock l(mMutex);
- reset_nosync();
-
- // wait for render thread to exit
- mExit = true;
- mCondition.signal();
-
- // wait for thread to exit
- if (mAudioBuffer) {
- mCondition.wait(mMutex);
- }
-
- // release resources
- if (mEasData) {
- EAS_Shutdown(mEasData);
- mEasData = NULL;
- }
- return NO_ERROR;
-}
-
-status_t MidiFile::reset()
-{
- ALOGV("MidiFile::reset");
- Mutex::Autolock lock(mMutex);
- return reset_nosync();
-}
-
-// call only with mutex held
-status_t MidiFile::reset_nosync()
-{
- ALOGV("MidiFile::reset_nosync");
- sendEvent(MEDIA_STOPPED);
- // close file
- if (mEasHandle) {
- EAS_CloseFile(mEasData, mEasHandle);
- mEasHandle = NULL;
- }
- if (mFileLocator.path) {
- free((void*)mFileLocator.path);
- mFileLocator.path = NULL;
- }
- if (mFileLocator.fd >= 0) {
- close(mFileLocator.fd);
- }
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
-
- mPlayTime = -1;
- mDuration = -1;
- mLoop = false;
- mPaused = false;
- mRender = false;
- return NO_ERROR;
-}
-
-status_t MidiFile::setLooping(int loop)
-{
- ALOGV("MidiFile::setLooping");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- loop = loop ? -1 : 0;
- if (EAS_SetRepeat(mEasData, mEasHandle, loop) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- return NO_ERROR;
-}
-
-status_t MidiFile::createOutputTrack() {
- if (mAudioSink->open(pLibConfig->sampleRate, pLibConfig->numChannels,
- CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT, 2 /*bufferCount*/) != NO_ERROR) {
- ALOGE("mAudioSink open failed");
- return ERROR_OPEN_FAILED;
- }
- return NO_ERROR;
-}
-
-int MidiFile::render() {
- EAS_RESULT result = EAS_FAILURE;
- EAS_I32 count;
- int temp;
- bool audioStarted = false;
-
- ALOGV("MidiFile::render");
-
- // allocate render buffer
- mAudioBuffer = new EAS_PCM[pLibConfig->mixBufferSize * pLibConfig->numChannels * NUM_BUFFERS];
- if (!mAudioBuffer) {
- ALOGE("mAudioBuffer allocate failed");
- goto threadExit;
- }
-
- // signal main thread that we started
- {
- Mutex::Autolock l(mMutex);
- mTid = gettid();
- ALOGV("render thread(%d) signal", mTid);
- mCondition.signal();
- }
-
- while (1) {
- mMutex.lock();
-
- // nothing to render, wait for client thread to wake us up
- while (!mRender && !mExit)
- {
- ALOGV("MidiFile::render - signal wait");
- mCondition.wait(mMutex);
- ALOGV("MidiFile::render - signal rx'd");
- }
- if (mExit) {
- mMutex.unlock();
- break;
- }
-
- // render midi data into the input buffer
- //ALOGV("MidiFile::render - rendering audio");
- int num_output = 0;
- EAS_PCM* p = mAudioBuffer;
- for (int i = 0; i < NUM_BUFFERS; i++) {
- result = EAS_Render(mEasData, p, pLibConfig->mixBufferSize, &count);
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_Render returned %ld", result);
- }
- p += count * pLibConfig->numChannels;
- num_output += count * pLibConfig->numChannels * sizeof(EAS_PCM);
- }
-
- // update playback state and position
- // ALOGV("MidiFile::render - updating state");
- EAS_GetLocation(mEasData, mEasHandle, &mPlayTime);
- EAS_State(mEasData, mEasHandle, &mState);
- mMutex.unlock();
-
- // create audio output track if necessary
- if (!mAudioSink->ready()) {
- ALOGV("MidiFile::render - create output track");
- if (createOutputTrack() != NO_ERROR)
- goto threadExit;
- }
-
- // Write data to the audio hardware
- // ALOGV("MidiFile::render - writing to audio output");
- if ((temp = mAudioSink->write(mAudioBuffer, num_output)) < 0) {
- ALOGE("Error in writing:%d",temp);
- return temp;
- }
-
- // start audio output if necessary
- if (!audioStarted) {
- //ALOGV("MidiFile::render - starting audio");
- mAudioSink->start();
- audioStarted = true;
- }
-
- // still playing?
- if ((mState == EAS_STATE_STOPPED) || (mState == EAS_STATE_ERROR) ||
- (mState == EAS_STATE_PAUSED))
- {
- switch(mState) {
- case EAS_STATE_STOPPED:
- {
- ALOGV("MidiFile::render - stopped");
- sendEvent(MEDIA_PLAYBACK_COMPLETE);
- break;
- }
- case EAS_STATE_ERROR:
- {
- ALOGE("MidiFile::render - error");
- sendEvent(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN);
- break;
- }
- case EAS_STATE_PAUSED:
- ALOGV("MidiFile::render - paused");
- break;
- default:
- break;
- }
- mAudioSink->stop();
- audioStarted = false;
- mRender = false;
- }
- }
-
-threadExit:
- mAudioSink.clear();
- if (mAudioBuffer) {
- delete [] mAudioBuffer;
- mAudioBuffer = NULL;
- }
- mMutex.lock();
- mTid = -1;
- mCondition.signal();
- mMutex.unlock();
- return result;
-}
-
-} // end namespace android
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
deleted file mode 100644
index 82e4e88..0000000
--- a/media/libmediaplayerservice/MidiFile.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
-**
-** Copyright 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_MIDIFILE_H
-#define ANDROID_MIDIFILE_H
-
-#include <media/MediaPlayerInterface.h>
-#include <libsonivox/eas.h>
-
-namespace android {
-
-// Note that the name MidiFile is misleading; this actually represents a MIDI file player
-class MidiFile : public MediaPlayerInterface {
-public:
- MidiFile();
- ~MidiFile();
-
- virtual status_t initCheck();
-
- virtual status_t setDataSource(
- const sp<IMediaHTTPService> &httpService,
- const char* path,
- const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurfaceTexture(
- const sp<IGraphicBufferProducer>& /*bufferProducer*/)
- { return UNKNOWN_ERROR; }
- virtual status_t prepare();
- virtual status_t prepareAsync();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t seekTo(int msec);
- virtual status_t pause();
- virtual bool isPlaying();
- virtual status_t getCurrentPosition(int* msec);
- virtual status_t getDuration(int* msec);
- virtual status_t release();
- virtual status_t reset();
- virtual status_t setLooping(int loop);
- virtual player_type playerType() { return SONIVOX_PLAYER; }
- virtual status_t invoke(const Parcel& /*request*/, Parcel* /*reply*/) {
- return INVALID_OPERATION;
- }
- virtual status_t setParameter(int /*key*/, const Parcel &/*request*/) {
- return INVALID_OPERATION;
- }
- virtual status_t getParameter(int /*key*/, Parcel* /*reply*/) {
- return INVALID_OPERATION;
- }
-
-
-private:
- status_t createOutputTrack();
- status_t reset_nosync();
- int render();
- void updateState(){ EAS_State(mEasData, mEasHandle, &mState); }
-
- Mutex mMutex;
- Condition mCondition;
- EAS_DATA_HANDLE mEasData;
- EAS_HANDLE mEasHandle;
- EAS_PCM* mAudioBuffer;
- EAS_I32 mPlayTime;
- EAS_I32 mDuration;
- EAS_STATE mState;
- EAS_FILE mFileLocator;
- audio_stream_type_t mStreamType;
- bool mLoop;
- volatile bool mExit;
- bool mPaused;
- volatile bool mRender;
- pid_t mTid;
-
- class MidiFileThread : public Thread {
- public:
- MidiFileThread(MidiFile *midiPlayer) : mMidiFile(midiPlayer) {
- }
-
- protected:
- virtual ~MidiFileThread() {}
-
- private:
- MidiFile *mMidiFile;
-
- bool threadLoop() {
- int result;
- result = mMidiFile->render();
- return false;
- }
-
- MidiFileThread(const MidiFileThread &);
- MidiFileThread &operator=(const MidiFileThread &);
- };
-
- sp<MidiFileThread> mThread;
-};
-
-}; // namespace android
-
-#endif // ANDROID_MIDIFILE_H
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.cpp b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
deleted file mode 100644
index f3cf6ef..0000000
--- a/media/libmediaplayerservice/MidiMetadataRetriever.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
-**
-** Copyright 2009, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MidiMetadataRetriever"
-#include <utils/Log.h>
-
-#include "MidiMetadataRetriever.h"
-#include <media/mediametadataretriever.h>
-
-#include <media/IMediaHTTPService.h>
-
-namespace android {
-
-static status_t ERROR_NOT_OPEN = -1;
-static status_t ERROR_OPEN_FAILED = -2;
-static status_t ERROR_EAS_FAILURE = -3;
-static status_t ERROR_ALLOCATE_FAILED = -4;
-
-void MidiMetadataRetriever::clearMetadataValues()
-{
- ALOGV("clearMetadataValues");
- mMetadataValues[0][0] = '\0';
-}
-
-status_t MidiMetadataRetriever::setDataSource(
- const sp<IMediaHTTPService> &httpService,
- const char *url,
- const KeyedVector<String8, String8> *headers)
-{
- ALOGV("setDataSource: %s", url? url: "NULL pointer");
- Mutex::Autolock lock(mLock);
- clearMetadataValues();
- if (mMidiPlayer == 0) {
- mMidiPlayer = new MidiFile();
- }
- return mMidiPlayer->setDataSource(httpService, url, headers);
-}
-
-status_t MidiMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
-{
- ALOGV("setDataSource: fd(%d), offset(%lld), and length(%lld)", fd, offset, length);
- Mutex::Autolock lock(mLock);
- clearMetadataValues();
- if (mMidiPlayer == 0) {
- mMidiPlayer = new MidiFile();
- }
- return mMidiPlayer->setDataSource(fd, offset, length);;
-}
-
-const char* MidiMetadataRetriever::extractMetadata(int keyCode)
-{
- ALOGV("extractMetdata: key(%d)", keyCode);
- Mutex::Autolock lock(mLock);
- if (mMidiPlayer == 0 || mMidiPlayer->initCheck() != NO_ERROR) {
- ALOGE("Midi player is not initialized yet");
- return NULL;
- }
- switch (keyCode) {
- case METADATA_KEY_DURATION:
- {
- if (mMetadataValues[0][0] == '\0') {
- int duration = -1;
- if (mMidiPlayer->getDuration(&duration) != NO_ERROR) {
- ALOGE("failed to get duration");
- return NULL;
- }
- snprintf(mMetadataValues[0], MAX_METADATA_STRING_LENGTH, "%d", duration);
- }
-
- ALOGV("duration: %s ms", mMetadataValues[0]);
- return mMetadataValues[0];
- }
- default:
- ALOGE("Unsupported key code (%d)", keyCode);
- return NULL;
- }
- return NULL;
-}
-
-};
-
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.h b/media/libmediaplayerservice/MidiMetadataRetriever.h
deleted file mode 100644
index b8214ee..0000000
--- a/media/libmediaplayerservice/MidiMetadataRetriever.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-**
-** Copyright 2009, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_MIDIMETADATARETRIEVER_H
-#define ANDROID_MIDIMETADATARETRIEVER_H
-
-#include <utils/threads.h>
-#include <utils/Errors.h>
-#include <media/MediaMetadataRetrieverInterface.h>
-
-#include "MidiFile.h"
-
-namespace android {
-
-class MidiMetadataRetriever : public MediaMetadataRetrieverInterface {
-public:
- MidiMetadataRetriever() {}
- ~MidiMetadataRetriever() {}
-
- virtual status_t setDataSource(
- const sp<IMediaHTTPService> &httpService,
- const char *url,
- const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual const char* extractMetadata(int keyCode);
-
-private:
- static const uint32_t MAX_METADATA_STRING_LENGTH = 128;
- void clearMetadataValues();
-
- Mutex mLock;
- sp<MidiFile> mMidiPlayer;
- char mMetadataValues[1][MAX_METADATA_STRING_LENGTH];
-};
-
-}; // namespace android
-
-#endif // ANDROID_MIDIMETADATARETRIEVER_H
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 1af2713..dd79b50 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -173,6 +173,14 @@
if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
&& !strncasecmp(fileMime, "video/wvm", 9)) {
mIsWidevine = true;
+ if (!mUri.empty()) {
+ // streaming, but the app forgot to specify widevine:// url
+ mWVMExtractor = static_cast<WVMExtractor *>(extractor.get());
+ mWVMExtractor->setAdaptiveStreamingMode(true);
+ if (mUIDValid) {
+ mWVMExtractor->setUID(mUID);
+ }
+ }
}
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 080cd52..a28591e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1667,6 +1667,10 @@
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
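+ // Surface MEDIA_INFO_NOT_SEEKABLE to the client as soon as the source
+ // reports that it cannot seek.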
+ if ((flags & NuPlayer::Source::FLAG_CAN_SEEK) == 0) {
+ driver->notifyListener(
+ MEDIA_INFO, MEDIA_INFO_NOT_SEEKABLE, 0);
+ }
driver->notifyFlagsChanged(flags);
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index d65df14..f126b87 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -388,13 +388,22 @@
status_t NuPlayerDriver::getCurrentPosition(int *msec) {
int64_t tempUs = 0;
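+ // While paused or with a seek in flight, report the last cached position
+ // instead of querying the player.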
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mSeekInProgress || mState == STATE_PAUSED) {
+ tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+ *msec = (int)divRound(tempUs, (int64_t)(1000));
+ return OK;
+ }
+ }
+
status_t ret = mPlayer->getCurrentPosition(&tempUs);
Mutex::Autolock autoLock(mLock);
// We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
// means getCurrentPosition can be called before seek is completed. Iow, renderer may return a
// position value that's different the seek to position.
- if (ret != OK || mSeekInProgress) {
+ if (ret != OK) {
tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
} else {
mPositionUs = tempUs;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 21b74ee..2ea6d70 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -68,6 +68,7 @@
mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
+ mPausePositionMediaTimeUs(0),
mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
@@ -166,11 +167,48 @@
msg->post();
}
+// Called on any threads, except renderer's thread.
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
- return getCurrentPosition(mediaUs, ALooper::GetNowUs());
+ {
+ Mutex::Autolock autoLock(mLock);
+ int64_t currentPositionUs;
+ if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
+ *mediaUs = currentPositionUs;
+ return OK;
+ }
+ }
+ return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs());
}
-status_t NuPlayer::Renderer::getCurrentPosition(
+// Called on only renderer's thread.
+status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) {
+ return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs());
+}
+
+// Called on only renderer's thread.
+// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's
+// thread, no need to acquire mLock.
+status_t NuPlayer::Renderer::getCurrentPositionOnLooper(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
+ int64_t currentPositionUs;
+ if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
+ *mediaUs = currentPositionUs;
+ return OK;
+ }
+ return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo);
+}
+
+// Called either with mLock acquired or on renderer's thread.
+bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) {
+ if (!mPaused) {
+ return false;
+ }
+ *mediaUs = mPausePositionMediaTimeUs;
+ return true;
+}
+
+// Called on any threads.
+status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
Mutex::Autolock autoLock(mTimeLock);
if (!mHasAudio && !mHasVideo) {
@@ -638,10 +676,13 @@
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
- // Need to stop the track here, because that will play out the last
- // little bit at the end of the file. Otherwise short files won't play.
- mAudioSink->stop();
- mNumFramesWritten = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ // If we're not in gapless playback (i.e. through setNextPlayer), we
+ // need to stop the track here, because that will play out the last
+ // little bit at the end of the file. Otherwise short files won't play.
+ mAudioSink->stop();
+ mNumFramesWritten = 0;
+ }
return false;
}
@@ -715,7 +756,8 @@
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
int64_t currentPositionUs;
- if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
+ if (getCurrentPositionOnLooper(
+ &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
// If failed to get current position, e.g. due to audio clock is not ready, then just
// play out video immediately without delay.
return nowUs;
@@ -1176,6 +1218,11 @@
ALOGW("Renderer::onPause() called while already paused!");
return;
}
+ int64_t currentPositionUs;
+ if (getCurrentPositionFromAnchor(
+ &currentPositionUs, ALooper::GetNowUs()) == OK) {
+ mPausePositionMediaTimeUs = currentPositionUs;
+ }
{
Mutex::Autolock autoLock(mLock);
++mAudioQueueGeneration;
@@ -1303,7 +1350,7 @@
mAudioOffloadTornDown = true;
int64_t currentPositionUs;
- if (getCurrentPosition(&currentPositionUs) != OK) {
+ if (getCurrentPositionOnLooper(&currentPositionUs) != OK) {
currentPositionUs = 0;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 406c64c..c6e3457 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -62,8 +62,6 @@
// Following setters and getters are protected by mTimeLock.
status_t getCurrentPosition(int64_t *mediaUs);
- status_t getCurrentPosition(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
void setHasMedia(bool audio);
void setAudioFirstAnchorTime(int64_t mediaUs);
void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
@@ -168,7 +166,10 @@
bool mSyncQueues;
+ // modified on only renderer's thread.
bool mPaused;
+ int64_t mPausePositionMediaTimeUs;
+
bool mVideoSampleReceived;
bool mVideoRenderingStarted;
int32_t mVideoRenderingStartGeneration;
@@ -183,6 +184,12 @@
int32_t mTotalBuffersQueued;
int32_t mLastAudioBufferDrained;
+ status_t getCurrentPositionOnLooper(int64_t *mediaUs);
+ status_t getCurrentPositionOnLooper(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+ bool getCurrentPositionIfPaused_l(int64_t *mediaUs);
+ status_t getCurrentPositionFromAnchor(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
size_t fillAudioBuffer(void *buffer, size_t size);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 653d16c..dfb0101 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1302,7 +1302,21 @@
return err;
}
- inputFormat->setInt32("adaptive-playback", true);
+ int32_t maxWidth = 0, maxHeight = 0;
+ if (msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
+
+ err = mOMX->prepareForAdaptivePlayback(
+ mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+ if (err != OK) {
+ ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+ } else {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+ }
} else {
ALOGV("Configuring CPU controlled video playback.");
mTunneled = false;
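The ACodec hunk above now enables adaptive playback only when the configure message carries size hints. A hedged example of how a caller might supply them; the key names match the code above, the resolutions are illustrative:

    // Sketch: decoder configure message with adaptive-playback size hints.
    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);
    format->setInt32("max-width", 1920);    // without these two keys,
    format->setInt32("max-height", 1080);   // prepareForAdaptivePlayback() is skipped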
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 1810031..609847c 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -36,6 +36,7 @@
MediaCodecSource.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ MidiExtractor.cpp \
http/MediaHTTP.cpp \
MediaMuxer.cpp \
MediaSource.cpp \
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index c99db84..f7dcf35 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -22,6 +22,7 @@
#include "include/DRMExtractor.h"
#include "include/FLACExtractor.h"
#include "include/HTTPBase.h"
+#include "include/MidiExtractor.h"
#include "include/MP3Extractor.h"
#include "include/MPEG2PSExtractor.h"
#include "include/MPEG2TSExtractor.h"
@@ -172,6 +173,7 @@
RegisterSniffer_l(SniffAAC);
RegisterSniffer_l(SniffMPEG2PS);
RegisterSniffer_l(SniffWVM);
+ RegisterSniffer_l(SniffMidi);
char value[PROPERTY_VALUE_MAX];
if (property_get("drm.service.enabled", value, NULL)
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index a7ca3da..f0db76b 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FileSource"
+#include <utils/Log.h>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/FileSource.h>
#include <sys/types.h>
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 087f345..dec52f3 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -999,6 +999,9 @@
int64_t duration;
int32_t samplerate;
+ if (!mLastTrack) {
+ return ERROR_MALFORMED;
+ }
if (mLastTrack->meta->findInt64(kKeyDuration, &duration) &&
mLastTrack->meta->findInt32(kKeySampleRate, &samplerate)) {
@@ -1533,13 +1536,13 @@
break;
}
- // @xyz
- case FOURCC('\xA9', 'x', 'y', 'z'):
+ // ©xyz
+ case FOURCC(0xA9, 'x', 'y', 'z'):
{
*offset += chunk_size;
- // Best case the total data length inside "@xyz" box
- // would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/",
+ // Best case the total data length inside "©xyz" box
+ // would be 8, for instance "©xyz" + "\x00\x04\x15\xc7" + "0+0/",
// where "\x00\x04" is the text string length with value = 4,
// "\0x15\xc7" is the language code = en, and "0+0" is a
// location (string) value with longitude = 0 and latitude = 0.
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 5b8be46..cf6e937 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -62,6 +62,14 @@
sp<IMediaCodecList> MediaCodecList::sRemoteList;
+sp<MediaCodecList::BinderDeathObserver> MediaCodecList::sBinderDeathObserver;
+
+void MediaCodecList::BinderDeathObserver::binderDied(const wp<IBinder> &who __unused) {
+ Mutex::Autolock _l(sRemoteInitMutex);
+ sRemoteList.clear();
+ sBinderDeathObserver.clear();
+}
+
// static
sp<IMediaCodecList> MediaCodecList::getInstance() {
Mutex::Autolock _l(sRemoteInitMutex);
@@ -72,8 +80,11 @@
interface_cast<IMediaPlayerService>(binder);
if (service.get() != NULL) {
sRemoteList = service->getCodecList();
+ if (sRemoteList != NULL) {
+ sBinderDeathObserver = new BinderDeathObserver();
+ binder->linkToDeath(sBinderDeathObserver.get());
+ }
}
-
if (sRemoteList == NULL) {
// if failed to get remote list, create local list
sRemoteList = getLocalInstance();
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index d48dd84..fde6fbd 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -34,6 +34,7 @@
const char *MEDIA_MIMETYPE_AUDIO_MPEG = "audio/mpeg";
const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I = "audio/mpeg-L1";
const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2";
+const char *MEDIA_MIMETYPE_AUDIO_MIDI = "audio/midi";
const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp";
const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 9ab6611..e21fe6e 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -29,6 +29,7 @@
#include "include/WVMExtractor.h"
#include "include/FLACExtractor.h"
#include "include/AACExtractor.h"
+#include "include/MidiExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -116,6 +117,8 @@
ret = new AACExtractor(source, meta);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
ret = new MPEG2PSExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MIDI)) {
+ ret = new MidiExtractor(source);
}
if (ret != NULL) {
diff --git a/media/libstagefright/MidiExtractor.cpp b/media/libstagefright/MidiExtractor.cpp
new file mode 100644
index 0000000..66fab77
--- /dev/null
+++ b/media/libstagefright/MidiExtractor.cpp
@@ -0,0 +1,325 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MidiExtractor"
+#include <utils/Log.h>
+
+#include "include/MidiExtractor.h"
+
+#include <media/MidiIoWrapper.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaSource.h>
+#include <libsonivox/eas_reverb.h>
+
+namespace android {
+
+// how many Sonivox output buffers to aggregate into one MediaBuffer
+static const int NUM_COMBINE_BUFFERS = 4;
+
+class MidiSource : public MediaSource {
+
+public:
+ MidiSource(
+ const sp<MidiEngine> &engine,
+ const sp<MetaData> &trackMetadata);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+protected:
+ virtual ~MidiSource();
+
+private:
+ sp<MidiEngine> mEngine;
+ sp<MetaData> mTrackMetadata;
+ bool mInitCheck;
+ bool mStarted;
+
+ status_t init();
+
+ // no copy constructor or assignment
+ MidiSource(const MidiSource &);
+ MidiSource &operator=(const MidiSource &);
+
+};
+
+
+// Midisource
+
+MidiSource::MidiSource(
+ const sp<MidiEngine> &engine,
+ const sp<MetaData> &trackMetadata)
+ : mEngine(engine),
+ mTrackMetadata(trackMetadata),
+ mInitCheck(false),
+ mStarted(false)
+{
+ ALOGV("MidiSource ctor");
+ mInitCheck = init();
+}
+
+MidiSource::~MidiSource()
+{
+ ALOGV("MidiSource dtor");
+ if (mStarted) {
+ stop();
+ }
+}
+
+status_t MidiSource::start(MetaData * /* params */)
+{
+ ALOGV("MidiSource::start");
+
+ CHECK(!mStarted);
+ mStarted = true;
+ mEngine->allocateBuffers();
+ return OK;
+}
+
+status_t MidiSource::stop()
+{
+ ALOGV("MidiSource::stop");
+
+ CHECK(mStarted);
+ mStarted = false;
+ mEngine->releaseBuffers();
+
+ return OK;
+}
+
+sp<MetaData> MidiSource::getFormat()
+{
+ return mTrackMetadata;
+}
+
+status_t MidiSource::read(
+ MediaBuffer **outBuffer, const ReadOptions *options)
+{
+ ALOGV("MidiSource::read");
+ MediaBuffer *buffer;
+ // process an optional seek request
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode mode;
+ if ((NULL != options) && options->getSeekTo(&seekTimeUs, &mode)) {
+ if (seekTimeUs <= 0LL) {
+ seekTimeUs = 0LL;
+ }
+ mEngine->seekTo(seekTimeUs);
+ }
+ buffer = mEngine->readBuffer();
+ *outBuffer = buffer;
+ ALOGV("MidiSource::read %p done", this);
+ return buffer != NULL ? (status_t) OK : (status_t) ERROR_END_OF_STREAM;
+}
+
+status_t MidiSource::init()
+{
+ ALOGV("MidiSource::init");
+ return OK;
+}
+
+// MidiEngine
+
+MidiEngine::MidiEngine(const sp<DataSource> &dataSource,
+ const sp<MetaData> &fileMetadata,
+ const sp<MetaData> &trackMetadata) :
+ mGroup(NULL),
+ mEasData(NULL),
+ mEasHandle(NULL),
+ mEasConfig(NULL),
+ mIsInitialized(false) {
+ mIoWrapper = new MidiIoWrapper(dataSource);
+ // spin up a new EAS engine
+ EAS_I32 temp;
+ EAS_RESULT result = EAS_Init(&mEasData);
+
+ if (result == EAS_SUCCESS) {
+ result = EAS_OpenFile(mEasData, mIoWrapper->getLocator(), &mEasHandle);
+ }
+ if (result == EAS_SUCCESS) {
+ result = EAS_Prepare(mEasData, mEasHandle);
+ }
+ if (result == EAS_SUCCESS) {
+ result = EAS_ParseMetaData(mEasData, mEasHandle, &temp);
+ }
+
+ if (result != EAS_SUCCESS) {
+ return;
+ }
+
+ if (fileMetadata != NULL) {
+ fileMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MIDI);
+ }
+
+ if (trackMetadata != NULL) {
+ trackMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+ trackMetadata->setInt64(kKeyDuration, 1000ll * temp); // milli->micro
+ mEasConfig = EAS_Config();
+ trackMetadata->setInt32(kKeySampleRate, mEasConfig->sampleRate);
+ trackMetadata->setInt32(kKeyChannelCount, mEasConfig->numChannels);
+ }
+ mIsInitialized = true;
+}
+
+MidiEngine::~MidiEngine() {
+ if (mEasHandle) {
+ EAS_CloseFile(mEasData, mEasHandle);
+ }
+ if (mEasData) {
+ EAS_Shutdown(mEasData);
+ }
+ delete mGroup;
+
+}
+
+status_t MidiEngine::initCheck() {
+ return mIsInitialized ? OK : UNKNOWN_ERROR;
+}
+
+status_t MidiEngine::allocateBuffers() {
+ // select reverb preset and enable
+ EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_PRESET, EAS_PARAM_REVERB_CHAMBER);
+ EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_BYPASS, EAS_FALSE);
+
+ mGroup = new MediaBufferGroup;
+ int bufsize = sizeof(EAS_PCM)
+ * mEasConfig->mixBufferSize * mEasConfig->numChannels * NUM_COMBINE_BUFFERS;
+ ALOGV("using %d byte buffer", bufsize);
+ mGroup->add_buffer(new MediaBuffer(bufsize));
+ return OK;
+}
+
+status_t MidiEngine::releaseBuffers() {
+ delete mGroup;
+ mGroup = NULL;
+ return OK;
+}
+
+status_t MidiEngine::seekTo(int64_t positionUs) {
+ ALOGV("seekTo %lld", positionUs);
+ EAS_RESULT result = EAS_Locate(mEasData, mEasHandle, positionUs / 1000, false);
+ return result == EAS_SUCCESS ? OK : UNKNOWN_ERROR;
+}
+
+MediaBuffer* MidiEngine::readBuffer() {
+ EAS_STATE state;
+ EAS_State(mEasData, mEasHandle, &state);
+ if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
+ return NULL;
+ }
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+ if (err != OK) {
+ ALOGE("readBuffer: no buffer");
+ return NULL;
+ }
+ EAS_I32 timeMs;
+ EAS_GetLocation(mEasData, mEasHandle, &timeMs);
+ int64_t timeUs = 1000ll * timeMs;
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+
+ EAS_PCM* p = (EAS_PCM*) buffer->data();
+ int numBytesOutput = 0;
+ for (int i = 0; i < NUM_COMBINE_BUFFERS; i++) {
+ EAS_I32 numRendered;
+ EAS_RESULT result = EAS_Render(mEasData, p, mEasConfig->mixBufferSize, &numRendered);
+ if (result != EAS_SUCCESS) {
+ ALOGE("EAS_Render returned %ld", result);
+ break;
+ }
+ p += numRendered * mEasConfig->numChannels;
+ numBytesOutput += numRendered * mEasConfig->numChannels * sizeof(EAS_PCM);
+ }
+ buffer->set_range(0, numBytesOutput);
+ ALOGV("readBuffer: returning %zd in buffer %p", buffer->range_length(), buffer);
+ return buffer;
+}
+
+
+// MidiExtractor
+
+MidiExtractor::MidiExtractor(
+ const sp<DataSource> &dataSource)
+ : mDataSource(dataSource),
+ mInitCheck(false)
+{
+ ALOGV("MidiExtractor ctor");
+ mFileMetadata = new MetaData;
+ mTrackMetadata = new MetaData;
+ mEngine = new MidiEngine(mDataSource, mFileMetadata, mTrackMetadata);
+ mInitCheck = mEngine->initCheck();
+}
+
+MidiExtractor::~MidiExtractor()
+{
+ ALOGV("MidiExtractor dtor");
+}
+
+size_t MidiExtractor::countTracks()
+{
+ return mInitCheck == OK ? 1 : 0;
+}
+
+sp<MediaSource> MidiExtractor::getTrack(size_t index)
+{
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return new MidiSource(mEngine, mTrackMetadata);
+}
+
+sp<MetaData> MidiExtractor::getTrackMetaData(
+ size_t index, uint32_t /* flags */) {
+ ALOGV("MidiExtractor::getTrackMetaData");
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return mTrackMetadata;
+}
+
+sp<MetaData> MidiExtractor::getMetaData()
+{
+ ALOGV("MidiExtractor::getMetaData");
+ return mFileMetadata;
+}
+
+// Sniffer
+
+bool SniffMidi(
+ const sp<DataSource> &source, String8 *mimeType, float *confidence,
+ sp<AMessage> *)
+{
+ sp<MidiEngine> p = new MidiEngine(source, NULL, NULL);
+ if (p->initCheck() == OK) {
+ *mimeType = MEDIA_MIMETYPE_AUDIO_MIDI;
+ *confidence = 0.8;
+ ALOGV("SniffMidi: yes");
+ return true;
+ }
+ ALOGV("SniffMidi: no");
+ return false;
+
+}
+
+} // namespace android
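A hedged sketch of how the new extractor is reached at runtime: SniffMidi() spins up a throw-away MidiEngine to probe the data, and on success MediaExtractor::Create() returns a MidiExtractor for the same source. The file path below is a placeholder:

    sp<DataSource> source = new FileSource("/sdcard/song.mid");   // hypothetical path
    String8 mime;
    float confidence;
    if (SniffMidi(source, &mime, &confidence, NULL)) {
        sp<MediaExtractor> extractor = MediaExtractor::Create(source, mime.string());
        sp<MediaSource> track = extractor->getTrack(0);           // the single PCM track
    }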
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index bd28583..6e32494 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -76,7 +76,7 @@
status_t seekToTime(int64_t timeUs);
status_t seekToOffset(off64_t offset);
- status_t readNextPacket(MediaBuffer **buffer);
+ status_t readNextPacket(MediaBuffer **buffer, bool conf);
status_t init();
@@ -185,7 +185,7 @@
}
MediaBuffer *packet;
- status_t err = mExtractor->mImpl->readNextPacket(&packet);
+ status_t err = mExtractor->mImpl->readNextPacket(&packet, /* conf = */ false);
if (err != OK) {
return err;
@@ -457,7 +457,7 @@
return sizeof(header) + page->mNumSegments + totalSize;
}
-status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
+status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {
*out = NULL;
MediaBuffer *buffer = NULL;
@@ -523,10 +523,8 @@
mFirstPacketInPage = false;
}
- if (mVi.rate) {
- // Rate may not have been initialized yet if we're currently
- // reading the configuration packets...
- // Fortunately, the timestamp doesn't matter for those.
+ // ignore timestamp for configuration packets
+ if (!conf) {
int32_t curBlockSize = packetBlockSize(buffer);
if (mCurrentPage.mPrevPacketSize < 0) {
mCurrentPage.mPrevPacketSize = curBlockSize;
@@ -605,7 +603,7 @@
MediaBuffer *packet;
status_t err;
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
ALOGV("read packet of size %zu\n", packet->range_length());
@@ -616,7 +614,7 @@
return err;
}
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
ALOGV("read packet of size %zu\n", packet->range_length());
@@ -627,7 +625,7 @@
return err;
}
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
ALOGV("read packet of size %zu\n", packet->range_length());
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 4449d57..db33e83 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -28,9 +28,6 @@
#include <media/mediametadataretriever.h>
#include <private/media/VideoFrame.h>
-// Sonivox includes
-#include <libsonivox/eas.h>
-
namespace android {
StagefrightMediaScanner::StagefrightMediaScanner() {}
@@ -57,54 +54,6 @@
return false;
}
-static MediaScanResult HandleMIDI(
- const char *filename, MediaScannerClient *client) {
- // get the library configuration and do sanity check
- const S_EAS_LIB_CONFIG* pLibConfig = EAS_Config();
- if ((pLibConfig == NULL) || (LIB_VERSION != pLibConfig->libVersion)) {
- ALOGE("EAS library/header mismatch\n");
- return MEDIA_SCAN_RESULT_ERROR;
- }
- EAS_I32 temp;
-
- // spin up a new EAS engine
- EAS_DATA_HANDLE easData = NULL;
- EAS_HANDLE easHandle = NULL;
- EAS_RESULT result = EAS_Init(&easData);
- if (result == EAS_SUCCESS) {
- EAS_FILE file;
- file.path = filename;
- file.fd = 0;
- file.offset = 0;
- file.length = 0;
- result = EAS_OpenFile(easData, &file, &easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_Prepare(easData, easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_ParseMetaData(easData, easHandle, &temp);
- }
- if (easHandle) {
- EAS_CloseFile(easData, easHandle);
- }
- if (easData) {
- EAS_Shutdown(easData);
- }
-
- if (result != EAS_SUCCESS) {
- return MEDIA_SCAN_RESULT_SKIPPED;
- }
-
- char buffer[20];
- sprintf(buffer, "%ld", temp);
- status_t status = client->addStringTag("duration", buffer);
- if (status != OK) {
- return MEDIA_SCAN_RESULT_ERROR;
- }
- return MEDIA_SCAN_RESULT_OK;
-}
-
MediaScanResult StagefrightMediaScanner::processFile(
const char *path, const char *mimeType,
MediaScannerClient &client) {
@@ -130,18 +79,6 @@
return MEDIA_SCAN_RESULT_SKIPPED;
}
- if (!strcasecmp(extension, ".mid")
- || !strcasecmp(extension, ".smf")
- || !strcasecmp(extension, ".imy")
- || !strcasecmp(extension, ".midi")
- || !strcasecmp(extension, ".xmf")
- || !strcasecmp(extension, ".rtttl")
- || !strcasecmp(extension, ".rtx")
- || !strcasecmp(extension, ".ota")
- || !strcasecmp(extension, ".mxmf")) {
- return HandleMIDI(path, &client);
- }
-
sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever);
int fd = open(path, O_RDONLY | O_LARGEFILE);
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
index 85f6615..a06684b 100644
--- a/media/libstagefright/data/media_codecs_google_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -65,7 +65,8 @@
<Encoders>
<MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm">
<Limit name="channel-count" max="6" />
- <Limit name="sample-rate" ranges="11025,12000,16000,22050,24000,32000,44100,48000" />
+ <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+ <!-- also may support 64000, 88200 and 96000 Hz -->
<Limit name="bitrate" range="8000-960000" />
</MediaCodec>
<MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp">
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 6522ad7..04005bd 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1110,11 +1110,11 @@
}
status_t LiveSession::getDuration(int64_t *durationUs) const {
- int64_t maxDurationUs = 0ll;
+ int64_t maxDurationUs = -1ll;
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
int64_t fetcherDurationUs = mFetcherInfos.valueAt(i).mDurationUs;
- if (fetcherDurationUs >= 0ll && fetcherDurationUs > maxDurationUs) {
+ if (fetcherDurationUs > maxDurationUs) {
maxDurationUs = fetcherDurationUs;
}
}
diff --git a/media/libstagefright/include/MidiExtractor.h b/media/libstagefright/include/MidiExtractor.h
new file mode 100644
index 0000000..9a2abc0
--- /dev/null
+++ b/media/libstagefright/include/MidiExtractor.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MIDI_EXTRACTOR_H_
+#define MIDI_EXTRACTOR_H_
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/MidiIoWrapper.h>
+#include <utils/String8.h>
+#include <libsonivox/eas.h>
+
+namespace android {
+
+class MidiEngine : public RefBase {
+public:
+ MidiEngine(const sp<DataSource> &dataSource,
+ const sp<MetaData> &fileMetadata,
+ const sp<MetaData> &trackMetadata);
+ ~MidiEngine();
+
+ status_t initCheck();
+
+ status_t allocateBuffers();
+ status_t releaseBuffers();
+ status_t seekTo(int64_t positionUs);
+ MediaBuffer* readBuffer();
+private:
+ sp<MidiIoWrapper> mIoWrapper;
+ MediaBufferGroup *mGroup;
+ EAS_DATA_HANDLE mEasData;
+ EAS_HANDLE mEasHandle;
+ const S_EAS_LIB_CONFIG* mEasConfig;
+ bool mIsInitialized;
+};
+
+class MidiExtractor : public MediaExtractor {
+
+public:
+ // Extractor assumes ownership of source
+ MidiExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+ virtual sp<MetaData> getMetaData();
+
+protected:
+ virtual ~MidiExtractor();
+
+private:
+ sp<DataSource> mDataSource;
+ status_t mInitCheck;
+ sp<MetaData> mFileMetadata;
+
+ // There is only one track
+ sp<MetaData> mTrackMetadata;
+
+ sp<MidiEngine> mEngine;
+
+ EAS_DATA_HANDLE mEasData;
+ EAS_HANDLE mEasHandle;
+ EAS_PCM* mAudioBuffer;
+ EAS_I32 mPlayTime;
+ EAS_I32 mDuration;
+ EAS_STATE mState;
+ EAS_FILE mFileLocator;
+
+ MidiExtractor(const MidiExtractor &);
+ MidiExtractor &operator=(const MidiExtractor &);
+
+};
+
+bool SniffMidi(const sp<DataSource> &source, String8 *mimeType,
+ float *confidence, sp<AMessage> *);
+
+} // namespace android
+
+#endif // MIDI_EXTRACTOR_H_
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 1d8a213..4cf3819 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -343,7 +343,7 @@
}
if (frameLength != size - startOffset) {
- ALOGW("First ADTS AAC frame length is %zd bytes, "
+ ALOGV("First ADTS AAC frame length is %zd bytes, "
"while the buffer size is %zd bytes.",
frameLength, size - startOffset);
}
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index 931a09d..e4e16f2 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -819,18 +819,24 @@
MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
- if (mRequest.getParameterCount() < 4)
- return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset;
uint32_t length;
offset = mRequest.getParameter(2);
if (operation == MTP_OPERATION_GET_PARTIAL_OBJECT_64) {
+ // MTP_OPERATION_GET_PARTIAL_OBJECT_64 takes 4 arguments
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
// android extension with 64 bit offset
uint64_t offset2 = mRequest.getParameter(3);
offset = offset | (offset2 << 32);
length = mRequest.getParameter(4);
} else {
+ // MTP_OPERATION_GET_PARTIAL_OBJECT takes 3 arguments
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
// standard GetPartialObject
length = mRequest.getParameter(3);
}
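A small worked example of the 64-bit offset reassembly in the GET_PARTIAL_OBJECT_64 branch above; the parameter values are illustrative:

    uint32_t offsetLo = 0x00001000;   // request parameter 2 (low 32 bits)
    uint32_t offsetHi = 0x00000002;   // request parameter 3 (high 32 bits)
    uint64_t offset = (uint64_t)offsetLo | ((uint64_t)offsetHi << 32);
    // offset == 0x0000000200001000, i.e. 8 GiB + 4 KiB into the object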
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index aee805a..9db313e 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1436,13 +1436,6 @@
goto Exit;
}
- if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice())
- && !captureAudioOutputAllowed()) {
- ALOGE("openRecord() permission denied: capture not allowed");
- lStatus = PERMISSION_DENIED;
- goto Exit;
- }
-
pid_t pid = IPCThreadState::self()->getCallingPid();
client = registerPid(pid);
diff --git a/services/audioflinger/AudioMixerOps.h b/services/audioflinger/AudioMixerOps.h
index f7376a8..2678857 100644
--- a/services/audioflinger/AudioMixerOps.h
+++ b/services/audioflinger/AudioMixerOps.h
@@ -52,15 +52,12 @@
*
* For high precision audio, only the <TO, TI, TV> = <float, float, float>
* needs to be accelerated. This is perhaps the easiest form to do quickly as well.
+ *
+ * A generic version is NOT defined to catch any mistake of using it.
*/
template <typename TO, typename TI, typename TV>
-inline TO MixMul(TI value, TV volume) {
- COMPILE_TIME_ASSERT_FUNCTION_SCOPE(false);
- // should not be here :-).
- // To avoid mistakes, this template is always specialized.
- return value * volume;
-}
+TO MixMul(TI value, TV volume);
template <>
inline int32_t MixMul<int32_t, int16_t, int16_t>(int16_t value, int16_t volume) {
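The MixMul change above replaces a compile-time assertion with a declared-but-undefined primary template, so an unsupported <TO, TI, TV> combination now fails at link time instead of compiling by accident. A minimal standalone sketch of the pattern, with illustrative names rather than the mixer's:

    // Declared but never defined: only explicit specializations can be used.
    template <typename TO, typename TI, typename TV>
    TO Mix(TI value, TV volume);

    template <>
    inline int32_t Mix<int32_t, int16_t, int16_t>(int16_t value, int16_t volume) {
        return value * volume;                       // supported combination
    }

    // Mix<float, int16_t, float>(sample, vol);      // compiles, then fails to link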
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index f93a2c5..984f8b7 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1256,6 +1256,7 @@
mScreenState(AudioFlinger::mScreenState),
// index 0 is reserved for normal mixer's submix
mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1),
+ mHwSupportsPause(false), mHwPaused(false), mFlushPending(false),
// mLatchD, mLatchQ,
mLatchDValid(false), mLatchQValid(false)
{
@@ -1910,6 +1911,19 @@
}
}
+ mHwSupportsPause = false;
+ if (mOutput->flags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (mOutput->stream->pause != NULL) {
+ if (mOutput->stream->resume != NULL) {
+ mHwSupportsPause = true;
+ } else {
+ ALOGW("direct output implements pause but not resume");
+ }
+ } else if (mOutput->stream->resume != NULL) {
+ ALOGW("direct output implements resume but not pause");
+ }
+ }
+
// Calculate size of normal sink buffer relative to the HAL output buffer size
double multiplier = 1.0;
if (mType == MIXER && (kUseFastMixer == FastMixer_Static ||
@@ -3145,6 +3159,7 @@
mCallbackThread->setWriteBlocked(mWriteAckSequence);
mCallbackThread->setDraining(mDrainSequence);
}
+ mHwPaused = false;
}
void AudioFlinger::PlaybackThread::onAddNewTrack_l()
@@ -4074,6 +4089,9 @@
{
size_t count = mActiveTracks.size();
mixer_state mixerStatus = MIXER_IDLE;
+ bool doHwPause = false;
+ bool doHwResume = false;
+ bool flushPending = false;
// find out which tracks need to be processed
for (size_t i = 0; i < count; i++) {
@@ -4092,6 +4110,28 @@
sp<Track> l = mLatestActiveTrack.promote();
bool last = l.get() == track;
+ if (mHwSupportsPause && track->isPausing()) {
+ track->setPaused();
+ if (last && !mHwPaused) {
+ doHwPause = true;
+ mHwPaused = true;
+ }
+ tracksToRemove->add(track);
+ } else if (track->isFlushPending()) {
+ track->flushAck();
+ if (last) {
+ flushPending = true;
+ }
+ } else if (mHwSupportsPause && track->isResumePending()){
+ track->resumeAck();
+ if (last) {
+ if (mHwPaused) {
+ doHwResume = true;
+ mHwPaused = false;
+ }
+ }
+ }
+
// The first time a track is added we wait
// for all its buffers to be filled before processing it.
// Allow draining the buffer in case the client
@@ -4115,8 +4155,8 @@
track->mFillingUpStatus = Track::FS_ACTIVE;
// make sure processVolume_l() will apply new volume even if 0
mLeftVolFloat = mRightVolFloat = -1.0;
- if (track->mState == TrackBase::RESUMING) {
- track->mState = TrackBase::ACTIVE;
+ if (!mHwSupportsPause) {
+ track->resumeAck();
}
}
@@ -4179,6 +4219,30 @@
}
}
+ // if an active track did not command a flush, check for pending flush on stopped tracks
+ if (!flushPending) {
+ for (size_t i = 0; i < mTracks.size(); i++) {
+ if (mTracks[i]->isFlushPending()) {
+ mTracks[i]->flushAck();
+ flushPending = true;
+ }
+ }
+ }
+
+ // make sure the pause/flush/resume sequence is executed in the right order.
+ // If a flush is pending and a track is active but the HW is not paused, force a HW pause
+ // before flush and then resume HW. This can happen in case of pause/flush/resume
+ // if resume is received before pause is executed.
+ if (mHwSupportsPause && !mStandby &&
+ (doHwPause || (flushPending && !mHwPaused && (count != 0)))) {
+ mOutput->stream->pause(mOutput->stream);
+ }
+ if (flushPending) {
+ flushHw_l();
+ }
+ if (mHwSupportsPause && !mStandby && doHwResume) {
+ mOutput->stream->resume(mOutput->stream);
+ }
// remove all the tracks that need to be...
removeTracks_l(*tracksToRemove);
@@ -4211,6 +4275,11 @@
void AudioFlinger::DirectOutputThread::threadLoop_sleepTime()
{
+ // do not write to HAL when paused
+ if (mHwPaused) {
+ sleepTime = idleSleepTime;
+ return;
+ }
if (sleepTime == 0) {
if (mMixerStatus == MIXER_TRACKS_ENABLED) {
sleepTime = activeSleepTime;
@@ -4223,6 +4292,38 @@
}
}
+void AudioFlinger::DirectOutputThread::threadLoop_exit()
+{
+ {
+ Mutex::Autolock _l(mLock);
+ bool flushPending = false;
+ for (size_t i = 0; i < mTracks.size(); i++) {
+ if (mTracks[i]->isFlushPending()) {
+ mTracks[i]->flushAck();
+ flushPending = true;
+ }
+ }
+ if (flushPending) {
+ flushHw_l();
+ }
+ }
+ PlaybackThread::threadLoop_exit();
+}
+
+// must be called with thread mutex locked
+bool AudioFlinger::DirectOutputThread::shouldStandby_l()
+{
+ bool trackPaused = false;
+
+ // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
+ // after a timeout and we will enter standby then.
+ if (mTracks.size() > 0) {
+ trackPaused = mTracks[mTracks.size() - 1]->isPaused();
+ }
+
+ return !mStandby && !trackPaused;
+}
+
// getTrackName_l() must be called with ThreadBase::mLock held
int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask __unused,
audio_format_t format __unused, int sessionId __unused)
@@ -4332,8 +4433,10 @@
void AudioFlinger::DirectOutputThread::flushHw_l()
{
- if (mOutput->stream->flush != NULL)
+ if (mOutput->stream->flush != NULL) {
mOutput->stream->flush(mOutput->stream);
+ }
+ mHwPaused = false;
}
// ----------------------------------------------------------------------------
@@ -4442,8 +4545,6 @@
AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output, audio_io_handle_t id, uint32_t device)
: DirectOutputThread(audioFlinger, output, id, device, OFFLOAD),
- mHwPaused(false),
- mFlushPending(false),
mPausedBytesRemaining(0)
{
//FIXME: mStandby should be set to true by ThreadBase constructor
@@ -4680,21 +4781,6 @@
return false;
}
-// must be called with thread mutex locked
-bool AudioFlinger::OffloadThread::shouldStandby_l()
-{
- bool trackPaused = false;
-
- // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
- // after a timeout and we will enter standby then.
- if (mTracks.size() > 0) {
- trackPaused = mTracks[mTracks.size() - 1]->isPaused();
- }
-
- return !mStandby && !trackPaused;
-}
-
-
bool AudioFlinger::OffloadThread::waitingAsyncCallback()
{
Mutex::Autolock _l(mLock);
@@ -4709,7 +4795,6 @@
mBytesRemaining = 0;
mPausedWriteLength = 0;
mPausedBytesRemaining = 0;
- mHwPaused = false;
if (mUseAsyncWrite) {
// discard any pending drain or write ack by incrementing sequence
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 09383b6..65088d3 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -812,7 +812,9 @@
protected:
// accessed by both binder threads and within threadLoop(), lock on mutex needed
unsigned mFastTrackAvailMask; // bit i set if fast track [i] is available
-
+ bool mHwSupportsPause;
+ bool mHwPaused;
+ bool mFlushPending;
private:
// timestamp latch:
// D input is written by threadLoop_write while mutex is unlocked, and read while locked
@@ -913,6 +915,8 @@
virtual mixer_state prepareTracks_l(Vector< sp<Track> > *tracksToRemove);
virtual void threadLoop_mix();
virtual void threadLoop_sleepTime();
+ virtual void threadLoop_exit();
+ virtual bool shouldStandby_l();
// volumes last sent to audio HAL with stream->set_volume()
float mLeftVolFloat;
@@ -943,12 +947,9 @@
virtual bool waitingAsyncCallback();
virtual bool waitingAsyncCallback_l();
- virtual bool shouldStandby_l();
virtual void onAddNewTrack_l();
private:
- bool mHwPaused;
- bool mFlushPending;
size_t mPausedWriteLength; // length in bytes of write interrupted by pause
size_t mPausedBytesRemaining; // bytes still waiting in mixbuffer after resume
wp<Track> mPreviousTrack; // used to detect track switch
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index faea6ea..1e750bd 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -823,12 +823,11 @@
// this will be done by prepareTracks_l() when the track is stopped.
// prepareTracks_l() will see mState == FLUSHED, then
// remove from active track list, reset(), and trigger presentation complete
+ if (isDirect()) {
+ mFlushHwPending = true;
+ }
if (playbackThread->mActiveTracks.indexOf(this) < 0) {
reset();
- if (thread->type() == ThreadBase::DIRECT) {
- DirectOutputThread *t = (DirectOutputThread *)playbackThread;
- t->flushHw_l();
- }
}
}
// Prevent flush being lost if the track is flushed and then resumed
@@ -841,7 +840,7 @@
// must be called with thread lock held
void AudioFlinger::PlaybackThread::Track::flushAck()
{
- if (!isOffloaded())
+ if (!isOffloaded() && !isDirect())
return;
mFlushHwPending = false;
diff --git a/services/audiopolicy/AudioPolicyClientImplLegacy.cpp b/services/audiopolicy/AudioPolicyClientImplLegacy.cpp
index 97719da..a79f8ae 100644
--- a/services/audiopolicy/AudioPolicyClientImplLegacy.cpp
+++ b/services/audiopolicy/AudioPolicyClientImplLegacy.cpp
@@ -188,6 +188,13 @@
if (pSamplingRate == NULL || pFormat == NULL || pChannelMask == NULL || pDevices == NULL) {
return AUDIO_IO_HANDLE_NONE;
}
+
+ if (((*pDevices & AUDIO_DEVICE_IN_REMOTE_SUBMIX) == AUDIO_DEVICE_IN_REMOTE_SUBMIX)
+ && !captureAudioOutputAllowed()) {
+ ALOGE("open_input() permission denied: capture not allowed");
+ return AUDIO_IO_HANDLE_NONE;
+ }
+
audio_config_t config = AUDIO_CONFIG_INITIALIZER;;
config.sample_rate = *pSamplingRate;
config.format = *pFormat;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 2826cad..4508fa7 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -57,6 +57,16 @@
{
public:
+ typedef enum {
+ API_INPUT_INVALID = -1,
+ API_INPUT_LEGACY = 0,// e.g. audio recording from a microphone
+ API_INPUT_MIX_CAPTURE,// used for "remote submix", capture of the media to play it remotely
+ API_INPUT_MIX_EXT_POLICY_REROUTE,// used for platform audio rerouting, where mixes are
+ // handled by external and dynamically installed
+ // policies which reroute audio mixes
+ } input_type_t;
+
+public:
virtual ~AudioPolicyInterface() {}
//
// configuration functions
@@ -120,7 +130,8 @@
uint32_t samplingRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- audio_input_flags_t flags) = 0;
+ audio_input_flags_t flags,
+ input_type_t *inputType) = 0;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_io_handle_t input,
audio_session_t session) = 0;
diff --git a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
index d3c9013..a45dbb3 100644
--- a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
@@ -266,17 +266,47 @@
}
sp<AudioPolicyEffects>audioPolicyEffects;
status_t status;
+ AudioPolicyInterface::input_type_t inputType;
{
Mutex::Autolock _l(mLock);
// the audio_in_acoustics_t parameter is ignored by get_input()
status = mAudioPolicyManager->getInputForAttr(attr, input, session,
samplingRate, format, channelMask,
- flags);
+ flags, &inputType);
audioPolicyEffects = mAudioPolicyEffects;
+
+ if (status == NO_ERROR) {
+ // enforce permission (if any) required for each type of input
+ switch (inputType) {
+ case AudioPolicyInterface::API_INPUT_LEGACY:
+ break;
+ case AudioPolicyInterface::API_INPUT_MIX_CAPTURE:
+ if (!captureAudioOutputAllowed()) {
+ ALOGE("getInputForAttr() permission denied: capture not allowed");
+ status = PERMISSION_DENIED;
+ }
+ break;
+ case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
+ if (!modifyAudioRoutingAllowed()) {
+ ALOGE("getInputForAttr() permission denied: modify audio routing not allowed");
+ status = PERMISSION_DENIED;
+ }
+ break;
+ case AudioPolicyInterface::API_INPUT_INVALID:
+ default:
+ LOG_ALWAYS_FATAL("getInputForAttr() encountered an invalid input type %d",
+ (int)inputType);
+ }
+ }
+
+ if (status != NO_ERROR) {
+ if (status == PERMISSION_DENIED) {
+ mAudioPolicyManager->releaseInput(*input, session);
+ }
+ return status;
+ }
}
- if (status != NO_ERROR) {
- return status;
- }
+
if (audioPolicyEffects != 0) {
// create audio pre processors according to input source
status_t status = audioPolicyEffects->addInputEffects(*input, attr->source, session);
diff --git a/services/audiopolicy/AudioPolicyManager.cpp b/services/audiopolicy/AudioPolicyManager.cpp
index 9918b6f..be7158f 100644
--- a/services/audiopolicy/AudioPolicyManager.cpp
+++ b/services/audiopolicy/AudioPolicyManager.cpp
@@ -215,6 +215,13 @@
audio_policy_dev_state_t state,
const char *device_address)
{
+ return setDeviceConnectionStateInt(device, state, device_address);
+}
+
+status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device,
+ audio_policy_dev_state_t state,
+ const char *device_address)
+{
String8 address = (device_address == NULL) ? String8("") : String8(device_address);
// handle legacy remote submix case where the address was not always specified
if (deviceDistinguishesOnAddress(device) && (address.length() == 0)) {
@@ -457,7 +464,7 @@
audio_patch_handle_t afPatchHandle;
DeviceVector deviceList;
- audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ audio_devices_t txDevice = getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
ALOGV("updateCallRouting device rxDevice %08x txDevice %08x", rxDevice, txDevice);
// release existing RX patch if any
@@ -589,6 +596,9 @@
}
handleIncallSonification((audio_stream_type_t)stream, false, true);
}
+
+ // force reevaluating accessibility routing when call starts
+ mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
}
// store previous phone state for management of sonification strategy below
@@ -899,14 +909,13 @@
mPolicyMixes[i]->mMix.mRegistrationId.string(),
AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
desc = mPolicyMixes[i]->mOutput;
- break;
}
}
if (desc != 0) {
if (!audio_is_linear_pcm(format)) {
return BAD_VALUE;
}
- desc->mPolicyMixAddress = mPolicyMixes[i]->mMix.mRegistrationId;
+ desc->mPolicyMix = &mPolicyMixes[i]->mMix;
*stream = streamTypefromAttributesInt(&attributes);
*output = desc->mIoHandle;
ALOGV("getOutputForAttr() returns output %d", *output);
@@ -1227,8 +1236,7 @@
if (outputDesc->mRefCount[stream] == 1) {
// starting an output being rerouted?
audio_devices_t newDevice;
- if (outputDesc->mPolicyMixAddress != String8("")
- && outputDesc->mPolicyMixAddress != String8("0")) {
+ if (outputDesc->mPolicyMix != NULL) {
newDevice = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
} else {
newDevice = getNewOutputDevice(output, false /*fromCache*/);
@@ -1275,6 +1283,21 @@
// update the outputs if starting an output with a stream that can affect notification
// routing
handleNotificationRoutingForStream(stream);
+
+ // Automatically enable the remote submix input when output is started on a re routing mix
+ // of type MIX_TYPE_RECORDERS
+ if (audio_is_remote_submix_device(newDevice) && outputDesc->mPolicyMix != NULL &&
+ outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ outputDesc->mPolicyMix->mRegistrationId);
+ }
+
+ // force reevaluating accessibility routing when ringtone or alarm starts
+ if (strategy == STRATEGY_SONIFICATION) {
+ mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+ }
+
if (waitMs > muteWaitMs) {
usleep((waitMs - muteWaitMs) * 2 * 1000);
}
@@ -1309,6 +1332,16 @@
outputDesc->changeRefCount(stream, -1);
// store time at which the stream was stopped - see isStreamActive()
if (outputDesc->mRefCount[stream] == 0) {
+ // Automatically disable the remote submix input when output is stopped on a
+ // re routing mix of type MIX_TYPE_RECORDERS
+ if (audio_is_remote_submix_device(outputDesc->mDevice) &&
+ outputDesc->mPolicyMix != NULL &&
+ outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ outputDesc->mPolicyMix->mRegistrationId);
+ }
+
outputDesc->mStopTime[stream] = systemTime();
audio_devices_t newDevice = getNewOutputDevice(output, false /*fromCache*/);
// delay the device switch by twice the latency because stopOutput() is executed when
@@ -1394,18 +1427,21 @@
uint32_t samplingRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ input_type_t *inputType)
{
ALOGV("getInputForAttr() source %d, samplingRate %d, format %d, channelMask %x,"
"session %d, flags %#x",
attr->source, samplingRate, format, channelMask, session, flags);
*input = AUDIO_IO_HANDLE_NONE;
+ *inputType = API_INPUT_INVALID;
audio_devices_t device;
// handle legacy remote submix case where the address was not always specified
String8 address = String8("");
bool isSoundTrigger = false;
audio_source_t halInputSource = attr->source;
+ AudioMix *policyMix = NULL;
if (attr->source == AUDIO_SOURCE_REMOTE_SUBMIX &&
strncmp(attr->tags, "addr=", strlen("addr=")) == 0) {
@@ -1416,13 +1452,36 @@
ALOGW("getInputForAttr() no policy for address %s", address.string());
return BAD_VALUE;
}
+ if (mPolicyMixes[index]->mMix.mMixType != MIX_TYPE_PLAYERS) {
+ ALOGW("getInputForAttr() bad policy mix type for address %s", address.string());
+ return BAD_VALUE;
+ }
+ policyMix = &mPolicyMixes[index]->mMix;
+ *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
} else {
- device = getDeviceForInputSource(attr->source, &address);
-
+ device = getDeviceAndMixForInputSource(attr->source, &policyMix);
if (device == AUDIO_DEVICE_NONE) {
ALOGW("getInputForAttr() could not find device for source %d", attr->source);
return BAD_VALUE;
}
+ if (policyMix != NULL) {
+ address = policyMix->mRegistrationId;
+ if (policyMix->mMixType == MIX_TYPE_RECORDERS) {
+ // there is an external policy, but this input is attached to a mix of recorders,
+ // meaning it receives audio injected into the framework, so the recorder doesn't
+ // know about it and is therefore considered "legacy"
+ *inputType = API_INPUT_LEGACY;
+ } else {
+ // recording a mix of players defined by an external policy, we're rerouting for
+ // an external policy
+ *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
+ }
+ } else if (audio_is_remote_submix_device(device)) {
+ address = String8("0");
+ *inputType = API_INPUT_MIX_CAPTURE;
+ } else {
+ *inputType = API_INPUT_LEGACY;
+ }
// adapt channel selection to input source
switch (attr->source) {
case AUDIO_SOURCE_VOICE_UPLINK:
@@ -1507,8 +1566,11 @@
inputDesc->mFormat = format;
inputDesc->mChannelMask = channelMask;
inputDesc->mDevice = device;
- inputDesc->mSessions.add(session, address);
+ inputDesc->mSessions.add(session);
inputDesc->mIsSoundTrigger = isSoundTrigger;
+ inputDesc->mPolicyMix = policyMix;
+
+ ALOGV("getInputForAttr() returns input type = %d", inputType);
addInput(*input, inputDesc);
mpClientInterface->onAudioPortListUpdate();
@@ -1526,7 +1588,7 @@
}
sp<AudioInputDescriptor> inputDesc = mInputs.valueAt(index);
- index = inputDesc->mSessions.indexOfKey(session);
+ index = inputDesc->mSessions.indexOf(session);
if (index < 0) {
ALOGW("startInput() unknown session %d on input %d", session, input);
return BAD_VALUE;
@@ -1544,8 +1606,8 @@
sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
if (activeDesc->mInputSource == AUDIO_SOURCE_HOTWORD) {
ALOGW("startInput(%d) preempting low-priority input %d", input, activeInput);
- stopInput(activeInput, activeDesc->mSessions.keyAt(0));
- releaseInput(activeInput, activeDesc->mSessions.keyAt(0));
+ stopInput(activeInput, activeDesc->mSessions.itemAt(0));
+ releaseInput(activeInput, activeDesc->mSessions.itemAt(0));
} else {
ALOGE("startInput(%d) failed: other input %d already started", input, activeInput);
return INVALID_OPERATION;
@@ -1559,12 +1621,21 @@
}
setInputDevice(input, getNewInputDevice(input), true /* force */);
- // Automatically enable the remote submix output when input is started.
+ // automatically enable the remote submix output when input is started if not
+ // used by a policy mix of type MIX_TYPE_RECORDERS
// For remote submix (a virtual device), we open only one input per capture request.
if (audio_is_remote_submix_device(inputDesc->mDevice)) {
- setDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
- inputDesc->mSessions.valueAt(index));
+ String8 address = String8("");
+ if (inputDesc->mPolicyMix == NULL) {
+ address = String8("0");
+ } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+ address = inputDesc->mPolicyMix->mRegistrationId;
+ }
+ if (address != "") {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address);
+ }
}
}
@@ -1585,7 +1656,7 @@
}
sp<AudioInputDescriptor> inputDesc = mInputs.valueAt(index);
- index = inputDesc->mSessions.indexOfKey(session);
+ index = inputDesc->mSessions.indexOf(session);
if (index < 0) {
ALOGW("stopInput() unknown session %d on input %d", session, input);
return BAD_VALUE;
@@ -1599,11 +1670,20 @@
inputDesc->mRefCount--;
if (inputDesc->mRefCount == 0) {
- // automatically disable the remote submix output when input is stopped
+ // automatically disable the remote submix output when input is stopped if not
+ // used by a policy mix of type MIX_TYPE_RECORDERS
if (audio_is_remote_submix_device(inputDesc->mDevice)) {
- setDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- inputDesc->mSessions.valueAt(index));
+ String8 address = String8("");
+ if (inputDesc->mPolicyMix == NULL) {
+ address = String8("0");
+ } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+ address = inputDesc->mPolicyMix->mRegistrationId;
+ }
+ if (address != "") {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address);
+ }
}
resetInputDevice(input);
@@ -1627,12 +1707,12 @@
sp<AudioInputDescriptor> inputDesc = mInputs.valueAt(index);
ALOG_ASSERT(inputDesc != 0);
- index = inputDesc->mSessions.indexOfKey(session);
+ index = inputDesc->mSessions.indexOf(session);
if (index < 0) {
ALOGW("releaseInput() unknown session %d on input %d", session, input);
return;
}
- inputDesc->mSessions.removeItem(session);
+ inputDesc->mSessions.remove(session);
if (inputDesc->mOpenRefCount == 0) {
ALOGW("releaseInput() invalid open ref count %d", inputDesc->mOpenRefCount);
return;
@@ -1961,10 +2041,9 @@
const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
if (((outputDesc->device() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) != 0) &&
outputDesc->isStreamActive(stream, inPastMs, sysTime)) {
- // only consider empty or "0" address to only qualify the screen mirroring case
- // as "remote playback" (vs rerouting when the output is going to a dynamic policy)
- if (outputDesc->mPolicyMixAddress == String8("")
- || outputDesc->mPolicyMixAddress == String8("0")) {
+ // do not consider re routing (when the output is going to a dynamic policy)
+ // as "remote playback"
+ if (outputDesc->mPolicyMix == NULL) {
return true;
}
}
@@ -2046,9 +2125,15 @@
sp<AudioPolicyMix> policyMix = new AudioPolicyMix();
policyMix->mMix = mixes[i];
mPolicyMixes.add(address, policyMix);
- setDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
- address.string());
+ if (mixes[i].mMixType == MIX_TYPE_PLAYERS) {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.string());
+ } else {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.string());
+ }
}
return NO_ERROR;
}
@@ -2080,14 +2165,18 @@
mPolicyMixes.removeItemsAt(index);
- setDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- address.string());
+ if (getDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX, address.string()) ==
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE)
+ {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.string());
+ }
if (getDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address.string()) ==
AUDIO_POLICY_DEVICE_STATE_AVAILABLE)
{
- setDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
address.string());
}
@@ -2840,7 +2929,7 @@
{
*session = (audio_session_t)mpClientInterface->newAudioUniqueId();
*ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId();
- *device = getDeviceForInputSource(AUDIO_SOURCE_HOTWORD);
+ *device = getDeviceAndMixForInputSource(AUDIO_SOURCE_HOTWORD);
mSoundTriggerSessions.add(*session, *ioHandle);
@@ -3543,12 +3632,14 @@
ssize_t index = mPolicyMixes.indexOfKey(address);
if (index >= 0) {
mPolicyMixes[index]->mOutput = desc;
+ desc->mPolicyMix = &mPolicyMixes[index]->mMix;
} else {
ALOGE("checkOutputsForDevice() cannot find policy for address %s",
address.string());
}
- }
- if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) {
+ } else if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) {
+ // no duplicated output for direct outputs and
+ // outputs used by dynamic policy mixes
audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
// set initial stream volume for device
@@ -3972,6 +4063,24 @@
SortedVector<audio_io_handle_t> srcOutputs = getOutputsForDevice(oldDevice, mPreviousOutputs);
SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevice(newDevice, mOutputs);
+ // also take into account external policy-related changes: add all outputs which are
+ // associated with policies in the "before" and "after" output vectors
+ ALOGVV("checkOutputForStrategy(): policy related outputs");
+ for (size_t i = 0 ; i < mPreviousOutputs.size() ; i++) {
+ const sp<AudioOutputDescriptor> desc = mPreviousOutputs.valueAt(i);
+ if (desc != 0 && desc->mPolicyMix != NULL) {
+ srcOutputs.add(desc->mIoHandle);
+ ALOGVV(" previous outputs: adding %d", desc->mIoHandle);
+ }
+ }
+ for (size_t i = 0 ; i < mOutputs.size() ; i++) {
+ const sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ if (desc != 0 && desc->mPolicyMix != NULL) {
+ dstOutputs.add(desc->mIoHandle);
+ ALOGVV(" new outputs: adding %d", desc->mIoHandle);
+ }
+ }
+
if (!vectorsEqual(srcOutputs,dstOutputs)) {
ALOGV("checkOutputForStrategy() strategy %d, moving from output %d to output %d",
strategy, srcOutputs[0], dstOutputs[0]);
@@ -4166,7 +4275,7 @@
}
}
- audio_devices_t device = getDeviceForInputSource(inputDesc->mInputSource);
+ audio_devices_t device = getDeviceAndMixForInputSource(inputDesc->mInputSource);
ALOGV("getNewInputDevice() selected device %x", device);
return device;
@@ -4432,7 +4541,8 @@
// - cannot route from voice call RX OR
// - audio HAL version is < 3.0 and TX device is on the primary HW module
if (mPhoneState == AUDIO_MODE_IN_CALL) {
- audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ audio_devices_t txDevice =
+ getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
sp<AudioOutputDescriptor> hwOutputDesc = mOutputs.valueFor(mPrimaryOutput);
if (((mAvailableInputDevices.types() &
AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) ||
@@ -4500,7 +4610,7 @@
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
if (device) break;
}
- if (mPhoneState != AUDIO_MODE_IN_CALL) {
+ if (!isInCall()) {
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_ACCESSORY;
if (device) break;
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE;
@@ -5013,10 +5123,10 @@
return NULL;
}
-audio_devices_t AudioPolicyManager::getDeviceForInputSource(audio_source_t inputSource,
- String8 *address)
+
+audio_devices_t AudioPolicyManager::getDeviceAndMixForInputSource(audio_source_t inputSource,
+ AudioMix **policyMix)
{
- uint32_t device = AUDIO_DEVICE_NONE;
audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() &
~AUDIO_DEVICE_BIT_IN;
@@ -5030,8 +5140,8 @@
(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mPolicyMixes[i]->mMix.mCriteria[j].mRule &&
mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mSource != inputSource)) {
if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
- if (address != NULL) {
- *address = mPolicyMixes[i]->mMix.mRegistrationId;
+ if (policyMix != NULL) {
+ *policyMix = &mPolicyMixes[i]->mMix;
}
return AUDIO_DEVICE_IN_REMOTE_SUBMIX;
}
@@ -5040,6 +5150,15 @@
}
}
+ return getDeviceForInputSource(inputSource);
+}
+
+audio_devices_t AudioPolicyManager::getDeviceForInputSource(audio_source_t inputSource)
+{
+ uint32_t device = AUDIO_DEVICE_NONE;
+ audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() &
+ ~AUDIO_DEVICE_BIT_IN;
+
switch (inputSource) {
case AUDIO_SOURCE_VOICE_UPLINK:
if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) {
@@ -5052,6 +5171,9 @@
case AUDIO_SOURCE_MIC:
if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
device = AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
+ } else if ((mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO) &&
+ (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) {
+ device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
} else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) {
device = AUDIO_DEVICE_IN_WIRED_HEADSET;
} else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) {
@@ -5127,9 +5249,6 @@
case AUDIO_SOURCE_REMOTE_SUBMIX:
if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
- if (address != NULL) {
- *address = "0";
- }
}
break;
case AUDIO_SOURCE_FM_TUNER:
@@ -5571,8 +5690,7 @@
ssize_t index = mOutputs.indexOfKey(output);
if (index >= 0) {
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
- if (outputDesc->mPolicyMixAddress != String8("")
- && outputDesc->mPolicyMixAddress != String8("0")) {
+ if (outputDesc->mPolicyMix != NULL) {
ALOGV("max gain when rerouting for output=%d", output);
volume = 1.0f;
}
@@ -5761,7 +5879,7 @@
AudioPolicyManager::AudioOutputDescriptor::AudioOutputDescriptor(
const sp<IOProfile>& profile)
: mId(0), mIoHandle(0), mLatency(0),
- mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPolicyMixAddress(String8("")),
+ mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL),
mPatchHandle(0),
mOutput1(0), mOutput2(0), mProfile(profile), mDirectOpenCount(0)
{
@@ -5957,7 +6075,7 @@
AudioPolicyManager::AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile)
: mId(0), mIoHandle(0),
- mDevice(AUDIO_DEVICE_NONE), mPatchHandle(0), mRefCount(0),
+ mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), mPatchHandle(0), mRefCount(0),
mInputSource(AUDIO_SOURCE_DEFAULT), mProfile(profile), mIsSoundTrigger(false)
{
if (profile != NULL) {
diff --git a/services/audiopolicy/AudioPolicyManager.h b/services/audiopolicy/AudioPolicyManager.h
index 3eef8fe..2059f58 100644
--- a/services/audiopolicy/AudioPolicyManager.h
+++ b/services/audiopolicy/AudioPolicyManager.h
@@ -112,7 +112,8 @@
uint32_t samplingRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- audio_input_flags_t flags);
+ audio_input_flags_t flags,
+ input_type_t *inputType);
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_io_handle_t input,
@@ -497,7 +498,7 @@
uint32_t mLatency; //
audio_output_flags_t mFlags; //
audio_devices_t mDevice; // current device this output is routed to
- String8 mPolicyMixAddress; // non empty or "0" when used by a dynamic policy
+ AudioMix *mPolicyMix; // non NULL when used by a dynamic policy
audio_patch_handle_t mPatchHandle;
uint32_t mRefCount[AUDIO_STREAM_CNT]; // number of streams of each type using this output
nsecs_t mStopTime[AUDIO_STREAM_CNT];
@@ -523,6 +524,7 @@
audio_port_handle_t mId;
audio_io_handle_t mIoHandle; // input handle
audio_devices_t mDevice; // current device this input is routed to
+ AudioMix *mPolicyMix; // non NULL when used by a dynamic policy
audio_patch_handle_t mPatchHandle;
uint32_t mRefCount; // number of AudioRecord clients using
// this input
@@ -530,9 +532,7 @@
audio_source_t mInputSource; // input source selected by application
//(mediarecorder.h)
const sp<IOProfile> mProfile; // I/O profile this output derives from
- // audio sessions attached to this input and the
- // corresponding device address
- DefaultKeyedVector<audio_session_t, String8> mSessions;
+ SortedVector<audio_session_t> mSessions; // audio sessions attached to this input
bool mIsSoundTrigger; // used by a soundtrigger capture
virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
@@ -611,8 +611,7 @@
audio_patch_handle_t *patchHandle = NULL);
// select input device corresponding to requested audio source
- virtual audio_devices_t getDeviceForInputSource(audio_source_t inputSource,
- String8 *address = NULL);
+ virtual audio_devices_t getDeviceForInputSource(audio_source_t inputSource);
// return io handle of active input or 0 if no input is active
// Only considers inputs from physical devices (e.g. main mic, headset mic) when
@@ -764,9 +763,9 @@
bool isNonOffloadableEffectEnabled();
- status_t addAudioPatch(audio_patch_handle_t handle,
+ virtual status_t addAudioPatch(audio_patch_handle_t handle,
const sp<AudioPatch>& patch);
- status_t removeAudioPatch(audio_patch_handle_t handle);
+ virtual status_t removeAudioPatch(audio_patch_handle_t handle);
sp<AudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const;
sp<AudioInputDescriptor> getInputFromId(audio_port_handle_t id) const;
@@ -915,6 +914,16 @@
uint32_t handleEventForBeacon(int event);
uint32_t setBeaconMute(bool mute);
bool isValidAttributes(const audio_attributes_t *paa);
+
+ // select input device corresponding to requested audio source and return associated policy
+ // mix if any. Calls getDeviceForInputSource().
+ audio_devices_t getDeviceAndMixForInputSource(audio_source_t inputSource,
+ AudioMix **policyMix = NULL);
+
+ // Called by setDeviceConnectionState().
+ status_t setDeviceConnectionStateInt(audio_devices_t device,
+ audio_policy_dev_state_t state,
+ const char *device_address);
};
};
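(Editor's note) With the per-session address map gone from AudioInputDescriptor, input bookkeeping reduces to plain set membership plus a back-pointer to the owning mix. A hedged sketch written as if inside AudioPolicyManager, using only the members declared above; the helper itself is hypothetical:

    // mSessions is the SortedVector<audio_session_t> and mPolicyMix the AudioMix*
    // declared in AudioInputDescriptor above.
    void attachSessionToInput(const sp<AudioInputDescriptor>& inputDesc,
                              audio_session_t session) {
        inputDesc->mSessions.add(session);      // kept sorted; re-adding a session is harmless
        if (inputDesc->mPolicyMix != NULL) {
            // The capture belongs to a dynamic policy; its remote submix address
            // is reachable as inputDesc->mPolicyMix->mRegistrationId.
        }
    }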
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 60939f9..0ed5586 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -918,6 +918,15 @@
ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
}
+
+ // Flush all in-process captures and buffers in order to stop
+ // preview faster.
+ res = mDevice->flush();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+
res = mDevice->waitUntilDrained();
if (res != OK) {
ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",