Merge "propogate heartbeat error code to app" into lmp-dev
diff --git a/include/media/AudioResamplerPublic.h b/include/media/AudioResamplerPublic.h
new file mode 100644
index 0000000..97847a0
--- /dev/null
+++ b/include/media/AudioResamplerPublic.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_PUBLIC_H
+#define ANDROID_AUDIO_RESAMPLER_PUBLIC_H
+
+// AUDIO_RESAMPLER_DOWN_RATIO_MAX is the maximum ratio between the original
+// audio sample rate and the target rate when downsampling,
+// as permitted in the audio framework, e.g. AudioTrack and AudioFlinger.
+// In practice, it is not recommended to downsample more than 6:1
+// for best audio quality, even though the audio framework permits a larger
+// downsampling ratio.
+// TODO: replace with an API
+#define AUDIO_RESAMPLER_DOWN_RATIO_MAX 256
+
+#endif // ANDROID_AUDIO_RESAMPLER_PUBLIC_H
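
The constant is consumed by AudioTrack later in this change; a minimal sketch of the kind of check it enables (hypothetical helper, not framework API):

    #include <media/AudioResamplerPublic.h>

    // Reject conversions that would downsample by more than the framework limit,
    // even though ratios above roughly 6:1 already cost audible quality.
    static inline bool isDownsampleRatioSupported(uint32_t inputRate, uint32_t outputRate) {
        return inputRate != 0 && outputRate != 0
                && inputRate <= outputRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX;
    }
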
diff --git a/include/media/IMediaCodecList.h b/include/media/IMediaCodecList.h
new file mode 100644
index 0000000..e93ea8b
--- /dev/null
+++ b/include/media/IMediaCodecList.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IMEDIACODECLIST_H
+#define ANDROID_IMEDIACODECLIST_H
+
+#include <utils/Errors.h> // for status_t
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+struct MediaCodecInfo;
+
+class IMediaCodecList: public IInterface
+{
+public:
+ DECLARE_META_INTERFACE(MediaCodecList);
+
+ virtual size_t countCodecs() const = 0;
+ virtual sp<MediaCodecInfo> getCodecInfo(size_t index) const = 0;
+
+ virtual ssize_t findCodecByType(
+ const char *type, bool encoder, size_t startIndex = 0) const = 0;
+
+ virtual ssize_t findCodecByName(const char *name) const = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnMediaCodecList: public BnInterface<IMediaCodecList>
+{
+public:
+ virtual status_t onTransact( uint32_t code,
+ const Parcel& data,
+ Parcel* reply,
+ uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IMEDIACODECLIST_H
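
A minimal client-side sketch, assuming the MediaCodecList::getInstance() accessor added later in this change and a process that links libmedia:

    #include <media/IMediaCodecList.h>
    #include <media/MediaCodecInfo.h>
    #include <media/stagefright/MediaCodecList.h>

    sp<IMediaCodecList> list = MediaCodecList::getInstance();
    ssize_t idx = list->findCodecByType("video/avc", false /* encoder */);
    if (idx >= 0) {
        sp<MediaCodecInfo> info = list->getCodecInfo(idx);
        const char *name = info->getCodecName();  // e.g. "OMX.google.h264.decoder"
    }
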
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 5b45376..d7e584a 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -34,6 +34,7 @@
struct ICrypto;
struct IDrm;
struct IHDCP;
+struct IMediaCodecList;
struct IMediaHTTPService;
class IMediaRecorder;
class IOMX;
@@ -65,6 +66,7 @@
virtual sp<ICrypto> makeCrypto() = 0;
virtual sp<IDrm> makeDrm() = 0;
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) = 0;
+ virtual sp<IMediaCodecList> getCodecList() const = 0;
// Connects to a remote display.
// 'iface' specifies the address of the local interface on which to listen for
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index dd13fea..627f23b 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -87,6 +87,10 @@
node_id node, OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) = 0;
+
virtual status_t enableGraphicBuffers(
node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
diff --git a/include/media/MediaCodecInfo.h b/include/media/MediaCodecInfo.h
new file mode 100644
index 0000000..29315ce
--- /dev/null
+++ b/include/media/MediaCodecInfo.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CODEC_INFO_H_
+
+#define MEDIA_CODEC_INFO_H_
+
+#include <binder/Parcel.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <sys/types.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct AMessage;
+struct Parcel;
+struct CodecCapabilities;
+
+struct MediaCodecInfo : public RefBase {
+ struct ProfileLevel {
+ uint32_t mProfile;
+ uint32_t mLevel;
+ };
+
+ struct Capabilities : public RefBase {
+ void getSupportedProfileLevels(Vector<ProfileLevel> *profileLevels) const;
+ void getSupportedColorFormats(Vector<uint32_t> *colorFormats) const;
+ uint32_t getFlags() const;
+ const sp<AMessage> &getDetails() const;
+
+ private:
+ Vector<ProfileLevel> mProfileLevels;
+ Vector<uint32_t> mColorFormats;
+ uint32_t mFlags;
+ sp<AMessage> mDetails;
+
+ Capabilities();
+
+ // read object from parcel even if object creation fails
+ static sp<Capabilities> FromParcel(const Parcel &parcel);
+ status_t writeToParcel(Parcel *parcel) const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(Capabilities);
+
+ friend class MediaCodecInfo;
+ };
+
+ bool isEncoder() const;
+ bool hasQuirk(const char *name) const;
+ void getSupportedMimes(Vector<AString> *mimes) const;
+ const sp<Capabilities> &getCapabilitiesFor(const char *mime) const;
+ const char *getCodecName() const;
+
+ /**
+ * Serialization over Binder
+ */
+ static sp<MediaCodecInfo> FromParcel(const Parcel &parcel);
+ status_t writeToParcel(Parcel *parcel) const;
+
+private:
+ // variables set only in the constructor - these are accessed by MediaCodecList
+ // to avoid duplicating the same variables
+ AString mName;
+ bool mIsEncoder;
+ bool mHasSoleMime; // was initialized with mime
+
+ Vector<AString> mQuirks;
+ KeyedVector<AString, sp<Capabilities> > mCaps;
+
+ sp<Capabilities> mCurrentCaps; // currently initialized capabilities
+
+ ssize_t getCapabilityIndex(const char *mime) const;
+
+ /* Methods used by MediaCodecList to construct the info
+ * object from XML.
+ *
+ * After the info object is created:
+ * - additional quirks can be added
+ * - additional mimes can be added
+ * - OMX codec capabilities can be set for the current mime-type
+ * - a capability detail can be set for the current mime-type
+ * - a feature can be set for the current mime-type
+ * - info object can be completed when parsing of a mime-type is done
+ */
+ MediaCodecInfo(AString name, bool encoder, const char *mime);
+ void addQuirk(const char *name);
+ status_t addMime(const char *mime);
+ status_t initializeCapabilities(const CodecCapabilities &caps);
+ void addDetail(const AString &key, const AString &value);
+ void addFeature(const AString &key, int32_t value);
+ void complete();
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaCodecInfo);
+
+ friend class MediaCodecList;
+};
+
+} // namespace android
+
+#endif // MEDIA_CODEC_INFO_H_
+
+
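
Continuing the sketch above, the per-mime capabilities replace the old MediaCodecList::getCodecCapabilities() query; the flag bit names are defined outside this excerpt, so only the accessors are shown:

    const sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor("video/avc");
    if (caps != NULL) {
        Vector<MediaCodecInfo::ProfileLevel> profileLevels;
        Vector<uint32_t> colorFormats;
        caps->getSupportedProfileLevels(&profileLevels);
        caps->getSupportedColorFormats(&colorFormats);
        uint32_t flags = caps->getFlags();          // capability flag bits (e.g. adaptive playback)
        sp<AMessage> details = caps->getDetails();  // "feature-..." and other detail keys
    }
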
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index e756368..2442219 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -172,7 +172,8 @@
INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3,
INVOKE_ID_SELECT_TRACK = 4,
INVOKE_ID_UNSELECT_TRACK = 5,
- INVOKE_ID_SET_VIDEO_SCALING_MODE = 6
+ INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
+ INVOKE_ID_GET_SELECTED_TRACK = 7
};
// Keep MEDIA_TRACK_TYPE_* in sync with MediaPlayer.java.
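
A hypothetical sketch of driving the new invoke id from native code; it assumes a connected MediaPlayer named player and that the request Parcel already carries the interface token the framework writes first (MediaPlayer.java's newRequest() does this):

    Parcel request, reply;
    // interface token assumed already written into `request`
    request.writeInt32(INVOKE_ID_GET_SELECTED_TRACK);
    request.writeInt32(MEDIA_TRACK_TYPE_AUDIO);
    if (player->invoke(request, &reply) == OK) {
        int32_t selectedAudioTrack = reply.readInt32();  // negative if none selected / unsupported
    }
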
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index a68adea..eb31c77 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -76,6 +76,10 @@
DISALLOW_EVIL_CONSTRUCTORS(PortDescription);
};
+ static bool isFlexibleColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t colorFormat, OMX_U32 *flexibleEquivalent);
+
protected:
virtual ~ACodec();
@@ -230,6 +234,9 @@
status_t setComponentRole(bool isEncoder, const char *mime);
status_t configureCodec(const char *mime, const sp<AMessage> &msg);
+ status_t configureTunneledVideoPlayback(int64_t audioHwSync,
+ const sp<ANativeWindow> &nativeWindow);
+
status_t setVideoPortFormatType(
OMX_U32 portIndex,
OMX_VIDEO_CODINGTYPE compressionFormat,
@@ -248,10 +255,19 @@
int32_t width, int32_t height,
OMX_VIDEO_CODINGTYPE compressionFormat);
+ typedef struct drcParams {
+ int32_t drcCut;
+ int32_t drcBoost;
+ int32_t heavyCompression;
+ int32_t targetRefLevel;
+ int32_t encodedTargetLevel;
+ } drcParams_t;
+
status_t setupAACCodec(
bool encoder,
int32_t numChannels, int32_t sampleRate, int32_t bitRate,
- int32_t aacProfile, bool isADTS, int32_t sbrMode);
+ int32_t aacProfile, bool isADTS, int32_t sbrMode,
+ int32_t maxOutputChannelCount, const drcParams_t& drc);
status_t setupAC3Codec(bool encoder, int32_t numChannels, int32_t sampleRate);
@@ -307,7 +323,10 @@
OMX_ERRORTYPE error = OMX_ErrorUndefined,
status_t internalError = UNKNOWN_ERROR);
- static void describeDefaultColorFormat(DescribeColorFormatParams &describeParams);
+ static bool describeDefaultColorFormat(DescribeColorFormatParams &describeParams);
+ static bool describeColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ DescribeColorFormatParams &describeParams);
status_t requestIDRFrame();
status_t setParameters(const sp<AMessage> ¶ms);
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index 4ff0d62..b87a09e 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -55,10 +55,10 @@
struct BatteryNotifier;
static sp<MediaCodec> CreateByType(
- const sp<ALooper> &looper, const char *mime, bool encoder);
+ const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL);
static sp<MediaCodec> CreateByComponentName(
- const sp<ALooper> &looper, const char *name);
+ const sp<ALooper> &looper, const char *name, status_t *err = NULL);
status_t configure(
const sp<AMessage> &format,
@@ -223,6 +223,7 @@
AString mComponentName;
uint32_t mReplyID;
uint32_t mFlags;
+ status_t mStickyError;
sp<Surface> mNativeWindow;
SoftwareRenderer *mSoftRenderer;
sp<AMessage> mOutputFormat;
@@ -304,6 +305,18 @@
void updateBatteryStat();
bool isExecuting() const;
+ /* called to get the last codec error when the sticky flag is set.
+ * if no such codec error is found, returns UNKNOWN_ERROR.
+ */
+ inline status_t getStickyError() const {
+ return mStickyError != 0 ? mStickyError : UNKNOWN_ERROR;
+ }
+
+ inline void setStickyError(status_t err) {
+ mFlags |= kFlagStickyError;
+ mStickyError = err;
+ }
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};
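
With the new optional out-parameter, callers learn why creation failed instead of receiving a bare NULL; a minimal sketch, assuming an already started ALooper named looper:

    status_t err = OK;
    sp<MediaCodec> codec = MediaCodec::CreateByType(
            looper, "video/avc", false /* encoder */, &err);
    if (codec == NULL) {
        // err now explains the init() failure (e.g. NAME_NOT_FOUND when no component matches)
        return err;
    }
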
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index c11fcc9..8605d99 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -20,6 +20,9 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AString.h>
+#include <media/IMediaCodecList.h>
+#include <media/IOMX.h>
+#include <media/MediaCodecInfo.h>
#include <sys/types.h>
#include <utils/Errors.h>
@@ -31,32 +34,22 @@
struct AMessage;
-struct MediaCodecList {
- static const MediaCodecList *getInstance();
+struct MediaCodecList : public BnMediaCodecList {
+ static sp<IMediaCodecList> getInstance();
- ssize_t findCodecByType(
+ virtual ssize_t findCodecByType(
const char *type, bool encoder, size_t startIndex = 0) const;
- ssize_t findCodecByName(const char *name) const;
+ virtual ssize_t findCodecByName(const char *name) const;
- size_t countCodecs() const;
- const char *getCodecName(size_t index) const;
- bool isEncoder(size_t index) const;
- bool codecHasQuirk(size_t index, const char *quirkName) const;
+ virtual size_t countCodecs() const;
- status_t getSupportedTypes(size_t index, Vector<AString> *types) const;
+ virtual sp<MediaCodecInfo> getCodecInfo(size_t index) const {
+ return mCodecInfos.itemAt(index);
+ }
- struct ProfileLevel {
- uint32_t mProfile;
- uint32_t mLevel;
- };
- status_t getCodecCapabilities(
- size_t index, const char *type,
- Vector<ProfileLevel> *profileLevels,
- Vector<uint32_t> *colorFormats,
- uint32_t *flags,
- // TODO default argument is only for compatibility with existing JNI
- sp<AMessage> *capabilities = NULL) const;
+ // to be used by MediaPlayerService alone
+ static sp<IMediaCodecList> getLocalInstance();
private:
enum Section {
@@ -70,17 +63,8 @@
SECTION_INCLUDE,
};
- struct CodecInfo {
- AString mName;
- bool mIsEncoder;
- uint32_t mTypes;
- uint32_t mSoleType;
- uint32_t mQuirks;
- KeyedVector<uint32_t, sp<AMessage> > mCaps;
- sp<AMessage> mCurrentCaps;
- };
-
- static MediaCodecList *sCodecList;
+ static sp<IMediaCodecList> sCodecList;
+ static sp<IMediaCodecList> sRemoteList;
status_t mInitCheck;
Section mCurrentSection;
@@ -88,9 +72,9 @@
int32_t mDepth;
AString mHrefBase;
- Vector<CodecInfo> mCodecInfos;
- KeyedVector<AString, size_t> mCodecQuirks;
- KeyedVector<AString, size_t> mTypes;
+ Vector<sp<MediaCodecInfo> > mCodecInfos;
+ sp<MediaCodecInfo> mCurrentInfo;
+ sp<IOMX> mOMX;
MediaCodecList();
~MediaCodecList();
@@ -117,6 +101,8 @@
status_t addFeature(const char **attrs);
void addType(const char *name);
+ status_t initializeCapabilities(const char *type);
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodecList);
};
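
getLocalInstance() is intended for mediaserver only (MediaPlayerService::getCodecList() below returns it); in a client process getInstance() is expected to fetch the remote list over binder. A plausible sketch of that client-side path, stated as an assumption since MediaCodecList.cpp is not part of this excerpt:

    #include <binder/IServiceManager.h>
    #include <media/IMediaPlayerService.h>

    sp<IBinder> binder = defaultServiceManager()->getService(String16("media.player"));
    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
    sp<IMediaCodecList> remoteList = service->getCodecList();  // BpMediaCodecList proxy
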
diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h
index 686f286..7540e07 100644
--- a/include/media/stagefright/MediaErrors.h
+++ b/include/media/stagefright/MediaErrors.h
@@ -23,6 +23,18 @@
namespace android {
enum {
+ // status_t map for errors in the media framework
+ // OK or NO_ERROR or 0 represents no error.
+
+ // See system/core/include/utils/Errors.h
+ // System standard errors from -1 through (possibly) -133
+ //
+ // Errors with special meanings and side effects.
+ // INVALID_OPERATION: Operation attempted in an illegal state (will try to signal to app).
+ // DEAD_OBJECT: Signal from CodecBase to MediaCodec that MediaServer has died.
+ // NAME_NOT_FOUND: Signal from CodecBase to MediaCodec that the component was not found.
+
+ // Media errors
MEDIA_ERROR_BASE = -1000,
ERROR_ALREADY_CONNECTED = MEDIA_ERROR_BASE,
@@ -64,8 +76,34 @@
// Heartbeat Error Codes
HEARTBEAT_ERROR_BASE = -3000,
ERROR_HEARTBEAT_TERMINATE_REQUESTED = HEARTBEAT_ERROR_BASE,
+
+ // NDK Error codes
+ // frameworks/av/include/ndk/NdkMediaError.h
+ // from -10000 (0xFFFFD8F0 - 0xFFFFD8EC)
+ // from -20000 (0xFFFFB1E0 - 0xFFFFB1D7)
+
+ // Codec errors are permitted from 0x80001000 through 0x9000FFFF
+ ERROR_CODEC_MAX = (signed)0x9000FFFF,
+ ERROR_CODEC_MIN = (signed)0x80001000,
+
+ // System unknown errors from 0x80000000 - 0x80000007 (INT32_MIN + 7)
+ // See system/core/include/utils/Errors.h
};
+// action codes for MediaCodecs that tell the upper layer and application
+// the severity of any error.
+enum ActionCode {
+ ACTION_CODE_FATAL,
+ ACTION_CODE_TRANSIENT,
+ ACTION_CODE_RECOVERABLE,
+};
+
+// returns true if err is a recognized DRM error code
+static inline bool isCryptoError(status_t err) {
+ return (ERROR_DRM_RESOURCE_BUSY <= err && err <= ERROR_DRM_UNKNOWN)
+ || (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
+}
+
} // namespace android
#endif // MEDIA_ERRORS_H_
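
A minimal sketch (hypothetical helper, not part of this change) of how an upper layer might combine the new error ranges, isCryptoError(), and action codes when reporting a codec failure:

    static const char *describeCodecError(status_t err, ActionCode action) {
        if (isCryptoError(err)) {
            return "DRM/crypto error";                 // surfaced separately to apps
        }
        if (err >= ERROR_CODEC_MIN && err <= ERROR_CODEC_MAX) {
            return "codec (OMX/vendor) error";         // passed through from the component
        }
        return (action == ACTION_CODE_FATAL) ? "fatal error" : "transient/recoverable error";
    }
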
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 5590b60..e341160 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -28,7 +28,7 @@
namespace android {
-struct MediaCodecList;
+struct MediaCodecInfo;
class MemoryDealer;
struct OMXCodecObserver;
struct CodecProfileLevel;
@@ -115,7 +115,7 @@
Vector<CodecNameAndQuirks> *matchingCodecNamesAndQuirks);
static uint32_t getComponentQuirks(
- const MediaCodecList *list, size_t index);
+ const sp<MediaCodecInfo> &list);
static bool findCodecQuirks(const char *componentName, uint32_t *quirks);
diff --git a/include/media/stagefright/foundation/ABase.h b/include/media/stagefright/foundation/ABase.h
index 949d49e..72e3d87 100644
--- a/include/media/stagefright/foundation/ABase.h
+++ b/include/media/stagefright/foundation/ABase.h
@@ -18,6 +18,8 @@
#define A_BASE_H_
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a)))
+
#define DISALLOW_EVIL_CONSTRUCTORS(name) \
name(const name &); \
name &operator=(const name &)
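
The new macro yields an array's element count at compile time; a trivial usage sketch:

    static const int32_t kSampleRates[] = { 8000, 16000, 44100, 48000 };
    for (size_t i = 0; i < ARRAY_SIZE(kSampleRates); ++i) {
        // kSampleRates[i] ...
    }
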
diff --git a/include/media/stagefright/foundation/AString.h b/include/media/stagefright/foundation/AString.h
index 4be3c6d..7c98699 100644
--- a/include/media/stagefright/foundation/AString.h
+++ b/include/media/stagefright/foundation/AString.h
@@ -18,11 +18,13 @@
#define A_STRING_H_
+#include <utils/Errors.h>
#include <sys/types.h>
namespace android {
struct String8;
+struct Parcel;
struct AString {
AString();
@@ -77,7 +79,9 @@
bool operator>(const AString &other) const;
int compare(const AString &other) const;
+ int compareIgnoreCase(const AString &other) const;
+ bool equalsIgnoreCase(const AString &other) const;
bool startsWith(const char *prefix) const;
bool endsWith(const char *suffix) const;
bool startsWithIgnoreCase(const char *prefix) const;
@@ -85,6 +89,9 @@
void tolower();
+ static AString FromParcel(const Parcel &parcel);
+ status_t writeToParcel(Parcel *parcel) const;
+
private:
static const char *kEmptyString;
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index cee26d9..3be0651 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -25,6 +25,7 @@
AudioRecord.cpp \
AudioSystem.cpp \
mediaplayer.cpp \
+ IMediaCodecList.cpp \
IMediaHTTPConnection.cpp \
IMediaHTTPService.cpp \
IMediaLogService.cpp \
@@ -36,6 +37,7 @@
IRemoteDisplay.cpp \
IRemoteDisplayClient.cpp \
IStreamSource.cpp \
+ MediaCodecInfo.cpp \
Metadata.cpp \
mediarecorder.cpp \
IMediaMetadataRetriever.cpp \
@@ -74,6 +76,7 @@
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/av/media/libstagefright \
external/icu/icu4c/source/common \
external/icu/icu4c/source/i18n \
$(call include-path-for, audio-effects) \
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index b5c9125..d87e6f5 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -28,6 +28,7 @@
#include <utils/Log.h>
#include <private/media/AudioTrackShared.h>
#include <media/IAudioFlinger.h>
+#include <media/AudioResamplerPublic.h>
#define WAIT_PERIOD_MS 10
#define WAIT_STREAM_END_TIMEOUT_SEC 120
@@ -82,7 +83,7 @@
}
*frameCount = (sampleRate == 0) ? afFrameCount * minBufCount :
- afFrameCount * minBufCount * sampleRate / afSampleRate;
+ afFrameCount * minBufCount * uint64_t(sampleRate) / afSampleRate;
// The formula above should always produce a non-zero value, but return an error
// in the unlikely event that it does not, as that's part of the API contract.
if (*frameCount == 0) {
@@ -646,8 +647,7 @@
if (AudioSystem::getOutputSamplingRateForAttr(&afSamplingRate, &mAttributes) != NO_ERROR) {
return NO_INIT;
}
- // Resampler implementation limits input sampling rate to 2 x output sampling rate.
- if (rate == 0 || rate > afSamplingRate*2 ) {
+ if (rate == 0 || rate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {
return BAD_VALUE;
}
@@ -1002,7 +1002,7 @@
minBufCount = nBuffering;
}
- size_t minFrameCount = (afFrameCount*mSampleRate*minBufCount)/afSampleRate;
+ size_t minFrameCount = afFrameCount * minBufCount * uint64_t(mSampleRate) / afSampleRate;
ALOGV("minFrameCount: %zu, afFrameCount=%zu, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
", afLatency=%d",
minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency);
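
The uint64_t promotion in both frame-count formulas avoids 32-bit overflow of the intermediate product. With illustrative values afFrameCount = 4096, minBufCount = 24 and sampleRate = 48000, the product is 4,718,592,000, which exceeds UINT32_MAX (4,294,967,295) and would wrap on a 32-bit size_t before the division by afSampleRate; widening one factor keeps the intermediate exact.
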
diff --git a/media/libmedia/IMediaCodecList.cpp b/media/libmedia/IMediaCodecList.cpp
new file mode 100644
index 0000000..bf7c5ca
--- /dev/null
+++ b/media/libmedia/IMediaCodecList.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/IMediaCodecList.h>
+#include <media/MediaCodecInfo.h>
+
+#include <utils/Errors.h> // for status_t
+
+namespace android {
+
+enum {
+ CREATE = IBinder::FIRST_CALL_TRANSACTION,
+ COUNT_CODECS,
+ GET_CODEC_INFO,
+ FIND_CODEC_BY_TYPE,
+ FIND_CODEC_BY_NAME,
+};
+
+class BpMediaCodecList: public BpInterface<IMediaCodecList>
+{
+public:
+ BpMediaCodecList(const sp<IBinder>& impl)
+ : BpInterface<IMediaCodecList>(impl)
+ {
+ }
+
+ virtual size_t countCodecs() const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ remote()->transact(COUNT_CODECS, data, &reply);
+ return static_cast<size_t>(reply.readInt32());
+ }
+
+ virtual sp<MediaCodecInfo> getCodecInfo(size_t index) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeInt32(index);
+ remote()->transact(GET_CODEC_INFO, data, &reply);
+ status_t err = reply.readInt32();
+ if (err == OK) {
+ return MediaCodecInfo::FromParcel(reply);
+ } else {
+ return NULL;
+ }
+ }
+
+ virtual ssize_t findCodecByType(
+ const char *type, bool encoder, size_t startIndex = 0) const
+ {
+ if (startIndex > INT32_MAX) {
+ return NAME_NOT_FOUND;
+ }
+
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeCString(type);
+ data.writeInt32(encoder);
+ data.writeInt32(startIndex);
+ remote()->transact(FIND_CODEC_BY_TYPE, data, &reply);
+ return static_cast<ssize_t>(reply.readInt32());
+ }
+
+ virtual ssize_t findCodecByName(const char *name) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeCString(name);
+ remote()->transact(FIND_CODEC_BY_NAME, data, &reply);
+ return static_cast<ssize_t>(reply.readInt32());
+ }
+};
+
+IMPLEMENT_META_INTERFACE(MediaCodecList, "android.media.IMediaCodecList");
+
+// ----------------------------------------------------------------------
+
+status_t BnMediaCodecList::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+{
+ switch (code) {
+ case COUNT_CODECS:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ size_t count = countCodecs();
+ if (count > INT32_MAX) {
+ count = INT32_MAX;
+ }
+ reply->writeInt32(count);
+ return NO_ERROR;
+ }
+ break;
+
+ case GET_CODEC_INFO:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ size_t index = static_cast<size_t>(data.readInt32());
+ const sp<MediaCodecInfo> info = getCodecInfo(index);
+ if (info != NULL) {
+ reply->writeInt32(OK);
+ info->writeToParcel(reply);
+ } else {
+ reply->writeInt32(-ERANGE);
+ }
+ return NO_ERROR;
+ }
+ break;
+
+ case FIND_CODEC_BY_TYPE:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ const char *type = data.readCString();
+ bool isEncoder = static_cast<bool>(data.readInt32());
+ size_t startIndex = static_cast<size_t>(data.readInt32());
+ ssize_t index = findCodecByType(type, isEncoder, startIndex);
+ if (index > INT32_MAX || index < 0) {
+ index = NAME_NOT_FOUND;
+ }
+ reply->writeInt32(index);
+ return NO_ERROR;
+ }
+ break;
+
+ case FIND_CODEC_BY_NAME:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ const char *name = data.readCString();
+ ssize_t index = findCodecByName(name);
+ if (index > INT32_MAX || index < 0) {
+ index = NAME_NOT_FOUND;
+ }
+ reply->writeInt32(index);
+ return NO_ERROR;
+ }
+ break;
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index d116b14..2e02d17 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
#include <media/ICrypto.h>
#include <media/IDrm.h>
#include <media/IHDCP.h>
+#include <media/IMediaCodecList.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
@@ -49,6 +50,7 @@
ADD_BATTERY_DATA,
PULL_BATTERY_DATA,
LISTEN_FOR_REMOTE_DISPLAY,
+ GET_CODEC_LIST,
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -191,6 +193,13 @@
remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply);
return interface_cast<IRemoteDisplay>(reply.readStrongBinder());
}
+
+ virtual sp<IMediaCodecList> getCodecList() const {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+ remote()->transact(GET_CODEC_LIST, data, &reply);
+ return interface_cast<IMediaCodecList>(reply.readStrongBinder());
+ }
};
IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService");
@@ -318,6 +327,12 @@
reply->writeStrongBinder(display->asBinder());
return NO_ERROR;
} break;
+ case GET_CODEC_LIST: {
+ CHECK_INTERFACE(IMediaPlayerService, data, reply);
+ sp<IMediaCodecList> mcl = getCodecList();
+ reply->writeStrongBinder(mcl->asBinder());
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 5df232f..c583d32 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -54,6 +54,7 @@
GET_GRAPHIC_BUFFER_USAGE,
SET_INTERNAL_OPTION,
UPDATE_GRAPHIC_BUFFER_IN_META,
+ CONFIGURE_VIDEO_TUNNEL_MODE,
};
class BpOMX : public BpInterface<IOMX> {
@@ -368,6 +369,25 @@
return err;
}
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle ) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeInt32((int32_t)node);
+ data.writeInt32(portIndex);
+ data.writeInt32((int32_t)tunneled);
+ data.writeInt32(audioHwSync);
+ remote()->transact(CONFIGURE_VIDEO_TUNNEL_MODE, data, &reply);
+
+ status_t err = reply.readInt32();
+ if (sidebandHandle) {
+ *sidebandHandle = (native_handle_t *)reply.readNativeHandle();
+ }
+ return err;
+ }
+
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -804,6 +824,24 @@
return NO_ERROR;
}
+ case CONFIGURE_VIDEO_TUNNEL_MODE:
+ {
+ CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+ node_id node = (node_id)data.readInt32();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL tunneled = (OMX_BOOL)data.readInt32();
+ OMX_U32 audio_hw_sync = data.readInt32();
+
+ native_handle_t *sideband_handle;
+ status_t err = configureVideoTunnelMode(
+ node, port_index, tunneled, audio_hw_sync, &sideband_handle);
+ reply->writeInt32(err);
+ reply->writeNativeHandle(sideband_handle);
+
+ return NO_ERROR;
+ }
+
case ALLOC_BUFFER:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
new file mode 100644
index 0000000..7900eae
--- /dev/null
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecInfo"
+#include <utils/Log.h>
+
+#include <media/IOMX.h>
+
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <binder/Parcel.h>
+
+#include <media/stagefright/OMXCodec.h>
+
+namespace android {
+
+void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
+ Vector<ProfileLevel> *profileLevels) const {
+ profileLevels->clear();
+ profileLevels->appendVector(mProfileLevels);
+}
+
+void MediaCodecInfo::Capabilities::getSupportedColorFormats(
+ Vector<uint32_t> *colorFormats) const {
+ colorFormats->clear();
+ colorFormats->appendVector(mColorFormats);
+}
+
+uint32_t MediaCodecInfo::Capabilities::getFlags() const {
+ return mFlags;
+}
+
+const sp<AMessage> &MediaCodecInfo::Capabilities::getDetails() const {
+ return mDetails;
+}
+
+MediaCodecInfo::Capabilities::Capabilities()
+ : mFlags(0) {
+ mDetails = new AMessage;
+}
+
+// static
+sp<MediaCodecInfo::Capabilities> MediaCodecInfo::Capabilities::FromParcel(
+ const Parcel &parcel) {
+ sp<MediaCodecInfo::Capabilities> caps = new Capabilities();
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ ProfileLevel profileLevel;
+ profileLevel.mProfile = static_cast<uint32_t>(parcel.readInt32());
+ profileLevel.mLevel = static_cast<uint32_t>(parcel.readInt32());
+ if (caps != NULL) {
+ caps->mProfileLevels.push_back(profileLevel);
+ }
+ }
+ size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ uint32_t color = static_cast<uint32_t>(parcel.readInt32());
+ if (caps != NULL) {
+ caps->mColorFormats.push_back(color);
+ }
+ }
+ uint32_t flags = static_cast<uint32_t>(parcel.readInt32());
+ sp<AMessage> details = AMessage::FromParcel(parcel);
+ if (caps != NULL) {
+ caps->mFlags = flags;
+ caps->mDetails = details;
+ }
+ return caps;
+}
+
+status_t MediaCodecInfo::Capabilities::writeToParcel(Parcel *parcel) const {
+ CHECK_LE(mProfileLevels.size(), INT32_MAX);
+ parcel->writeInt32(mProfileLevels.size());
+ for (size_t i = 0; i < mProfileLevels.size(); i++) {
+ parcel->writeInt32(mProfileLevels.itemAt(i).mProfile);
+ parcel->writeInt32(mProfileLevels.itemAt(i).mLevel);
+ }
+ CHECK_LE(mColorFormats.size(), INT32_MAX);
+ parcel->writeInt32(mColorFormats.size());
+ for (size_t i = 0; i < mColorFormats.size(); i++) {
+ parcel->writeInt32(mColorFormats.itemAt(i));
+ }
+ parcel->writeInt32(mFlags);
+ mDetails->writeToParcel(parcel);
+ return OK;
+}
+
+bool MediaCodecInfo::isEncoder() const {
+ return mIsEncoder;
+}
+
+bool MediaCodecInfo::hasQuirk(const char *name) const {
+ for (size_t ix = 0; ix < mQuirks.size(); ix++) {
+ if (mQuirks.itemAt(ix).equalsIgnoreCase(name)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void MediaCodecInfo::getSupportedMimes(Vector<AString> *mimes) const {
+ mimes->clear();
+ for (size_t ix = 0; ix < mCaps.size(); ix++) {
+ mimes->push_back(mCaps.keyAt(ix));
+ }
+}
+
+const sp<MediaCodecInfo::Capabilities> &
+MediaCodecInfo::getCapabilitiesFor(const char *mime) const {
+ ssize_t ix = getCapabilityIndex(mime);
+ if (ix >= 0) {
+ return mCaps.valueAt(ix);
+ }
+ return NULL;
+}
+
+const char *MediaCodecInfo::getCodecName() const {
+ return mName.c_str();
+}
+
+// static
+sp<MediaCodecInfo> MediaCodecInfo::FromParcel(const Parcel &parcel) {
+ AString name = AString::FromParcel(parcel);
+ bool isEncoder = static_cast<bool>(parcel.readInt32());
+ sp<MediaCodecInfo> info = new MediaCodecInfo(name, isEncoder, NULL);
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ AString quirk = AString::FromParcel(parcel);
+ if (info != NULL) {
+ info->mQuirks.push_back(quirk);
+ }
+ }
+ size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ AString mime = AString::FromParcel(parcel);
+ sp<Capabilities> caps = Capabilities::FromParcel(parcel);
+ if (info != NULL) {
+ info->mCaps.add(mime, caps);
+ }
+ }
+ return info;
+}
+
+status_t MediaCodecInfo::writeToParcel(Parcel *parcel) const {
+ mName.writeToParcel(parcel);
+ parcel->writeInt32(mIsEncoder);
+ parcel->writeInt32(mQuirks.size());
+ for (size_t i = 0; i < mQuirks.size(); i++) {
+ mQuirks.itemAt(i).writeToParcel(parcel);
+ }
+ parcel->writeInt32(mCaps.size());
+ for (size_t i = 0; i < mCaps.size(); i++) {
+ mCaps.keyAt(i).writeToParcel(parcel);
+ mCaps.valueAt(i)->writeToParcel(parcel);
+ }
+ return OK;
+}
+
+ssize_t MediaCodecInfo::getCapabilityIndex(const char *mime) const {
+ for (size_t ix = 0; ix < mCaps.size(); ix++) {
+ if (mCaps.keyAt(ix).equalsIgnoreCase(mime)) {
+ return ix;
+ }
+ }
+ return -1;
+}
+
+MediaCodecInfo::MediaCodecInfo(AString name, bool encoder, const char *mime)
+ : mName(name),
+ mIsEncoder(encoder),
+ mHasSoleMime(false) {
+ if (mime != NULL) {
+ addMime(mime);
+ mHasSoleMime = true;
+ }
+}
+
+status_t MediaCodecInfo::addMime(const char *mime) {
+ if (mHasSoleMime) {
+ ALOGE("Codec '%s' already had its type specified", mName.c_str());
+ return -EINVAL;
+ }
+ ssize_t ix = getCapabilityIndex(mime);
+ if (ix >= 0) {
+ mCurrentCaps = mCaps.valueAt(ix);
+ } else {
+ mCurrentCaps = new Capabilities();
+ mCaps.add(AString(mime), mCurrentCaps);
+ }
+ return OK;
+}
+
+status_t MediaCodecInfo::initializeCapabilities(const CodecCapabilities &caps) {
+ mCurrentCaps->mProfileLevels.clear();
+ mCurrentCaps->mColorFormats.clear();
+
+ for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
+ const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);
+
+ ProfileLevel profileLevel;
+ profileLevel.mProfile = src.mProfile;
+ profileLevel.mLevel = src.mLevel;
+ mCurrentCaps->mProfileLevels.push_back(profileLevel);
+ }
+
+ for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
+ mCurrentCaps->mColorFormats.push_back(caps.mColorFormats.itemAt(i));
+ }
+
+ mCurrentCaps->mFlags = caps.mFlags;
+ mCurrentCaps->mDetails = new AMessage;
+
+ return OK;
+}
+
+void MediaCodecInfo::addQuirk(const char *name) {
+ if (!hasQuirk(name)) {
+ mQuirks.push(name);
+ }
+}
+
+void MediaCodecInfo::complete() {
+ mCurrentCaps = NULL;
+}
+
+void MediaCodecInfo::addDetail(const AString &key, const AString &value) {
+ mCurrentCaps->mDetails->setString(key.c_str(), value.c_str());
+}
+
+void MediaCodecInfo::addFeature(const AString &key, int32_t value) {
+ AString tag = "feature-";
+ tag.append(key);
+ mCurrentCaps->mDetails->setInt32(tag.c_str(), value);
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 735344c..a706987 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -54,6 +54,7 @@
#include <media/Metadata.h>
#include <media/AudioTrack.h>
#include <media/MemoryLeakTrackUtil.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -345,6 +346,10 @@
return c;
}
+sp<IMediaCodecList> MediaPlayerService::getCodecList() const {
+ return MediaCodecList::getLocalInstance();
+}
+
sp<IOMX> MediaPlayerService::getOMX() {
Mutex::Autolock autoLock(mLock);
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 2eca6a0..406e3f6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -273,6 +273,7 @@
uint32_t *pSampleRate, int* pNumChannels,
audio_format_t* pFormat,
const sp<IMemoryHeap>& heap, size_t *pSize);
+ virtual sp<IMediaCodecList> getCodecList() const;
virtual sp<IOMX> getOMX();
virtual sp<ICrypto> makeCrypto();
virtual sp<IDrm> makeDrm();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 4889dbc..9a4e811 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -427,6 +427,31 @@
break;
}
+ case kWhatGetSelectedTrack:
+ {
+ status_t err = INVALID_OPERATION;
+ if (mSource != NULL) {
+ err = OK;
+
+ int32_t type32;
+ CHECK(msg->findInt32("type", (int32_t*)&type32));
+ media_track_type type = (media_track_type)type32;
+ ssize_t selectedTrack = mSource->getSelectedTrack(type);
+
+ Parcel* reply;
+ CHECK(msg->findPointer("reply", (void**)&reply));
+ reply->writeInt32(selectedTrack);
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+ break;
+ }
+
case kWhatSelectTrack:
{
uint32_t replyID;
@@ -1499,6 +1524,19 @@
return err;
}
+status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const {
+ sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id());
+ msg->setPointer("reply", reply);
+ msg->setInt32("type", type);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+ return err;
+}
+
status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
msg->setSize("trackIndex", trackIndex);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 486b7cc..fc456a4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -63,6 +63,7 @@
status_t setVideoScalingMode(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
+ status_t getSelectedTrack(int32_t type, Parcel* reply) const;
status_t selectTrack(size_t trackIndex, bool select);
protected:
@@ -109,6 +110,7 @@
kWhatPollDuration = 'polD',
kWhatSourceNotify = 'srcN',
kWhatGetTrackInfo = 'gTrI',
+ kWhatGetSelectedTrack = 'gSel',
kWhatSelectTrack = 'selT',
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 4748546..e33e647 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -422,6 +422,12 @@
return mPlayer->selectTrack(trackIndex, false /* select */);
}
+ case INVOKE_ID_GET_SELECTED_TRACK:
+ {
+ int32_t type = request.readInt32();
+ return mPlayer->getSelectedTrack(type, reply);
+ }
+
default:
{
return INVALID_OPERATION;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 06bbbec..74892b6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -22,6 +22,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MetaData.h>
+#include <media/mediaplayer.h>
#include <utils/Vector.h>
namespace android {
@@ -86,6 +87,10 @@
return NULL;
}
+ virtual ssize_t getSelectedTrack(media_track_type /* type */) const {
+ return INVALID_OPERATION;
+ }
+
virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */) {
return INVALID_OPERATION;
}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index b81674d..ac80da2 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -51,6 +51,48 @@
namespace android {
+// OMX errors are directly mapped into status_t range if
+// there is no corresponding MediaError status code.
+// Use the statusFromOMXError(int32_t omxError) function.
+//
+// Currently this is a direct map.
+// See frameworks/native/include/media/openmax/OMX_Core.h
+//
+// Vendor OMX errors from 0x90000000 - 0x9000FFFF
+// Extension OMX errors from 0x8F000000 - 0x90000000
+// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
+//
+
+// returns true if err is a recognized OMX error code.
+// as OMX error is OMX_S32, this is an int32_t type
+static inline bool isOMXError(int32_t err) {
+ return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
+}
+
+// converts an OMX error to a status_t
+static inline status_t statusFromOMXError(int32_t omxError) {
+ switch (omxError) {
+ case OMX_ErrorInvalidComponentName:
+ case OMX_ErrorComponentNotFound:
+ return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
+ default:
+ return isOMXError(omxError) ? omxError : 0; // no translation required
+ }
+}
+
+// checks and converts status_t to a non-side-effect status_t
+static inline status_t makeNoSideEffectStatus(status_t err) {
+ switch (err) {
+ // the following errors have side effects and may come
+ // from other code modules. Remap for safety reasons.
+ case INVALID_OPERATION:
+ case DEAD_OBJECT:
+ return UNKNOWN_ERROR;
+ default:
+ return err;
+ }
+}
+
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -1182,77 +1224,99 @@
}
}
- // Always try to enable dynamic output buffers on native surface
sp<RefBase> obj;
int32_t haveNativeWindow = msg->findObject("native-window", &obj) &&
- obj != NULL;
+ obj != NULL;
mStoreMetaDataInOutputBuffers = false;
if (video && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
}
if (!encoder && video && haveNativeWindow) {
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
- if (err != OK) {
- ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
- mComponentName.c_str(), err);
+ sp<NativeWindowWrapper> windowWrapper(
+ static_cast<NativeWindowWrapper *>(obj.get()));
+ sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
- // if adaptive playback has been requested, try JB fallback
- // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
- // LARGE MEMORY REQUIREMENT
+ int32_t tunneled;
+ if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
+ tunneled != 0) {
+ ALOGI("Configuring TUNNELED video playback.");
- // we will not do adaptive playback on software accessed
- // surfaces as they never had to respond to changes in the
- // crop window, and we don't trust that they will be able to.
- int usageBits = 0;
- bool canDoAdaptivePlayback;
-
- sp<NativeWindowWrapper> windowWrapper(
- static_cast<NativeWindowWrapper *>(obj.get()));
- sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
-
- if (nativeWindow->query(
- nativeWindow.get(),
- NATIVE_WINDOW_CONSUMER_USAGE_BITS,
- &usageBits) != OK) {
- canDoAdaptivePlayback = false;
- } else {
- canDoAdaptivePlayback =
- (usageBits &
- (GRALLOC_USAGE_SW_READ_MASK |
- GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+ int64_t audioHwSync = 0;
+ if (!msg->findInt64("audio-hw-sync", &audioHwSync)) {
+ ALOGW("No Audio HW Sync provided for video tunnel");
+ }
+ err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
+ if (err != OK) {
+ ALOGE("configureTunneledVideoPlayback(%" PRId64 ",%p) failed!",
+ audioHwSync, nativeWindow.get());
+ return err;
}
- int32_t maxWidth = 0, maxHeight = 0;
- if (canDoAdaptivePlayback &&
- msg->findInt32("max-width", &maxWidth) &&
- msg->findInt32("max-height", &maxHeight)) {
- ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
- mComponentName.c_str(), maxWidth, maxHeight);
-
- err = mOMX->prepareForAdaptivePlayback(
- mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
- ALOGW_IF(err != OK,
- "[%s] prepareForAdaptivePlayback failed w/ err %d",
+ inputFormat->setInt32("adaptive-playback", true);
+ } else {
+ // Always try to enable dynamic output buffers on native surface
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
mComponentName.c_str(), err);
- if (err == OK) {
- inputFormat->setInt32("max-width", maxWidth);
- inputFormat->setInt32("max-height", maxHeight);
- inputFormat->setInt32("adaptive-playback", true);
- }
- }
- // allow failure
- err = OK;
- } else {
- ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
- mStoreMetaDataInOutputBuffers = true;
- inputFormat->setInt32("adaptive-playback", true);
- }
+ // if adaptive playback has been requested, try JB fallback
+ // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
+ // LARGE MEMORY REQUIREMENT
- int32_t push;
- if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
- && push != 0) {
- mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ // we will not do adaptive playback on software accessed
+ // surfaces as they never had to respond to changes in the
+ // crop window, and we don't trust that they will be able to.
+ int usageBits = 0;
+ bool canDoAdaptivePlayback;
+
+ if (nativeWindow->query(
+ nativeWindow.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &usageBits) != OK) {
+ canDoAdaptivePlayback = false;
+ } else {
+ canDoAdaptivePlayback =
+ (usageBits &
+ (GRALLOC_USAGE_SW_READ_MASK |
+ GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+ }
+
+ int32_t maxWidth = 0, maxHeight = 0;
+ if (canDoAdaptivePlayback &&
+ msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
+ ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
+ mComponentName.c_str(), maxWidth, maxHeight);
+
+ err = mOMX->prepareForAdaptivePlayback(
+ mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
+ maxHeight);
+ ALOGW_IF(err != OK,
+ "[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ if (err == OK) {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+ }
+ // allow failure
+ err = OK;
+ } else {
+ ALOGV("[%s] storeMetaDataInBuffers succeeded",
+ mComponentName.c_str());
+ mStoreMetaDataInOutputBuffers = true;
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+
+ int32_t push;
+ if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
+ && push != 0) {
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
}
int32_t rotationDegrees;
@@ -1290,6 +1354,8 @@
} else {
int32_t isADTS, aacProfile;
int32_t sbrMode;
+ int32_t maxOutputChannelCount;
+ drcParams_t drc;
if (!msg->findInt32("is-adts", &isADTS)) {
isADTS = 0;
}
@@ -1300,9 +1366,33 @@
sbrMode = -1;
}
+ if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
+ maxOutputChannelCount = -1;
+ }
+ if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
+ // value is unknown
+ drc.encodedTargetLevel = -1;
+ }
+ if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
+ // value is unknown
+ drc.drcCut = -1;
+ }
+ if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
+ // value is unknown
+ drc.drcBoost = -1;
+ }
+ if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
+ // value is unknown
+ drc.heavyCompression = -1;
+ }
+ if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
+ // value is unknown
+ drc.targetRefLevel = -1;
+ }
+
err = setupAACCodec(
encoder, numChannels, sampleRate, bitRate, aacProfile,
- isADTS != 0, sbrMode);
+ isADTS != 0, sbrMode, maxOutputChannelCount, drc);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
@@ -1464,7 +1554,8 @@
status_t ACodec::setupAACCodec(
bool encoder, int32_t numChannels, int32_t sampleRate,
- int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode) {
+ int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
+ int32_t maxOutputChannelCount, const drcParams_t& drc) {
if (encoder && isADTS) {
return -EINVAL;
}
@@ -1587,8 +1678,23 @@
? OMX_AUDIO_AACStreamFormatMP4ADTS
: OMX_AUDIO_AACStreamFormatMP4FF;
- return mOMX->setParameter(
- mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
+ presentation.nMaxOutputChannels = maxOutputChannelCount;
+ presentation.nDrcCut = drc.drcCut;
+ presentation.nDrcBoost = drc.drcBoost;
+ presentation.nHeavyCompression = drc.heavyCompression;
+ presentation.nTargetReferenceLevel = drc.targetRefLevel;
+ presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
+
+ status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ if (res == OK) {
+ // optional parameters, will not cause configuration failure
+ mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
+ &presentation, sizeof(presentation));
+ } else {
+ ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
+ }
+ return res;
}
status_t ACodec::setupAC3Codec(
@@ -1785,6 +1891,27 @@
mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}
+status_t ACodec::configureTunneledVideoPlayback(
+ int64_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
+ native_handle_t* sidebandHandle;
+
+ status_t err = mOMX->configureVideoTunnelMode(
+ mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
+ if (err != OK) {
+ ALOGE("configureVideoTunnelMode failed! (err %d).", err);
+ return err;
+ }
+
+ err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
+ if (err != OK) {
+ ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
+ sidebandHandle, err);
+ return err;
+ }
+
+ return OK;
+}
+
status_t ACodec::setVideoPortFormatType(
OMX_U32 portIndex,
OMX_VIDEO_CODINGTYPE compressionFormat,
@@ -1806,6 +1933,17 @@
return err;
}
+ // substitute back flexible color format to codec supported format
+ OMX_U32 flexibleEquivalent;
+ if (compressionFormat == OMX_VIDEO_CodingUnused &&
+ isFlexibleColorFormat(
+ mOMX, mNode, format.eColorFormat, &flexibleEquivalent) &&
+ colorFormat == flexibleEquivalent) {
+ ALOGI("[%s] using color format %#x in place of %#x",
+ mComponentName.c_str(), format.eColorFormat, colorFormat);
+ colorFormat = format.eColorFormat;
+ }
+
// The following assertion is violated by TI's video decoder.
// CHECK_EQ(format.nIndex, index);
@@ -2782,7 +2920,7 @@
}
// static
-void ACodec::describeDefaultColorFormat(DescribeColorFormatParams ¶ms) {
+bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams ¶ms) {
MediaImage &image = params.sMediaImage;
memset(&image, 0, sizeof(image));
@@ -2794,7 +2932,7 @@
if (params.nStride == 0 || params.nSliceHeight == 0) {
ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
fmt, fmt, params.nStride, params.nSliceHeight);
- return;
+ return false;
}
image.mWidth = params.nFrameWidth;
@@ -2806,7 +2944,7 @@
fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar) {
ALOGW("do not know color format 0x%x = %d", fmt, fmt);
- return;
+ return false;
}
// set-up YUV format
@@ -2856,6 +2994,67 @@
default:
TRESPASS();
}
+ return true;
+}
+
+// static
+bool ACodec::describeColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ DescribeColorFormatParams &describeParams)
+{
+ OMX_INDEXTYPE describeColorFormatIndex;
+ if (omx->getExtensionIndex(
+ node, "OMX.google.android.index.describeColorFormat",
+ &describeColorFormatIndex) != OK ||
+ omx->getParameter(
+ node, describeColorFormatIndex,
+ &describeParams, sizeof(describeParams)) != OK) {
+ return describeDefaultColorFormat(describeParams);
+ }
+ return describeParams.sMediaImage.mType !=
+ MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
+}
+
+// static
+bool ACodec::isFlexibleColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t colorFormat, OMX_U32 *flexibleEquivalent) {
+ DescribeColorFormatParams describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
+ // reasonable dummy values
+ describeParams.nFrameWidth = 128;
+ describeParams.nFrameHeight = 128;
+ describeParams.nStride = 128;
+ describeParams.nSliceHeight = 128;
+
+ CHECK(flexibleEquivalent != NULL);
+
+ if (!describeColorFormat(omx, node, describeParams)) {
+ return false;
+ }
+
+ const MediaImage &img = describeParams.sMediaImage;
+ if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes != 3 ||
+ img.mPlane[img.Y].mHorizSubsampling != 1 ||
+ img.mPlane[img.Y].mVertSubsampling != 1) {
+ return false;
+ }
+
+ // YUV 420
+ if (img.mPlane[img.U].mHorizSubsampling == 2
+ && img.mPlane[img.U].mVertSubsampling == 2
+ && img.mPlane[img.V].mHorizSubsampling == 2
+ && img.mPlane[img.V].mVertSubsampling == 2) {
+ // possible flexible YUV420 format
+ if (img.mBitDepth <= 8) {
+ *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
+ return true;
+ }
+ }
+ }
+ return false;
}
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {
@@ -2885,7 +3084,6 @@
notify->setInt32("slice-height", videoDef->nSliceHeight);
notify->setInt32("color-format", videoDef->eColorFormat);
-
DescribeColorFormatParams describeParams;
InitOMXParams(&describeParams);
describeParams.eColorFormat = videoDef->eColorFormat;
@@ -2894,17 +3092,7 @@
describeParams.nStride = videoDef->nStride;
describeParams.nSliceHeight = videoDef->nSliceHeight;
- OMX_INDEXTYPE describeColorFormatIndex;
- if (mOMX->getExtensionIndex(
- mNode, "OMX.google.android.index.describeColorFormat",
- &describeColorFormatIndex) ||
- mOMX->getParameter(
- mNode, describeColorFormatIndex,
- &describeParams, sizeof(describeParams))) {
- describeDefaultColorFormat(describeParams);
- }
-
- if (describeParams.sMediaImage.mType != MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN) {
+ if (describeColorFormat(mOMX, mNode, describeParams)) {
notify->setBuffer(
"image-data",
ABuffer::CreateAsCopy(
@@ -3226,8 +3414,18 @@
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", CodecBase::kWhatError);
- notify->setInt32("omx-error", error);
+ ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);
+
+ if (internalError == UNKNOWN_ERROR) { // find better error code
+ const status_t omxStatus = statusFromOMXError(error);
+ if (omxStatus != 0) {
+ internalError = omxStatus;
+ } else {
+ ALOGW("Invalid OMX error %#x", error);
+ }
+ }
notify->setInt32("err", internalError);
+ notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
notify->post();
}
@@ -3451,6 +3649,7 @@
case ACodec::kWhatCreateInputSurface:
case ACodec::kWhatSignalEndOfInputStream:
{
+ // This may result in an app illegal state exception.
ALOGE("Message 0x%x was not handled", msg->what());
mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
return true;
@@ -3458,6 +3657,7 @@
case ACodec::kWhatOMXDied:
{
+ // This will result in kFlagSawMediaServerDie handling in MediaCodec.
ALOGE("OMX/mediaserver died, signalling error!");
mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
break;
@@ -3556,7 +3756,13 @@
ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1);
- mCodec->signalError((OMX_ERRORTYPE)data1);
+ // verify OMX component sends back an error we expect.
+ OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
+ if (!isOMXError(omxError)) {
+ ALOGW("Invalid OMX error %#x", omxError);
+ omxError = OMX_ErrorUndefined;
+ }
+ mCodec->signalError(omxError);
return true;
}
@@ -3999,7 +4205,7 @@
info->mGraphicBuffer.get(), -1)) == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
info->mStatus = BufferInfo::OWNED_BY_US;
}
} else {
@@ -4371,7 +4577,7 @@
ALOGE("[%s] configureCodec returning error %d",
mCodec->mComponentName.c_str(), err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
return false;
}
@@ -4518,7 +4724,7 @@
"(error 0x%08x)",
err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
mCodec->changeState(mCodec->mLoadedState);
}
@@ -5046,7 +5252,7 @@
"port reconfiguration (error 0x%08x)",
err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
// This is technically not correct, but appears to be
// the only way to free the component instance.
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 7c02959..42691b9 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -113,24 +113,26 @@
}
// static
sp<MediaCodec> MediaCodec::CreateByType(
- const sp<ALooper> &looper, const char *mime, bool encoder) {
+ const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err) {
sp<MediaCodec> codec = new MediaCodec(looper);
- if (codec->init(mime, true /* nameIsType */, encoder) != OK) {
- return NULL;
- }
- return codec;
+ const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
+ if (err != NULL) {
+ *err = ret;
+ }
+ return ret == OK ? codec : NULL; // NULL deallocates codec.
}
// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
- const sp<ALooper> &looper, const char *name) {
+ const sp<ALooper> &looper, const char *name, status_t *err) {
sp<MediaCodec> codec = new MediaCodec(looper);
- if (codec->init(name, false /* nameIsType */, false /* encoder */) != OK) {
- return NULL;
- }
- return codec;
+ const status_t ret = codec->init(name, false /* nameIsType */, false /* encoder */);
+ if (err != NULL) {
+ *err = ret;
+ }
+ return ret == OK ? codec : NULL; // NULL deallocates codec.
}
MediaCodec::MediaCodec(const sp<ALooper> &looper)
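For reference, a minimal caller-side sketch (not part of this change) of the new status_t out-parameter; the looper setup and mime string below are purely illustrative:

    // Hypothetical usage of the updated CreateByType(); names are illustrative.
    sp<ALooper> looper = new ALooper;
    looper->start();
    status_t err = OK;
    sp<MediaCodec> codec = MediaCodec::CreateByType(
            looper, "video/avc", false /* encoder */, &err);
    if (codec == NULL) {
        ALOGE("decoder creation failed, init returned %d", err);
    }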
@@ -139,6 +141,7 @@
mCodec(NULL),
mReplyID(0),
mFlags(0),
+ mStickyError(OK),
mSoftRenderer(NULL),
mBatteryStatNotified(false),
mIsVideo(false),
@@ -195,16 +198,16 @@
if (tmp.endsWith(".secure")) {
tmp.erase(tmp.size() - 7, 7);
}
- const MediaCodecList *mcl = MediaCodecList::getInstance();
+ const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());
if (codecIdx >= 0) {
- Vector<AString> types;
- if (mcl->getSupportedTypes(codecIdx, &types) == OK) {
- for (size_t i = 0; i < types.size(); i++) {
- if (types[i].startsWith("video/")) {
- needDedicatedLooper = true;
- break;
- }
+ const sp<MediaCodecInfo> info = mcl->getCodecInfo(codecIdx);
+ Vector<AString> mimes;
+ info->getSupportedMimes(&mimes);
+ for (size_t i = 0; i < mimes.size(); i++) {
+ if (mimes[i].startsWith("video/")) {
+ needDedicatedLooper = true;
+ break;
}
}
}
@@ -330,6 +333,7 @@
mLooper->unregisterHandler(id());
mFlags = 0; // clear all flags
+ mStickyError = OK;
// reset state not reset by setState(UNINITIALIZED)
mReplyID = 0;
@@ -620,10 +624,12 @@
bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
if (!isExecuting() || (mFlags & kFlagIsAsync)
- || (mFlags & kFlagStickyError)
|| (newRequest && (mFlags & kFlagDequeueInputPending))) {
PostReplyWithError(replyID, INVALID_OPERATION);
return true;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ return true;
}
ssize_t index = dequeuePortBuffer(kPortIndexInput);
@@ -644,9 +650,10 @@
sp<AMessage> response = new AMessage;
if (!isExecuting() || (mFlags & kFlagIsAsync)
- || (mFlags & kFlagStickyError)
|| (newRequest && (mFlags & kFlagDequeueOutputPending))) {
response->setInt32("err", INVALID_OPERATION);
+ } else if (mFlags & kFlagStickyError) {
+ response->setInt32("err", getStickyError());
} else if (mFlags & kFlagOutputBuffersChanged) {
response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED);
mFlags &= ~kFlagOutputBuffersChanged;
@@ -705,16 +712,12 @@
switch (what) {
case CodecBase::kWhatError:
{
- int32_t omxError, internalError;
- CHECK(msg->findInt32("omx-error", &omxError));
- CHECK(msg->findInt32("err", &internalError));
+ int32_t err, actionCode;
+ CHECK(msg->findInt32("err", &err));
+ CHECK(msg->findInt32("actionCode", &actionCode));
- ALOGE("Codec reported an error. "
- "(omx error 0x%08x, internalError %d)",
- omxError, internalError);
-
- if (omxError == OMX_ErrorResourcesLost
- && internalError == DEAD_OBJECT) {
+ ALOGE("Codec reported err %#x, actionCode %d", err, actionCode);
+ if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
}
@@ -774,15 +777,24 @@
{
sendErrorReponse = false;
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
cancelPendingDequeueOperations();
if (mFlags & kFlagIsAsync) {
- onError(omxError, 0);
+ onError(err, actionCode);
}
- setState(UNINITIALIZED);
+ switch (actionCode) {
+ case ACTION_CODE_TRANSIENT:
+ break;
+ case ACTION_CODE_RECOVERABLE:
+ setState(INITIALIZED);
+ break;
+ default:
+ setState(UNINITIALIZED);
+ break;
+ }
break;
}
@@ -790,19 +802,32 @@
{
sendErrorReponse = false;
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
- if (mFlags & kFlagIsAsync) {
- onError(omxError, 0);
+ // actionCode in an uninitialized state is always fatal.
+ if (mState == UNINITIALIZED) {
+ actionCode = ACTION_CODE_FATAL;
}
- setState(UNINITIALIZED);
+ if (mFlags & kFlagIsAsync) {
+ onError(err, actionCode);
+ }
+ switch (actionCode) {
+ case ACTION_CODE_TRANSIENT:
+ break;
+ case ACTION_CODE_RECOVERABLE:
+ setState(INITIALIZED);
+ break;
+ default:
+ setState(UNINITIALIZED);
+ break;
+ }
break;
}
}
if (sendErrorReponse) {
- PostReplyWithError(mReplyID, UNKNOWN_ERROR);
+ PostReplyWithError(mReplyID, err);
}
break;
}
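A hedged sketch of how a client receiving these (err, actionCode) pairs might react; the callback shape and recovery policy below are assumptions, only the action-code constants come from the change above:

    // Illustrative client-side dispatch on the propagated action code.
    void onCodecError(status_t err, int32_t actionCode) {
        ALOGW("codec error %d, actionCode %d", err, actionCode);
        switch (actionCode) {
            case ACTION_CODE_TRANSIENT:
                // codec stays in its current state; retry the failed call later
                break;
            case ACTION_CODE_RECOVERABLE:
                // codec dropped back to INITIALIZED; reconfigure and restart it
                break;
            default: // ACTION_CODE_FATAL
                // release this instance and create a new codec
                break;
        }
    }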
@@ -1009,7 +1034,7 @@
ALOGE("queueCSDInputBuffer failed w/ error %d",
err);
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
cancelPendingDequeueOperations();
@@ -1401,9 +1426,12 @@
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagStickyError)) {
+ if (!isExecuting()) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
status_t err = onQueueInputBuffer(msg);
@@ -1472,9 +1500,12 @@
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagStickyError)) {
+ if (!isExecuting()) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
status_t err = onReleaseOutputBuffer(msg);
@@ -1488,9 +1519,12 @@
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagStickyError)) {
+ if (!isExecuting()) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
mReplyID = replyID;
@@ -1503,10 +1537,12 @@
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagIsAsync)
- || (mFlags & kFlagStickyError)) {
+ if (!isExecuting() || (mFlags & kFlagIsAsync)) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
int32_t portIndex;
@@ -1535,9 +1571,12 @@
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagStickyError)) {
+ if (!isExecuting()) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
mReplyID = replyID;
@@ -1561,10 +1600,12 @@
if ((mState != CONFIGURED && mState != STARTING &&
mState != STARTED && mState != FLUSHING &&
mState != FLUSHED)
- || (mFlags & kFlagStickyError)
|| format == NULL) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
}
sp<AMessage> response = new AMessage;
@@ -1687,6 +1728,7 @@
mFlags &= ~kFlagIsEncoder;
mFlags &= ~kFlagGatherCodecSpecificData;
mFlags &= ~kFlagIsAsync;
+ mStickyError = OK;
mActivityNotify.clear();
mCallback.clear();
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index d021533..7f8b7f5 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -18,13 +18,19 @@
#define LOG_TAG "MediaCodecList"
#include <utils/Log.h>
-#include <media/stagefright/MediaCodecList.h>
+#include <binder/IServiceManager.h>
+
+#include <media/IMediaCodecList.h>
+#include <media/IMediaPlayerService.h>
+#include <media/MediaCodecInfo.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
+
#include <utils/threads.h>
#include <libexpat/expat.h>
@@ -33,18 +39,47 @@
static Mutex sInitMutex;
-// static
-MediaCodecList *MediaCodecList::sCodecList;
+static MediaCodecList *gCodecList = NULL;
// static
-const MediaCodecList *MediaCodecList::getInstance() {
+sp<IMediaCodecList> MediaCodecList::sCodecList;
+
+// static
+sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
Mutex::Autolock autoLock(sInitMutex);
- if (sCodecList == NULL) {
- sCodecList = new MediaCodecList;
+ if (gCodecList == NULL) {
+ gCodecList = new MediaCodecList;
+ if (gCodecList->initCheck() == OK) {
+ sCodecList = gCodecList;
+ }
}
- return sCodecList->initCheck() == OK ? sCodecList : NULL;
+ return sCodecList;
+}
+
+static Mutex sRemoteInitMutex;
+
+sp<IMediaCodecList> MediaCodecList::sRemoteList;
+
+// static
+sp<IMediaCodecList> MediaCodecList::getInstance() {
+ Mutex::Autolock _l(sRemoteInitMutex);
+ if (sRemoteList == NULL) {
+ sp<IBinder> binder =
+ defaultServiceManager()->getService(String16("media.player"));
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+ if (service.get() != NULL) {
+ sRemoteList = service->getCodecList();
+ }
+
+ if (sRemoteList == NULL) {
+ // if we failed to get the remote codec list, fall back to the local list
+ sRemoteList = getLocalInstance();
+ }
+ }
+ return sRemoteList;
}
MediaCodecList::MediaCodecList()
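As a usage sketch (assuming a process with binder access to the interfaces introduced in this change), codec enumeration now goes through the possibly remote list:

    // Illustrative enumeration via IMediaCodecList.
    sp<IMediaCodecList> list = MediaCodecList::getInstance();
    if (list != NULL) {
        for (size_t i = 0; i < list->countCodecs(); ++i) {
            const sp<MediaCodecInfo> info = list->getCodecInfo(i);
            ALOGV("codec[%zu]: %s (%s)", i, info->getCodecName(),
                    info->isEncoder() ? "encoder" : "decoder");
        }
    }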
@@ -59,37 +94,69 @@
mHrefBase = AString(codecs_xml, href_base_end - codecs_xml + 1);
}
- mInitCheck = OK;
+ mInitCheck = OK; // keeping this here for safety
mCurrentSection = SECTION_TOPLEVEL;
mDepth = 0;
+ OMXClient client;
+ mInitCheck = client.connect();
+ if (mInitCheck != OK) {
+ return;
+ }
+ mOMX = client.interface();
parseXMLFile(codecs_xml);
+ mOMX.clear();
if (mInitCheck != OK) {
mCodecInfos.clear();
- mCodecQuirks.clear();
return;
}
for (size_t i = mCodecInfos.size(); i-- > 0;) {
- CodecInfo *info = &mCodecInfos.editItemAt(i);
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
- if (info->mTypes == 0) {
+ if (info.mCaps.size() == 0) {
// No types supported by this component???
ALOGW("Component %s does not support any type of media?",
- info->mName.c_str());
+ info.mName.c_str());
mCodecInfos.removeAt(i);
#if LOG_NDEBUG == 0
} else {
- for (size_t type_ix = 0; type_ix < mTypes.size(); ++type_ix) {
- uint32_t typeMask = 1ul << mTypes.valueAt(type_ix);
- if (info->mTypes & typeMask) {
- AString mime = mTypes.keyAt(type_ix);
- uint32_t bit = mTypes.valueAt(type_ix);
+ for (size_t type_ix = 0; type_ix < info.mCaps.size(); ++type_ix) {
+ AString mime = info.mCaps.keyAt(type_ix);
+ const sp<MediaCodecInfo::Capabilities> &caps = info.mCaps.valueAt(type_ix);
- ALOGV("%s codec info for %s: %s", info->mName.c_str(), mime.c_str(),
- info->mCaps.editValueFor(bit)->debugString().c_str());
+ ALOGV("%s codec info for %s: %s", info.mName.c_str(), mime.c_str(),
+ caps->getDetails()->debugString().c_str());
+ ALOGV(" flags=%d", caps->getFlags());
+ {
+ Vector<uint32_t> colorFormats;
+ caps->getSupportedColorFormats(&colorFormats);
+ AString nice;
+ for (size_t ix = 0; ix < colorFormats.size(); ix++) {
+ if (ix > 0) {
+ nice.append(", ");
+ }
+ nice.append(colorFormats.itemAt(ix));
+ }
+ ALOGV(" colors=[%s]", nice.c_str());
+ }
+ {
+ Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+ caps->getSupportedProfileLevels(&profileLevels);
+ AString nice;
+ for (size_t ix = 0; ix < profileLevels.size(); ix++) {
+ if (ix > 0) {
+ nice.append(", ");
+ }
+ const MediaCodecInfo::ProfileLevel &pl =
+ profileLevels.itemAt(ix);
+ nice.append(pl.mProfile);
+ nice.append("/");
+ nice.append(pl.mLevel);
+ }
+ ALOGV(" levels=[%s]", nice.c_str());
}
}
#endif
@@ -294,9 +361,8 @@
case SECTION_DECODER_TYPE:
case SECTION_ENCODER_TYPE:
{
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
// ignore limits and features specified outside of type
- bool outside = !inType && info->mSoleType == 0;
+ bool outside = !inType && !mCurrentInfo->mHasSoleMime;
if (outside && (!strcmp(name, "Limit") || !strcmp(name, "Feature"))) {
ALOGW("ignoring %s specified outside of a Type", name);
} else if (!strcmp(name, "Limit")) {
@@ -344,8 +410,7 @@
(mCurrentSection == SECTION_DECODER_TYPE
? SECTION_DECODER : SECTION_ENCODER);
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mCurrentCaps = NULL;
+ mCurrentInfo->complete();
}
break;
}
@@ -354,9 +419,8 @@
{
if (!strcmp(name, "MediaCodec")) {
mCurrentSection = SECTION_DECODERS;
-
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mCurrentCaps = NULL;
+ mCurrentInfo->complete();
+ mCurrentInfo = NULL;
}
break;
}
@@ -365,9 +429,8 @@
{
if (!strcmp(name, "MediaCodec")) {
mCurrentSection = SECTION_ENCODERS;
-
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mCurrentCaps = NULL;
+ mCurrentInfo->complete();
+ mCurrentInfo = NULL;
}
break;
}
@@ -418,28 +481,27 @@
return -EINVAL;
}
- addMediaCodec(encoder, name, type);
-
- return OK;
+ mCurrentInfo = new MediaCodecInfo(name, encoder, type);
+ mCodecInfos.push_back(mCurrentInfo);
+ return initializeCapabilities(type);
}
-void MediaCodecList::addMediaCodec(
- bool encoder, const char *name, const char *type) {
- mCodecInfos.push();
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mName = name;
- info->mIsEncoder = encoder;
- info->mSoleType = 0;
- info->mTypes = 0;
- info->mQuirks = 0;
- info->mCurrentCaps = NULL;
+status_t MediaCodecList::initializeCapabilities(const char *type) {
+ ALOGV("initializeCapabilities %s:%s",
+ mCurrentInfo->mName.c_str(), type);
- if (type != NULL) {
- addType(type);
- // if type was specified in attributes, we do not allow
- // subsequent types
- info->mSoleType = info->mTypes;
+ CodecCapabilities caps;
+ status_t err = QueryCodec(
+ mOMX,
+ mCurrentInfo->mName.c_str(),
+ type,
+ mCurrentInfo->mIsEncoder,
+ &caps);
+ if (err != OK) {
+ return err;
}
+
+ return mCurrentInfo->initializeCapabilities(caps);
}
status_t MediaCodecList::addQuirk(const char **attrs) {
@@ -464,36 +526,13 @@
return -EINVAL;
}
- uint32_t bit;
- ssize_t index = mCodecQuirks.indexOfKey(name);
- if (index < 0) {
- bit = mCodecQuirks.size();
-
- if (bit == 32) {
- ALOGW("Too many distinct quirk names in configuration.");
- return OK;
- }
-
- mCodecQuirks.add(name, bit);
- } else {
- bit = mCodecQuirks.valueAt(index);
- }
-
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mQuirks |= 1ul << bit;
-
+ mCurrentInfo->addQuirk(name);
return OK;
}
status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
const char *name = NULL;
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- if (info->mSoleType != 0) {
- ALOGE("Codec '%s' already had its type specified", info->mName.c_str());
- return -EINVAL;
- }
-
size_t i = 0;
while (attrs[i] != NULL) {
if (!strcmp(attrs[i], "name")) {
@@ -513,54 +552,47 @@
return -EINVAL;
}
- addType(name);
-
- return OK;
+ status_t ret = mCurrentInfo->addMime(name);
+ if (ret == OK) {
+ ret = initializeCapabilities(name);
+ }
+ return ret;
}
-void MediaCodecList::addType(const char *name) {
- uint32_t bit;
- ssize_t index = mTypes.indexOfKey(name);
- if (index < 0) {
- bit = mTypes.size();
-
- if (bit == 32) {
- ALOGW("Too many distinct type names in configuration.");
- return;
- }
-
- mTypes.add(name, bit);
- } else {
- bit = mTypes.valueAt(index);
- }
-
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mTypes |= 1ul << bit;
- if (info->mCaps.indexOfKey(bit) < 0) {
- AMessage *msg = new AMessage();
- info->mCaps.add(bit, msg);
- }
- info->mCurrentCaps = info->mCaps.editValueFor(bit);
-}
-
+// legacy method for non-advanced codecs
ssize_t MediaCodecList::findCodecByType(
const char *type, bool encoder, size_t startIndex) const {
- ssize_t typeIndex = mTypes.indexOfKey(type);
+ static const char *advancedFeatures[] = {
+ "feature-secure-playback",
+ "feature-tunneled-playback",
+ };
- if (typeIndex < 0) {
- return -ENOENT;
- }
+ size_t numCodecs = mCodecInfos.size();
+ for (; startIndex < numCodecs; ++startIndex) {
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(startIndex).get();
- uint32_t typeMask = 1ul << mTypes.valueAt(typeIndex);
+ if (info.isEncoder() != encoder) {
+ continue;
+ }
+ sp<MediaCodecInfo::Capabilities> capabilities = info.getCapabilitiesFor(type);
+ if (capabilities == NULL) {
+ continue;
+ }
+ const sp<AMessage> &details = capabilities->getDetails();
- while (startIndex < mCodecInfos.size()) {
- const CodecInfo &info = mCodecInfos.itemAt(startIndex);
-
- if (info.mIsEncoder == encoder && (info.mTypes & typeMask)) {
- return startIndex;
+ int32_t required;
+ bool isAdvanced = false;
+ for (size_t ix = 0; ix < ARRAY_SIZE(advancedFeatures); ix++) {
+ if (details->findInt32(advancedFeatures[ix], &required) &&
+ required != 0) {
+ isAdvanced = true;
+ break;
+ }
}
- ++startIndex;
+ if (!isAdvanced) {
+ return startIndex;
+ }
}
return -ENOENT;
@@ -616,12 +648,11 @@
return -EINVAL;
}
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
-
// size, blocks, bitrate, frame-rate, blocks-per-second, aspect-ratio: range
// quality: range + default + [scale]
// complexity: range + default
bool found;
+
if (name == "aspect-ratio" || name == "bitrate" || name == "block-count"
|| name == "blocks-per-second" || name == "complexity"
|| name == "frame-rate" || name == "quality" || name == "size") {
@@ -672,16 +703,16 @@
name = in_;
}
if (name == "quality") {
- info->mCurrentCaps->setString("quality-scale", scale);
+ mCurrentInfo->addDetail("quality-scale", scale);
}
if (name == "quality" || name == "complexity") {
AString tag = name;
tag.append("-default");
- info->mCurrentCaps->setString(tag.c_str(), def);
+ mCurrentInfo->addDetail(tag, def);
}
AString tag = name;
tag.append("-range");
- info->mCurrentCaps->setString(tag.c_str(), range);
+ mCurrentInfo->addDetail(tag, range);
} else {
AString max, value, ranges;
if (msg->contains("default")) {
@@ -708,13 +739,13 @@
if (max.size()) {
AString tag = "max-";
tag.append(name);
- info->mCurrentCaps->setString(tag.c_str(), max);
+ mCurrentInfo->addDetail(tag, max);
} else if (value.size()) {
- info->mCurrentCaps->setString(name.c_str(), value);
+ mCurrentInfo->addDetail(name, value);
} else if (ranges.size()) {
AString tag = name;
tag.append("-ranges");
- info->mCurrentCaps->setString(tag.c_str(), ranges);
+ mCurrentInfo->addDetail(tag, ranges);
} else {
ALOGW("Ignoring unrecognized limit '%s'", name.c_str());
}
@@ -769,16 +800,13 @@
return -EINVAL;
}
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- AString tag = "feature-";
- tag.append(name);
- info->mCurrentCaps->setInt32(tag.c_str(), (required == 1) || (optional == 0));
+ mCurrentInfo->addFeature(name, (required == 1) || (optional == 0));
return OK;
}
ssize_t MediaCodecList::findCodecByName(const char *name) const {
for (size_t i = 0; i < mCodecInfos.size(); ++i) {
- const CodecInfo &info = mCodecInfos.itemAt(i);
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
if (info.mName == name) {
return i;
@@ -792,121 +820,4 @@
return mCodecInfos.size();
}
-const char *MediaCodecList::getCodecName(size_t index) const {
- if (index >= mCodecInfos.size()) {
- return NULL;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
- return info.mName.c_str();
-}
-
-bool MediaCodecList::isEncoder(size_t index) const {
- if (index >= mCodecInfos.size()) {
- return false;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
- return info.mIsEncoder;
-}
-
-bool MediaCodecList::codecHasQuirk(
- size_t index, const char *quirkName) const {
- if (index >= mCodecInfos.size()) {
- return false;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
-
- if (info.mQuirks != 0) {
- ssize_t index = mCodecQuirks.indexOfKey(quirkName);
- if (index >= 0 && info.mQuirks & (1ul << mCodecQuirks.valueAt(index))) {
- return true;
- }
- }
-
- return false;
-}
-
-status_t MediaCodecList::getSupportedTypes(
- size_t index, Vector<AString> *types) const {
- types->clear();
-
- if (index >= mCodecInfos.size()) {
- return -ERANGE;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
-
- for (size_t i = 0; i < mTypes.size(); ++i) {
- uint32_t typeMask = 1ul << mTypes.valueAt(i);
-
- if (info.mTypes & typeMask) {
- types->push(mTypes.keyAt(i));
- }
- }
-
- return OK;
-}
-
-status_t MediaCodecList::getCodecCapabilities(
- size_t index, const char *type,
- Vector<ProfileLevel> *profileLevels,
- Vector<uint32_t> *colorFormats,
- uint32_t *flags,
- sp<AMessage> *capabilities) const {
- profileLevels->clear();
- colorFormats->clear();
-
- if (index >= mCodecInfos.size()) {
- return -ERANGE;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
-
- ssize_t typeIndex = mTypes.indexOfKey(type);
- if (typeIndex < 0) {
- return -EINVAL;
- }
- // essentially doing valueFor without the CHECK abort
- typeIndex = mTypes.valueAt(typeIndex);
-
- OMXClient client;
- status_t err = client.connect();
- if (err != OK) {
- return err;
- }
-
- CodecCapabilities caps;
- err = QueryCodec(
- client.interface(),
- info.mName.c_str(), type, info.mIsEncoder, &caps);
-
- if (err != OK) {
- return err;
- }
-
- for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
- const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);
-
- ProfileLevel profileLevel;
- profileLevel.mProfile = src.mProfile;
- profileLevel.mLevel = src.mLevel;
- profileLevels->push(profileLevel);
- }
-
- for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
- colorFormats->push(caps.mColorFormats.itemAt(i));
- }
-
- *flags = caps.mFlags;
-
- // TODO this check will be removed once JNI side is merged
- if (capabilities != NULL) {
- *capabilities = info.mCaps.valueFor(typeIndex);
- }
-
- return OK;
-}
-
} // namespace android
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index aca21cf..ca031aa 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -78,6 +78,10 @@
node_id node, OMX_U32 port_index, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
virtual status_t enableGraphicBuffers(
node_id node, OMX_U32 port_index, OMX_BOOL enable);
@@ -291,6 +295,13 @@
node, port_index, enable, maxFrameWidth, maxFrameHeight);
}
+status_t MuxOMX::configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
+ return getOMX(node)->configureVideoTunnelMode(
+ node, portIndex, enable, audioHwSync, sidebandHandle);
+}
+
status_t MuxOMX::enableGraphicBuffers(
node_id node, OMX_U32 port_index, OMX_BOOL enable) {
return getOMX(node)->enableGraphicBuffers(node, port_index, enable);
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 354712c..3d1d40e 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -35,6 +35,7 @@
#include <HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
@@ -197,7 +198,7 @@
Vector<CodecNameAndQuirks> *matchingCodecs) {
matchingCodecs->clear();
- const MediaCodecList *list = MediaCodecList::getInstance();
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
if (list == NULL) {
return;
}
@@ -213,7 +214,9 @@
index = matchIndex + 1;
- const char *componentName = list->getCodecName(matchIndex);
+ const sp<MediaCodecInfo> info = list->getCodecInfo(matchIndex);
+ CHECK(info != NULL);
+ const char *componentName = info->getCodecName();
// If a specific codec is requested, skip the non-matching ones.
if (matchComponentName && strcmp(componentName, matchComponentName)) {
@@ -231,7 +234,7 @@
ssize_t index = matchingCodecs->add();
CodecNameAndQuirks *entry = &matchingCodecs->editItemAt(index);
entry->mName = String8(componentName);
- entry->mQuirks = getComponentQuirks(list, matchIndex);
+ entry->mQuirks = getComponentQuirks(info);
ALOGV("matching '%s' quirks 0x%08x",
entry->mName.string(), entry->mQuirks);
@@ -245,18 +248,15 @@
// static
uint32_t OMXCodec::getComponentQuirks(
- const MediaCodecList *list, size_t index) {
+ const sp<MediaCodecInfo> &info) {
uint32_t quirks = 0;
- if (list->codecHasQuirk(
- index, "requires-allocate-on-input-ports")) {
+ if (info->hasQuirk("requires-allocate-on-input-ports")) {
quirks |= kRequiresAllocateBufferOnInputPorts;
}
- if (list->codecHasQuirk(
- index, "requires-allocate-on-output-ports")) {
+ if (info->hasQuirk("requires-allocate-on-output-ports")) {
quirks |= kRequiresAllocateBufferOnOutputPorts;
}
- if (list->codecHasQuirk(
- index, "output-buffers-are-unreadable")) {
+ if (info->hasQuirk("output-buffers-are-unreadable")) {
quirks |= kOutputBuffersAreUnreadable;
}
@@ -265,8 +265,7 @@
// static
bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
- const MediaCodecList *list = MediaCodecList::getInstance();
-
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
if (list == NULL) {
return false;
}
@@ -277,7 +276,9 @@
return false;
}
- *quirks = getComponentQuirks(list, index);
+ const sp<MediaCodecInfo> info = list->getCodecInfo(index);
+ CHECK(info != NULL);
+ *quirks = getComponentQuirks(info);
return true;
}
@@ -1551,7 +1552,7 @@
status_t err = mOMX->freeNode(mNode);
CHECK_EQ(err, (status_t)OK);
- mNode = NULL;
+ mNode = 0;
setState(DEAD);
clearCodecSpecificData();
@@ -4746,6 +4747,8 @@
}
// Color format query
+ // return colors in the order reported by the OMX component,
+ // listing the flexible equivalent just before any standard format that has one
OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
InitOMXParams(&portFormat);
portFormat.nPortIndex = !isEncoder ? 1 : 0;
@@ -4756,6 +4759,21 @@
if (err != OK) {
break;
}
+
+ OMX_U32 flexibleEquivalent;
+ if (ACodec::isFlexibleColorFormat(
+ omx, node, portFormat.eColorFormat, &flexibleEquivalent)) {
+ bool marked = false;
+ for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
+ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
+ marked = true;
+ break;
+ }
+ }
+ if (!marked) {
+ caps->mColorFormats.push(flexibleEquivalent);
+ }
+ }
caps->mColorFormats.push(portFormat.eColorFormat);
}
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index 7124fd3..a4a651d 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -439,6 +439,10 @@
maxBytesToRead = maxBytesAvailable;
}
+ // read only an integral number of audio unit frames.
+ const size_t inputUnitFrameSize = mNumChannels * mBitsPerSample / 8;
+ maxBytesToRead -= maxBytesToRead % inputUnitFrameSize;
+
if (mWaveFormat == WAVE_FORMAT_MSGSM) {
// Microsoft packs 2 frames into 65 bytes, rather than using separate 33-byte frames,
// so read multiples of 65, and use smaller buffers to account for ~10:1 expansion ratio
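A quick arithmetic example of the frame-size truncation above, with made-up numbers (16-bit stereo PCM assumed):

    // Illustrative only: the unit frame is 2 channels * 16 bits / 8 = 4 bytes,
    // so a 1023-byte request is trimmed to 1020 bytes (255 whole frames).
    const size_t inputUnitFrameSize = 2 * 16 / 8;           // 4 bytes per frame
    size_t maxBytesToRead = 1023;
    maxBytesToRead -= maxBytesToRead % inputUnitFrameSize;  // 1020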
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index ab30865..09c6e69 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -19,6 +19,8 @@
#include <utils/Log.h>
#include "SoftAAC2.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -119,6 +121,7 @@
}
status_t SoftAAC2::initDecoder() {
+ ALOGV("initDecoder()");
status_t status = UNKNOWN_ERROR;
mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
if (mAACDecoder != NULL) {
@@ -275,7 +278,7 @@
OMX_ERRORTYPE SoftAAC2::internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
+ switch ((int)index) {
case OMX_IndexParamStandardComponentRole:
{
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
@@ -311,6 +314,67 @@
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioAndroidAacPresentation:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *aacPresParams =
+ (const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *)params;
+ // for the following parameters of the OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE structure,
+ // a value of -1 implies the parameter is not set by the application:
+ // nMaxOutputChannels uses default platform properties, see configureDownmix()
+ // nDrcCut uses default platform properties, see initDecoder()
+ // nDrcBoost idem
+ // nHeavyCompression idem
+ // nTargetReferenceLevel idem
+ // nEncodedTargetLevel idem
+ if (aacPresParams->nMaxOutputChannels >= 0) {
+ int max;
+ if (aacPresParams->nMaxOutputChannels >= 8) { max = 8; }
+ else if (aacPresParams->nMaxOutputChannels >= 6) { max = 6; }
+ else if (aacPresParams->nMaxOutputChannels >= 2) { max = 2; }
+ else {
+ // -1 or 0: disable downmix, 1: mono
+ max = aacPresParams->nMaxOutputChannels;
+ }
+ ALOGV("set nMaxOutputChannels=%d", max);
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, max);
+ }
+ bool updateDrcWrapper = false;
+ if (aacPresParams->nDrcBoost >= 0) {
+ ALOGV("set nDrcBoost=%d", aacPresParams->nDrcBoost);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
+ aacPresParams->nDrcBoost);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nDrcCut >= 0) {
+ ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nHeavyCompression >= 0) {
+ ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
+ aacPresParams->nHeavyCompression);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nTargetReferenceLevel >= 0) {
+ ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
+ aacPresParams->nTargetReferenceLevel);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nEncodedTargetLevel >= 0) {
+ ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
+ aacPresParams->nEncodedTargetLevel);
+ updateDrcWrapper = true;
+ }
+ if (updateDrcWrapper) {
+ mDrcWrap.update();
+ }
+
+ return OMX_ErrorNone;
+ }
+
case OMX_IndexParamAudioPcm:
{
const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
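For context, a hedged sketch of how a caller might fill the new presentation parameter; the -1 values keep platform defaults as documented above, and the node/port plumbing (mOMX, mNode) is assumed:

    // Illustrative client-side setup of the AAC presentation parameter.
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);                // fills nSize / nVersion
    presentation.nMaxOutputChannels = 2;         // request a stereo downmix
    presentation.nDrcCut = -1;                   // -1 keeps the platform default
    presentation.nDrcBoost = -1;
    presentation.nHeavyCompression = -1;
    presentation.nTargetReferenceLevel = -1;
    presentation.nEncodedTargetLevel = -1;
    status_t err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
            &presentation, sizeof(presentation));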
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
index 8451195..f23b5a0 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
@@ -29,6 +29,7 @@
.global Filt_6k_7k_asm
.extern voAWB_Copy
.extern fir_6k_7k
+ .hidden fir_6k_7k
Filt_6k_7k_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
index ac2dd13..deb7efc 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
@@ -32,6 +32,7 @@
.section .text
.global pred_lt4_asm
.extern inter4_2
+ .hidden inter4_2
pred_lt4_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
index fc42a03..8df0caa 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
@@ -28,6 +28,7 @@
.section .text
.global Filt_6k_7k_asm
.extern fir_6k_7k
+ .hidden fir_6k_7k
Filt_6k_7k_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
index 8d2aaf2..67be1ed 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
@@ -29,6 +29,7 @@
.section .text
.global pred_lt4_asm
.extern inter4_2
+ .hidden inter4_2
pred_lt4_asm:
diff --git a/media/libstagefright/codecs/hevcdec/Android.mk b/media/libstagefright/codecs/hevcdec/Android.mk
index 960602f..c0c694e 100644
--- a/media/libstagefright/codecs/hevcdec/Android.mk
+++ b/media/libstagefright/codecs/hevcdec/Android.mk
@@ -20,6 +20,10 @@
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += liblog
+# We need this because the current asm generates the following link error:
+# requires unsupported dynamic reloc R_ARM_REL32; recompile with -fPIC
+# Bug: 16853291
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
index b74c849..1140ed7 100644
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
+++ b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
@@ -35,6 +35,7 @@
.text
.extern pqmfSynthWin
+.hidden pqmfSynthWin
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
index 073dbba..bcc6b6b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
@@ -10,6 +10,22 @@
.fpu neon
.text
+ .extern armVCM4P10_CAVLCCoeffTokenTables
+ .extern armVCM4P10_SuffixToLevel
+ .extern armVCM4P10_CAVLCTotalZeros2x2Tables
+ .extern armVCM4P10_CAVLCTotalZeroTables
+ .extern armVCM4P10_CAVLCRunBeforeTables
+ .extern armVCM4P10_ZigZag_2x2
+ .extern armVCM4P10_ZigZag_4x4
+
+ .hidden armVCM4P10_CAVLCCoeffTokenTables
+ .hidden armVCM4P10_SuffixToLevel
+ .hidden armVCM4P10_CAVLCTotalZeros2x2Tables
+ .hidden armVCM4P10_CAVLCTotalZeroTables
+ .hidden armVCM4P10_CAVLCRunBeforeTables
+ .hidden armVCM4P10_ZigZag_2x2
+ .hidden armVCM4P10_ZigZag_4x4
+
.global armVCM4P10_DecodeCoeffsToPair
.func armVCM4P10_DecodeCoeffsToPair
armVCM4P10_DecodeCoeffsToPair:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
index 44eb428..5bc7875 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
@@ -20,6 +20,14 @@
.global armVCM4P10_QPModuloTable
.global armVCM4P10_VMatrixU16
+ .hidden armVCM4P10_QPDivTable
+ .hidden armVCM4P10_VMatrixQPModTable
+ .hidden armVCM4P10_PosToVCol4x4
+ .hidden armVCM4P10_PosToVCol2x2
+ .hidden armVCM4P10_VMatrix
+ .hidden armVCM4P10_QPModuloTable
+ .hidden armVCM4P10_VMatrixU16
+
armVCM4P10_PosToVCol4x4:
.byte 0, 2, 0, 2
.byte 2, 1, 2, 1
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 67dfcd2..0c5527a 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -112,6 +112,17 @@
bufHeight,
halFormat));
+ // NOTE: native window uses extended right-bottom coordinate
+ android_native_rect_t crop;
+ crop.left = mCropLeft;
+ crop.top = mCropTop;
+ crop.right = mCropRight + 1;
+ crop.bottom = mCropBottom + 1;
+ ALOGV("setting crop: [%d, %d, %d, %d] for size [%zu, %zu]",
+ crop.left, crop.top, crop.right, crop.bottom, bufWidth, bufHeight);
+
+ CHECK_EQ(0, native_window_set_crop(mNativeWindow.get(), &crop));
+
uint32_t transform;
switch (rotationDegrees) {
case 0: transform = 0; break;
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index 894f65c..9835ca3 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -20,6 +20,7 @@
#include <stdlib.h>
#include <string.h>
+#include <binder/Parcel.h>
#include <utils/String8.h>
#include "ADebug.h"
#include "AString.h"
@@ -306,6 +307,14 @@
return strcmp(mData, other.mData);
}
+int AString::compareIgnoreCase(const AString &other) const {
+ return strcasecmp(mData, other.mData);
+}
+
+bool AString::equalsIgnoreCase(const AString &other) const {
+ return compareIgnoreCase(other) == 0;
+}
+
void AString::tolower() {
makeMutable();
@@ -342,6 +351,21 @@
return !strcasecmp(mData + mSize - suffixLen, suffix);
}
+// static
+AString AString::FromParcel(const Parcel &parcel) {
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ return AString(static_cast<const char *>(parcel.readInplace(size)), size);
+}
+
+status_t AString::writeToParcel(Parcel *parcel) const {
+ CHECK_LE(mSize, static_cast<size_t>(INT32_MAX));
+ status_t err = parcel->writeInt32(mSize);
+ if (err == OK) {
+ err = parcel->write(mData, mSize);
+ }
+ return err;
+}
+
AString StringPrintf(const char *format, ...) {
va_list ap;
va_start(ap, format);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index cd51bbf..e8c4970 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -75,6 +75,10 @@
node_id node, OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 max_frame_width, OMX_U32 max_frame_height);
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 3967dc6..dc6d410 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -62,6 +62,10 @@
OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+ status_t configureVideoTunnelMode(
+ OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
index d050fa6..7f200dd 100644
--- a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
@@ -27,8 +27,6 @@
#include <utils/threads.h>
#include <utils/Vector.h>
-#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a)))
-
namespace android {
struct SoftVideoDecoderOMXComponent : public SimpleSoftOMXComponent {
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index cc4770a..41407e4 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -342,6 +342,13 @@
portIndex, enable, maxFrameWidth, maxFrameHeight);
}
+status_t OMX::configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
+ return findInstance(node)->configureVideoTunnelMode(
+ portIndex, tunneled, audioHwSync, sidebandHandle);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index d6ab109..efb27f5 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -460,6 +460,49 @@
return err;
}
+status_t OMXNodeInstance::configureVideoTunnelMode(
+ OMX_U32 portIndex, OMX_BOOL tunneled, OMX_U32 audioHwSync,
+ native_handle_t **sidebandHandle) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.configureVideoTunnelMode");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ ALOGE("configureVideoTunnelMode extension is missing!");
+ return StatusFromOMXError(err);
+ }
+
+ ConfigureVideoTunnelModeParams tunnelParams;
+ tunnelParams.nSize = sizeof(tunnelParams);
+ tunnelParams.nVersion.s.nVersionMajor = 1;
+ tunnelParams.nVersion.s.nVersionMinor = 0;
+ tunnelParams.nVersion.s.nRevision = 0;
+ tunnelParams.nVersion.s.nStep = 0;
+
+ tunnelParams.nPortIndex = portIndex;
+ tunnelParams.bTunneled = tunneled;
+ tunnelParams.nAudioHwSync = audioHwSync;
+ err = OMX_SetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ ALOGE("configureVideoTunnelMode failed! (err %d).", err);
+ return UNKNOWN_ERROR;
+ }
+
+ err = OMX_GetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ ALOGE("GetVideoTunnelWindow failed! (err %d).", err);
+ return UNKNOWN_ERROR;
+ }
+ if (sidebandHandle) {
+ *sidebandHandle = (native_handle_t*)tunnelParams.pSidebandWindow;
+ }
+
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
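A short, hypothetical caller sketch for the tunnel-mode plumbing added in this file; the port index and audio HW sync value are placeholders:

    // Illustrative request for tunneled playback on the output port.
    native_handle_t *sidebandHandle = NULL;
    status_t err = omx->configureVideoTunnelMode(
            node, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGW("component does not support tunneled playback (err %d)", err);
    }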
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 4e9d49b..753314f 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -343,7 +343,8 @@
uint32_t channelCount = FCC_2; // stereo is default
if (kEnableExtendedChannels) {
channelCount = audio_channel_count_from_out_mask(channelMask);
- if (channelCount > AudioMixer::MAX_NUM_CHANNELS) {
+ if (channelCount < FCC_2 // mono is not supported at this time
+ || channelCount > AudioMixer::MAX_NUM_CHANNELS) {
return false;
}
}
diff --git a/services/audioflinger/AudioResamplerDyn.cpp b/services/audioflinger/AudioResamplerDyn.cpp
index 159ab70..0eeb201 100644
--- a/services/audioflinger/AudioResamplerDyn.cpp
+++ b/services/audioflinger/AudioResamplerDyn.cpp
@@ -393,7 +393,7 @@
mPhaseFraction = static_cast<unsigned long long>(mPhaseFraction)
* phaseWrapLimit / oldPhaseWrapLimit;
mPhaseFraction %= phaseWrapLimit; // should not do anything, but just in case.
- mPhaseIncrement = static_cast<uint32_t>(static_cast<double>(phaseWrapLimit)
+ mPhaseIncrement = static_cast<uint32_t>(static_cast<uint64_t>(phaseWrapLimit)
* inSampleRate / mSampleRate);
// determine which resampler to use
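For context, an illustration (with made-up rates and wrap limit) of why the intermediate product needs a 64-bit integer; switching the cast from double to uint64_t keeps the computation in exact integer arithmetic:

    // Illustrative overflow check: 2^30 * 96000 does not fit in 32 bits,
    // but the 64-bit intermediate preserves it before the final divide.
    const uint32_t phaseWrapLimit = 1u << 30;                           // assumed value
    const uint64_t product = static_cast<uint64_t>(phaseWrapLimit) * 96000;
    const uint32_t increment = static_cast<uint32_t>(product / 48000); // 2^31, fits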
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 49422a9..2d0a25f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -151,12 +151,15 @@
if (handle == NULL || patch == NULL) {
return BAD_VALUE;
}
- // limit number of sources to 1 for now or 2 sources for special cross hw module case.
- // only the audio policy manager can request a patch creation with 2 sources.
- if (patch->num_sources == 0 || patch->num_sources > 2 ||
+ if (patch->num_sources == 0 || patch->num_sources > AUDIO_PATCH_PORTS_MAX ||
patch->num_sinks == 0 || patch->num_sinks > AUDIO_PATCH_PORTS_MAX) {
return BAD_VALUE;
}
+ // limit number of sources to 1 for now or 2 sources for special cross hw module case.
+ // only the audio policy manager can request a patch creation with 2 sources.
+ if (patch->num_sources > 2) {
+ return INVALID_OPERATION;
+ }
if (*handle != AUDIO_PATCH_HANDLE_NONE) {
for (size_t index = 0; *handle != 0 && index < mPatches.size(); index++) {
@@ -173,20 +176,22 @@
switch (patch->sources[0].type) {
case AUDIO_PORT_TYPE_DEVICE: {
- // limit number of sinks to 1 for now
- if (patch->num_sinks > 1) {
- status = BAD_VALUE;
- goto exit;
- }
- audio_module_handle_t src_module = patch->sources[0].ext.device.hw_module;
- ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(src_module);
+ audio_module_handle_t srcModule = patch->sources[0].ext.device.hw_module;
+ ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(srcModule);
if (index < 0) {
- ALOGW("createAudioPatch() bad src hw module %d", src_module);
+ ALOGW("createAudioPatch() bad src hw module %d", srcModule);
status = BAD_VALUE;
goto exit;
}
AudioHwDevice *audioHwDevice = audioflinger->mAudioHwDevs.valueAt(index);
for (unsigned int i = 0; i < patch->num_sinks; i++) {
+ // support only one sink if connecting to a mix or across HW modules
+ if ((patch->sinks[i].type == AUDIO_PORT_TYPE_MIX ||
+ patch->sinks[i].ext.mix.hw_module != srcModule) &&
+ patch->num_sinks > 1) {
+ status = INVALID_OPERATION;
+ goto exit;
+ }
// reject connection to different sink types
if (patch->sinks[i].type != patch->sinks[0].type) {
ALOGW("createAudioPatch() different sink types in same patch not supported");
@@ -194,7 +199,7 @@
goto exit;
}
// limit to connections between devices and input streams for HAL before 3.0
- if (patch->sinks[i].ext.mix.hw_module == src_module &&
+ if (patch->sinks[i].ext.mix.hw_module == srcModule &&
(audioHwDevice->version() < AUDIO_DEVICE_API_VERSION_3_0) &&
(patch->sinks[i].type != AUDIO_PORT_TYPE_MIX)) {
ALOGW("createAudioPatch() invalid sink type %d for device source",
@@ -204,7 +209,7 @@
}
}
- if (patch->sinks[0].ext.device.hw_module != src_module) {
+ if (patch->sinks[0].ext.device.hw_module != srcModule) {
// limit to device to device connection if not on same hw module
if (patch->sinks[0].type != AUDIO_PORT_TYPE_DEVICE) {
ALOGW("createAudioPatch() invalid sink type for cross hw module");
@@ -258,7 +263,7 @@
config.channel_mask = inChannelMask;
config.format = newPatch->mPlaybackThread->format();
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
- newPatch->mRecordThread = audioflinger->openInput_l(src_module,
+ newPatch->mRecordThread = audioflinger->openInput_l(srcModule,
&input,
&config,
device,
@@ -279,10 +284,10 @@
if (audioHwDevice->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
if (patch->sinks[0].type == AUDIO_PORT_TYPE_MIX) {
sp<ThreadBase> thread = audioflinger->checkRecordThread_l(
- patch->sinks[0].ext.mix.handle);
+ patch->sinks[0].ext.mix.handle);
if (thread == 0) {
ALOGW("createAudioPatch() bad capture I/O handle %d",
- patch->sinks[0].ext.mix.handle);
+ patch->sinks[0].ext.mix.handle);
status = BAD_VALUE;
goto exit;
}
@@ -298,10 +303,10 @@
}
} else {
sp<ThreadBase> thread = audioflinger->checkRecordThread_l(
- patch->sinks[0].ext.mix.handle);
+ patch->sinks[0].ext.mix.handle);
if (thread == 0) {
ALOGW("createAudioPatch() bad capture I/O handle %d",
- patch->sinks[0].ext.mix.handle);
+ patch->sinks[0].ext.mix.handle);
status = BAD_VALUE;
goto exit;
}
@@ -326,10 +331,10 @@
}
} break;
case AUDIO_PORT_TYPE_MIX: {
- audio_module_handle_t src_module = patch->sources[0].ext.mix.hw_module;
- ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(src_module);
+ audio_module_handle_t srcModule = patch->sources[0].ext.mix.hw_module;
+ ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(srcModule);
if (index < 0) {
- ALOGW("createAudioPatch() bad src hw module %d", src_module);
+ ALOGW("createAudioPatch() bad src hw module %d", srcModule);
status = BAD_VALUE;
goto exit;
}
@@ -342,7 +347,7 @@
goto exit;
}
// limit to connections between sinks and sources on same HW module
- if (patch->sinks[i].ext.device.hw_module != src_module) {
+ if (patch->sinks[i].ext.device.hw_module != srcModule) {
status = BAD_VALUE;
goto exit;
}
@@ -365,6 +370,7 @@
}
char *address;
if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
+ //FIXME: we only support address on first sink with HAL version < 3.0
address = audio_device_address_to_parameter(
patch->sinks[0].ext.device.type,
patch->sinks[0].ext.device.address);
@@ -562,16 +568,16 @@
switch (patch->sources[0].type) {
case AUDIO_PORT_TYPE_DEVICE: {
- audio_module_handle_t src_module = patch->sources[0].ext.device.hw_module;
- ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(src_module);
+ audio_module_handle_t srcModule = patch->sources[0].ext.device.hw_module;
+ ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(srcModule);
if (index < 0) {
- ALOGW("releaseAudioPatch() bad src hw module %d", src_module);
+ ALOGW("releaseAudioPatch() bad src hw module %d", srcModule);
status = BAD_VALUE;
break;
}
if (patch->sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
- patch->sinks[0].ext.device.hw_module != src_module) {
+ patch->sinks[0].ext.device.hw_module != srcModule) {
clearPatchConnections(removedPatch);
break;
}
@@ -609,10 +615,10 @@
}
} break;
case AUDIO_PORT_TYPE_MIX: {
- audio_module_handle_t src_module = patch->sources[0].ext.mix.hw_module;
- ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(src_module);
+ audio_module_handle_t srcModule = patch->sources[0].ext.mix.hw_module;
+ ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(srcModule);
if (index < 0) {
- ALOGW("releaseAudioPatch() bad src hw module %d", src_module);
+ ALOGW("releaseAudioPatch() bad src hw module %d", srcModule);
status = BAD_VALUE;
break;
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2f65370..30cebf4 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -26,6 +26,7 @@
#include <sys/stat.h>
#include <cutils/properties.h>
#include <media/AudioParameter.h>
+#include <media/AudioResamplerPublic.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -910,11 +911,11 @@
goto Exit;
}
- // Reject any effect on multichannel sinks.
+ // Reject any effect on mixer or duplicating multichannel sinks.
// TODO: fix both format and multichannel issues with effects.
- if (mChannelCount != FCC_2) {
- ALOGW("createEffect_l() Cannot add effect %s for multichannel(%d) thread",
- desc->name, mChannelCount);
+ if ((mType == MIXER || mType == DUPLICATING) && mChannelCount != FCC_2) {
+ ALOGW("createEffect_l() Cannot add effect %s for multichannel(%d) %s threads",
+ desc->name, mChannelCount, mType == MIXER ? "MIXER" : "DUPLICATING");
lStatus = BAD_VALUE;
goto Exit;
}
@@ -1479,8 +1480,7 @@
lStatus = BAD_VALUE;
goto Exit;
}
- // Resampler implementation limits input sampling rate to 2 x output sampling rate.
- if (sampleRate > mSampleRate*2) {
+ if (sampleRate > mSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {
ALOGE("Sample rate out of range: %u mSampleRate %u", sampleRate, mSampleRate);
lStatus = BAD_VALUE;
goto Exit;
@@ -3500,7 +3500,7 @@
AudioMixer::TRACK,
AudioMixer::MIXER_CHANNEL_MASK, (void *)(uintptr_t)mChannelMask);
// limit track sample rate to AUDIO_RESAMPLER_DOWN_RATIO_MAX x output sample rate, which changes at re-configuration
- uint32_t maxSampleRate = mSampleRate * 2;
+ uint32_t maxSampleRate = mSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX;
uint32_t reqSampleRate = track->mAudioTrackServerProxy->getSampleRate();
if (reqSampleRate == 0) {
reqSampleRate = mSampleRate;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 48093da..c5ab832 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1885,10 +1885,10 @@
buf.mFrameCount = buffer->frameCount;
status_t status = mPeerProxy->obtainBuffer(&buf, &mPeerTimeout);
ALOGV_IF(status != NO_ERROR, "PatchTrack() %p getNextBuffer status %d", this, status);
+ buffer->frameCount = buf.mFrameCount;
if (buf.mFrameCount == 0) {
return WOULD_BLOCK;
}
- buffer->frameCount = buf.mFrameCount;
status = Track::getNextBuffer(buffer, pts);
return status;
}
@@ -2166,10 +2166,10 @@
status_t status = mPeerProxy->obtainBuffer(&buf, &mPeerTimeout);
ALOGV_IF(status != NO_ERROR,
"PatchRecord() %p mPeerProxy->obtainBuffer status %d", this, status);
+ buffer->frameCount = buf.mFrameCount;
if (buf.mFrameCount == 0) {
return WOULD_BLOCK;
}
- buffer->frameCount = buf.mFrameCount;
status = RecordTrack::getNextBuffer(buffer, pts);
return status;
}
diff --git a/services/audiopolicy/AudioPolicyManager.cpp b/services/audiopolicy/AudioPolicyManager.cpp
index c00541c..9e59488 100644
--- a/services/audiopolicy/AudioPolicyManager.cpp
+++ b/services/audiopolicy/AudioPolicyManager.cpp
@@ -95,8 +95,8 @@
STRING_TO_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
STRING_TO_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
STRING_TO_ENUM(AUDIO_DEVICE_IN_HDMI),
- STRING_TO_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
STRING_TO_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
+ STRING_TO_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
STRING_TO_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
STRING_TO_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
STRING_TO_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
@@ -254,7 +254,7 @@
return NO_MEMORY;
}
- if (checkOutputsForDevice(device, state, outputs, address) != NO_ERROR) {
+ if (checkOutputsForDevice(devDesc, state, outputs, address) != NO_ERROR) {
mAvailableOutputDevices.remove(devDesc);
return INVALID_OPERATION;
}
@@ -275,7 +275,7 @@
// remove device from available output devices
mAvailableOutputDevices.remove(devDesc);
- checkOutputsForDevice(device, state, outputs, address);
+ checkOutputsForDevice(devDesc, state, outputs, address);
} break;
default:
@@ -304,17 +304,24 @@
}
updateDevicesAndOutputs();
+ if (mPhoneState == AUDIO_MODE_IN_CALL) {
+ audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
+ updateCallRouting(newDevice);
+ }
for (size_t i = 0; i < mOutputs.size(); i++) {
- // do not force device change on duplicated output because if device is 0, it will
- // also force a device 0 for the two outputs it is duplicated to which may override
- // a valid device selection on those outputs.
- bool force = !mOutputs.valueAt(i)->isDuplicated()
- && (!deviceDistinguishesOnAddress(device)
- // always force when disconnecting (a non-duplicated device)
- || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
- setOutputDevice(mOutputs.keyAt(i),
- getNewOutputDevice(mOutputs.keyAt(i), true /*fromCache*/),
- force, 0);
+ audio_io_handle_t output = mOutputs.keyAt(i);
+ if ((mPhoneState != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) {
+ audio_devices_t newDevice = getNewOutputDevice(mOutputs.keyAt(i),
+ true /*fromCache*/);
+ // do not force device change on duplicated output because if device is 0, it will
+ // also force a device 0 for the two outputs it is duplicated to which may override
+ // a valid device selection on those outputs.
+ bool force = !mOutputs.valueAt(i)->isDuplicated()
+ && (!deviceDistinguishesOnAddress(device)
+ // always force when disconnecting (a non-duplicated device)
+ || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+ setOutputDevice(output, newDevice, force, 0);
+ }
}
mpClientInterface->onAudioPortListUpdate();
@@ -372,6 +379,11 @@
closeAllInputs();
+ if (mPhoneState == AUDIO_MODE_IN_CALL) {
+ audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
+ updateCallRouting(newDevice);
+ }
+
mpClientInterface->onAudioPortListUpdate();
return NO_ERROR;
} // end if is input device
@@ -406,10 +418,124 @@
}
}
+void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs)
+{
+ bool createTxPatch = false;
+ struct audio_patch patch;
+ patch.num_sources = 1;
+ patch.num_sinks = 1;
+ status_t status;
+ audio_patch_handle_t afPatchHandle;
+ DeviceVector deviceList;
+
+ audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ ALOGV("updateCallRouting device rxDevice %08x txDevice %08x", rxDevice, txDevice);
+
+ // release existing RX patch if any
+ if (mCallRxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallRxPatch->mAfPatchHandle, 0);
+ mCallRxPatch.clear();
+ }
+ // release TX patch if any
+ if (mCallTxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallTxPatch->mAfPatchHandle, 0);
+ mCallTxPatch.clear();
+ }
+
+ // If the RX device is on the primary HW module, then use legacy routing method for voice calls
+ // via setOutputDevice() on primary output.
+ // Otherwise, create two audio patches for TX and RX path.
+ if (availablePrimaryOutputDevices() & rxDevice) {
+ setOutputDevice(mPrimaryOutput, rxDevice, true, delayMs);
+ // If the TX device is also on the primary HW module, setOutputDevice() will take care
+ // of it due to legacy implementation. If not, create a patch.
+ if ((availablePrimaryInputDevices() & txDevice & ~AUDIO_DEVICE_BIT_IN)
+ == AUDIO_DEVICE_NONE) {
+ createTxPatch = true;
+ }
+ } else {
+ // create RX path audio patch
+ deviceList = mAvailableOutputDevices.getDevicesFromType(rxDevice);
+ ALOG_ASSERT(!deviceList.isEmpty(),
+ "updateCallRouting() selected device not in output device list");
+ sp<DeviceDescriptor> rxSinkDeviceDesc = deviceList.itemAt(0);
+ deviceList = mAvailableInputDevices.getDevicesFromType(AUDIO_DEVICE_IN_TELEPHONY_RX);
+ ALOG_ASSERT(!deviceList.isEmpty(),
+ "updateCallRouting() no telephony RX device");
+ sp<DeviceDescriptor> rxSourceDeviceDesc = deviceList.itemAt(0);
+
+ rxSourceDeviceDesc->toAudioPortConfig(&patch.sources[0]);
+ rxSinkDeviceDesc->toAudioPortConfig(&patch.sinks[0]);
+
+ // request to reuse existing output stream if one is already opened to reach the RX device
+ SortedVector<audio_io_handle_t> outputs =
+ getOutputsForDevice(rxDevice, mOutputs);
+ audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE);
+ if (output != AUDIO_IO_HANDLE_NONE) {
+ sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
+ ALOG_ASSERT(!outputDesc->isDuplicated(),
+ "updateCallRouting() RX device output is duplicated");
+ outputDesc->toAudioPortConfig(&patch.sources[1]);
+ patch.num_sources = 2;
+ }
+
+ afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
+ status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, 0);
+ ALOGW_IF(status != NO_ERROR, "updateCallRouting() error %d creating RX audio patch",
+ status);
+ if (status == NO_ERROR) {
+ mCallRxPatch = new AudioPatch((audio_patch_handle_t)nextUniqueId(),
+ &patch, mUidCached);
+ mCallRxPatch->mAfPatchHandle = afPatchHandle;
+ mCallRxPatch->mUid = mUidCached;
+ }
+ createTxPatch = true;
+ }
+ if (createTxPatch) {
+
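+ // create the TX path audio patch from the selected TX device to the telephony TX device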
+ struct audio_patch patch;
+ patch.num_sources = 1;
+ patch.num_sinks = 1;
+ deviceList = mAvailableInputDevices.getDevicesFromType(txDevice);
+ ALOG_ASSERT(!deviceList.isEmpty(),
+ "updateCallRouting() selected device not in input device list");
+ sp<DeviceDescriptor> txSourceDeviceDesc = deviceList.itemAt(0);
+ txSourceDeviceDesc->toAudioPortConfig(&patch.sources[0]);
+ deviceList = mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_TELEPHONY_TX);
+ ALOG_ASSERT(!deviceList.isEmpty(),
+ "updateCallRouting() no telephony TX device");
+ sp<DeviceDescriptor> txSinkDeviceDesc = deviceList.itemAt(0);
+ txSinkDeviceDesc->toAudioPortConfig(&patch.sinks[0]);
+
+ SortedVector<audio_io_handle_t> outputs =
+ getOutputsForDevice(AUDIO_DEVICE_OUT_TELEPHONY_TX, mOutputs);
+ audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE);
+ // request to reuse existing output stream if one is already opened to reach the TX
+ // path output device
+ if (output != AUDIO_IO_HANDLE_NONE) {
+ sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
+ ALOG_ASSERT(!outputDesc->isDuplicated(),
+ "updateCallRouting() RX device output is duplicated");
+ outputDesc->toAudioPortConfig(&patch.sources[1]);
+ patch.num_sources = 2;
+ }
+
+ afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
+ status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, 0);
+ ALOGW_IF(status != NO_ERROR, "updateCallRouting() error %d creating TX audio patch",
+ status);
+ if (status == NO_ERROR) {
+ mCallTxPatch = new AudioPatch((audio_patch_handle_t)nextUniqueId(),
+ &patch, mUidCached);
+ mCallTxPatch->mAfPatchHandle = afPatchHandle;
+ mCallTxPatch->mUid = mUidCached;
+ }
+ }
+}
+
void AudioPolicyManager::setPhoneState(audio_mode_t state)
{
ALOGV("setPhoneState() state %d", state);
- audio_devices_t newDevice = AUDIO_DEVICE_NONE;
if (state < 0 || state >= AUDIO_MODE_CNT) {
ALOGW("setPhoneState() invalid state %d", state);
return;
@@ -461,19 +587,12 @@
}
// check for device and output changes triggered by new phone state
- newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
checkA2dpSuspend();
checkOutputForAllStrategies();
updateDevicesAndOutputs();
sp<AudioOutputDescriptor> hwOutputDesc = mOutputs.valueFor(mPrimaryOutput);
- // force routing command to audio hardware when ending call
- // even if no device change is needed
- if (isStateInCall(oldState) && newDevice == AUDIO_DEVICE_NONE) {
- newDevice = hwOutputDesc->device();
- }
-
int delayMs = 0;
if (isStateInCall(state)) {
nsecs_t sysTime = systemTime();
@@ -500,9 +619,30 @@
}
}
- // change routing is necessary
- setOutputDevice(mPrimaryOutput, newDevice, force, delayMs);
+ // Note that despite the fact that getNewOutputDevice() is called on the primary output,
+ // the device returned is not necessarily reachable via this output
+ audio_devices_t rxDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
+ // force routing command to audio hardware when ending call
+ // even if no device change is needed
+ if (isStateInCall(oldState) && rxDevice == AUDIO_DEVICE_NONE) {
+ rxDevice = hwOutputDesc->device();
+ }
+ if (state == AUDIO_MODE_IN_CALL) {
+ updateCallRouting(rxDevice, delayMs);
+ } else if (oldState == AUDIO_MODE_IN_CALL) {
+ if (mCallRxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallRxPatch->mAfPatchHandle, 0);
+ mCallRxPatch.clear();
+ }
+ if (mCallTxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallTxPatch->mAfPatchHandle, 0);
+ mCallTxPatch.clear();
+ }
+ setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
+ } else {
+ setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
+ }
// if entering in call state, handle special case of active streams
// pertaining to sonification strategy see handleIncallSonification()
if (isStateInCall(state)) {
@@ -591,10 +731,16 @@
checkA2dpSuspend();
checkOutputForAllStrategies();
updateDevicesAndOutputs();
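+ // in call: let updateCallRouting() handle the primary output; the loop below skips it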
+ if (mPhoneState == AUDIO_MODE_IN_CALL) {
+ audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, true /*fromCache*/);
+ updateCallRouting(newDevice);
+ }
for (size_t i = 0; i < mOutputs.size(); i++) {
audio_io_handle_t output = mOutputs.keyAt(i);
audio_devices_t newDevice = getNewOutputDevice(output, true /*fromCache*/);
- setOutputDevice(output, newDevice, (newDevice != AUDIO_DEVICE_NONE));
+ if ((mPhoneState != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) {
+ setOutputDevice(output, newDevice, (newDevice != AUDIO_DEVICE_NONE));
+ }
if (forceVolumeReeval && (newDevice != AUDIO_DEVICE_NONE)) {
applyStreamVolumes(output, newDevice, 0, true);
}
@@ -1901,6 +2047,25 @@
return module;
}
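+// returns the available output device types that the primary output profile supports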
+audio_devices_t AudioPolicyManager::availablePrimaryOutputDevices()
+{
+ sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(mPrimaryOutput);
+ audio_devices_t devices = outputDesc->mProfile->mSupportedDevices.types();
+ return devices & mAvailableOutputDevices.types();
+}
+
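+// returns the available input device types attached to the primary HW module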
+audio_devices_t AudioPolicyManager::availablePrimaryInputDevices()
+{
+ audio_module_handle_t primaryHandle =
+ mOutputs.valueFor(mPrimaryOutput)->mProfile->mModule->mHandle;
+ audio_devices_t devices = AUDIO_DEVICE_NONE;
+ for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
+ if (mAvailableInputDevices[i]->mModule->mHandle == primaryHandle) {
+ devices |= mAvailableInputDevices[i]->mDeviceType;
+ }
+ }
+ return devices;
+}
status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
@@ -1913,22 +2078,37 @@
}
ALOGV("createAudioPatch() num sources %d num sinks %d", patch->num_sources, patch->num_sinks);
- if (patch->num_sources > 1 || patch->num_sinks > 1) {
+ if (patch->num_sources == 0 || patch->num_sources > AUDIO_PATCH_PORTS_MAX ||
+ patch->num_sinks == 0 || patch->num_sinks > AUDIO_PATCH_PORTS_MAX) {
+ return BAD_VALUE;
+ }
+ // only one source per audio patch supported for now
+ if (patch->num_sources > 1) {
return INVALID_OPERATION;
}
- if (patch->sources[0].role != AUDIO_PORT_ROLE_SOURCE ||
- patch->sinks[0].role != AUDIO_PORT_ROLE_SINK) {
+
+ if (patch->sources[0].role != AUDIO_PORT_ROLE_SOURCE) {
return INVALID_OPERATION;
}
+ for (size_t i = 0; i < patch->num_sinks; i++) {
+ if (patch->sinks[i].role != AUDIO_PORT_ROLE_SINK) {
+ return INVALID_OPERATION;
+ }
+ }
sp<AudioPatch> patchDesc;
ssize_t index = mAudioPatches.indexOfKey(*handle);
- ALOGV("createAudioPatch sink id %d role %d type %d", patch->sinks[0].id, patch->sinks[0].role,
- patch->sinks[0].type);
ALOGV("createAudioPatch source id %d role %d type %d", patch->sources[0].id,
patch->sources[0].role,
patch->sources[0].type);
+#if LOG_NDEBUG == 0
+ for (size_t i = 0; i < patch->num_sinks; i++) {
+ ALOGV("createAudioPatch sink %d: id %d role %d type %d", i, patch->sinks[i].id,
+ patch->sinks[i].role,
+ patch->sinks[i].type);
+ }
+#endif
if (index >= 0) {
patchDesc = mAudioPatches.valueAt(index);
@@ -1942,12 +2122,6 @@
}
if (patch->sources[0].type == AUDIO_PORT_TYPE_MIX) {
- // TODO add support for mix to mix connection
- if (patch->sinks[0].type != AUDIO_PORT_TYPE_DEVICE) {
- ALOGV("createAudioPatch() source mix sink not device");
- return BAD_VALUE;
- }
- // output mix to output device connection
sp<AudioOutputDescriptor> outputDesc = getOutputFromId(patch->sources[0].id);
if (outputDesc == NULL) {
ALOGV("createAudioPatch() output not found for id %d", patch->sources[0].id);
@@ -1962,30 +2136,41 @@
return BAD_VALUE;
}
}
- sp<DeviceDescriptor> devDesc =
- mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id);
- if (devDesc == 0) {
- ALOGV("createAudioPatch() out device not found for id %d", patch->sinks[0].id);
- return BAD_VALUE;
- }
+ DeviceVector devices;
+ for (size_t i = 0; i < patch->num_sinks; i++) {
+ // Only support mix to devices connection
+ // TODO add support for mix to mix connection
+ if (patch->sinks[i].type != AUDIO_PORT_TYPE_DEVICE) {
+ ALOGV("createAudioPatch() source mix but sink is not a device");
+ return INVALID_OPERATION;
+ }
+ sp<DeviceDescriptor> devDesc =
+ mAvailableOutputDevices.getDeviceFromId(patch->sinks[i].id);
+ if (devDesc == 0) {
+ ALOGV("createAudioPatch() out device not found for id %d", patch->sinks[i].id);
+ return BAD_VALUE;
+ }
- if (!outputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType,
- patch->sources[0].sample_rate,
- NULL, // updatedSamplingRate
- patch->sources[0].format,
- patch->sources[0].channel_mask,
- AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
- ALOGV("createAudioPatch() profile not supported");
+ if (!outputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType,
+ patch->sources[0].sample_rate,
+ NULL, // updatedSamplingRate
+ patch->sources[0].format,
+ patch->sources[0].channel_mask,
+ AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+ ALOGV("createAudioPatch() profile not supported for device %08x",
+ devDesc->mDeviceType);
+ return INVALID_OPERATION;
+ }
+ devices.add(devDesc);
+ }
+ if (devices.size() == 0) {
return INVALID_OPERATION;
}
+
// TODO: reconfigure output format and channels here
ALOGV("createAudioPatch() setting device %08x on output %d",
- devDesc->mDeviceType, outputDesc->mIoHandle);
- setOutputDevice(outputDesc->mIoHandle,
- devDesc->mDeviceType,
- true,
- 0,
- handle);
+ devices.types(), outputDesc->mIoHandle);
+ setOutputDevice(outputDesc->mIoHandle, devices.types(), true, 0, handle);
index = mAudioPatches.indexOfKey(*handle);
if (index >= 0) {
if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) {
@@ -2001,6 +2186,10 @@
} else if (patch->sources[0].type == AUDIO_PORT_TYPE_DEVICE) {
if (patch->sinks[0].type == AUDIO_PORT_TYPE_MIX) {
// input device to input mix connection
+ // only one sink supported when connecting an input device to a mix
+ if (patch->num_sinks > 1) {
+ return INVALID_OPERATION;
+ }
sp<AudioInputDescriptor> inputDesc = getInputFromId(patch->sinks[0].id);
if (inputDesc == NULL) {
return BAD_VALUE;
@@ -2030,10 +2219,7 @@
// TODO: reconfigure output format and channels here
ALOGV("createAudioPatch() setting device %08x on output %d",
devDesc->mDeviceType, inputDesc->mIoHandle);
- setInputDevice(inputDesc->mIoHandle,
- devDesc->mDeviceType,
- true,
- handle);
+ setInputDevice(inputDesc->mIoHandle, devDesc->mDeviceType, true, handle);
index = mAudioPatches.indexOfKey(*handle);
if (index >= 0) {
if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) {
@@ -2049,38 +2235,53 @@
} else if (patch->sinks[0].type == AUDIO_PORT_TYPE_DEVICE) {
// device to device connection
if (patchDesc != 0) {
- if (patchDesc->mPatch.sources[0].id != patch->sources[0].id &&
- patchDesc->mPatch.sinks[0].id != patch->sinks[0].id) {
+ if (patchDesc->mPatch.sources[0].id != patch->sources[0].id) {
return BAD_VALUE;
}
}
-
sp<DeviceDescriptor> srcDeviceDesc =
mAvailableInputDevices.getDeviceFromId(patch->sources[0].id);
- sp<DeviceDescriptor> sinkDeviceDesc =
- mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id);
- if (srcDeviceDesc == 0 || sinkDeviceDesc == 0) {
- return BAD_VALUE;
- }
+ if (srcDeviceDesc == 0) {
+ return BAD_VALUE;
+ }
+
//update source and sink with our own data as the data passed in the patch may
// be incomplete.
struct audio_patch newPatch = *patch;
srcDeviceDesc->toAudioPortConfig(&newPatch.sources[0], &patch->sources[0]);
- sinkDeviceDesc->toAudioPortConfig(&newPatch.sinks[0], &patch->sinks[0]);
- if (srcDeviceDesc->mModule != sinkDeviceDesc->mModule) {
- SortedVector<audio_io_handle_t> outputs =
- getOutputsForDevice(sinkDeviceDesc->mDeviceType, mOutputs);
- // if the sink device is reachable via an opened output stream, request to go via
- // this output stream by adding a second source to the patch description
- audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE);
- if (output != AUDIO_IO_HANDLE_NONE) {
- sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
- if (outputDesc->isDuplicated()) {
+ for (size_t i = 0; i < patch->num_sinks; i++) {
+ if (patch->sinks[i].type != AUDIO_PORT_TYPE_DEVICE) {
+ ALOGV("createAudioPatch() source device but one sink is not a device");
+ return INVALID_OPERATION;
+ }
+
+ sp<DeviceDescriptor> sinkDeviceDesc =
+ mAvailableOutputDevices.getDeviceFromId(patch->sinks[i].id);
+ if (sinkDeviceDesc == 0) {
+ return BAD_VALUE;
+ }
+ sinkDeviceDesc->toAudioPortConfig(&newPatch.sinks[i], &patch->sinks[i]);
+
+ if (srcDeviceDesc->mModule != sinkDeviceDesc->mModule) {
+ // only one sink supported when connecting devices across HW modules
+ if (patch->num_sinks > 1) {
return INVALID_OPERATION;
}
- outputDesc->toAudioPortConfig(&newPatch.sources[1], &patch->sources[0]);
- newPatch.num_sources = 2;
+ SortedVector<audio_io_handle_t> outputs =
+ getOutputsForDevice(sinkDeviceDesc->mDeviceType,
+ mOutputs);
+ // if the sink device is reachable via an opened output stream, request to go via
+ // this output stream by adding a second source to the patch description
+ audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE);
+ if (output != AUDIO_IO_HANDLE_NONE) {
+ sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
+ if (outputDesc->isDuplicated()) {
+ return INVALID_OPERATION;
+ }
+ outputDesc->toAudioPortConfig(&newPatch.sources[1], &patch->sources[0]);
+ newPatch.num_sources = 2;
+ }
}
}
// TODO: check from routing capabilities in config file and other conflicting patches
@@ -2782,7 +2983,7 @@
patchDesc->mPatch.sinks[j].ext.device.address;
if (strncmp(patchAddr,
address.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
- ALOGV("checkOutputsForDevice(): adding opened output %d on same address %s",
+ ALOGV("findIoHandlesByAddress(): adding opened output %d on same address %s",
desc->mIoHandle, patchDesc->mPatch.sinks[j].ext.device.address);
outputs.add(desc->mIoHandle);
break;
@@ -2792,12 +2993,15 @@
}
}
-status_t AudioPolicyManager::checkOutputsForDevice(audio_devices_t device,
+status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> devDesc,
audio_policy_dev_state_t state,
SortedVector<audio_io_handle_t>& outputs,
const String8 address)
{
+ audio_devices_t device = devDesc->mDeviceType;
sp<AudioOutputDescriptor> desc;
+ // erase all current sample rates, formats and channel masks
+ devDesc->clearCapabilities();
if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
// first list already open outputs that can be routed to this device
@@ -2846,6 +3050,9 @@
for (j = 0; j < outputs.size(); j++) {
desc = mOutputs.valueFor(outputs.itemAt(j));
if (!desc->isDuplicated() && desc->mProfile == profile) {
+ // matching profile: save the sample rates, format and channel masks supported
+ // by the profile in our device descriptor
+ devDesc->importAudioPort(profile);
break;
}
}
@@ -2995,6 +3202,8 @@
profile_index--;
} else {
outputs.add(output);
+ devDesc->importAudioPort(profile);
+
if (deviceDistinguishesOnAddress(device)) {
ALOGV("checkOutputsForDevice(): setOutputDevice(dev=0x%x, addr=%s)",
device, address.string());
@@ -3650,6 +3859,21 @@
// FALL THROUGH
case STRATEGY_PHONE:
+ // Force use of only devices on primary output if:
+ // - in call AND
+ // - cannot route from voice call RX OR
+ // - audio HAL version is < 3.0 and TX device is on the primary HW module
+ if (mPhoneState == AUDIO_MODE_IN_CALL) {
+ audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ sp<AudioOutputDescriptor> hwOutputDesc = mOutputs.valueFor(mPrimaryOutput);
+ if (((mAvailableInputDevices.types() &
+ AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) ||
+ (((txDevice & availablePrimaryInputDevices() & ~AUDIO_DEVICE_BIT_IN) != 0) &&
+ (hwOutputDesc->mAudioPort->mModule->mHalVersion <
+ AUDIO_DEVICE_API_VERSION_3_0))) {
+ availableOutputDeviceTypes = availablePrimaryOutputDevices();
+ }
+ }
// for phone strategy, we first consider the forced use and then the available devices by order
// of priority
switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) {
@@ -3679,11 +3903,11 @@
if (device) break;
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADSET;
if (device) break;
+ device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE;
+ if (device) break;
if (mPhoneState != AUDIO_MODE_IN_CALL) {
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_ACCESSORY;
if (device) break;
- device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE;
- if (device) break;
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
if (device) break;
device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_AUX_DIGITAL;
@@ -4187,19 +4411,60 @@
device = AUDIO_DEVICE_IN_VOICE_CALL;
break;
}
- // FALL THROUGH
+ break;
case AUDIO_SOURCE_DEFAULT:
case AUDIO_SOURCE_MIC:
if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
device = AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
- break;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) {
+ device = AUDIO_DEVICE_IN_WIRED_HEADSET;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) {
+ device = AUDIO_DEVICE_IN_USB_DEVICE;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ device = AUDIO_DEVICE_IN_BUILTIN_MIC;
}
- // FALL THROUGH
+ break;
+
+ case AUDIO_SOURCE_VOICE_COMMUNICATION:
+ // When in call and the HAL does not support routing to the voice call path, restrict
+ // device selection to the devices available on the primary input HW module.
+ if ((mPhoneState == AUDIO_MODE_IN_CALL) &&
+ (mAvailableOutputDevices.types() & AUDIO_DEVICE_OUT_TELEPHONY_TX) == 0) {
+ availableDeviceTypes = availablePrimaryInputDevices() & ~AUDIO_DEVICE_BIT_IN;
+ }
+
+ switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) {
+ case AUDIO_POLICY_FORCE_BT_SCO:
+ // if SCO device is requested but no SCO device is available, fall back to default case
+ if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) {
+ device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ break;
+ }
+ // FALL THROUGH
+
+ default: // FORCE_NONE
+ if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) {
+ device = AUDIO_DEVICE_IN_WIRED_HEADSET;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) {
+ device = AUDIO_DEVICE_IN_USB_DEVICE;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ device = AUDIO_DEVICE_IN_BUILTIN_MIC;
+ }
+ break;
+
+ case AUDIO_POLICY_FORCE_SPEAKER:
+ if (availableDeviceTypes & AUDIO_DEVICE_IN_BACK_MIC) {
+ device = AUDIO_DEVICE_IN_BACK_MIC;
+ } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ device = AUDIO_DEVICE_IN_BUILTIN_MIC;
+ }
+ break;
+ }
+ break;
case AUDIO_SOURCE_VOICE_RECOGNITION:
case AUDIO_SOURCE_HOTWORD:
- case AUDIO_SOURCE_VOICE_COMMUNICATION:
if (mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO &&
availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) {
device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
@@ -5318,15 +5583,21 @@
port->type = mType;
unsigned int i;
for (i = 0; i < mSamplingRates.size() && i < AUDIO_PORT_MAX_SAMPLING_RATES; i++) {
- port->sample_rates[i] = mSamplingRates[i];
+ if (mSamplingRates[i] != 0) {
+ port->sample_rates[i] = mSamplingRates[i];
+ }
}
port->num_sample_rates = i;
for (i = 0; i < mChannelMasks.size() && i < AUDIO_PORT_MAX_CHANNEL_MASKS; i++) {
- port->channel_masks[i] = mChannelMasks[i];
+ if (mChannelMasks[i] != 0) {
+ port->channel_masks[i] = mChannelMasks[i];
+ }
}
port->num_channel_masks = i;
for (i = 0; i < mFormats.size() && i < AUDIO_PORT_MAX_FORMATS; i++) {
- port->formats[i] = mFormats[i];
+ if (mFormats[i] != 0) {
+ port->formats[i] = mFormats[i];
+ }
}
port->num_formats = i;
@@ -5338,6 +5609,59 @@
port->num_gains = i;
}
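+// imports the sample rates, channel masks and formats supported by another port,
+// skipping "dynamic" (0) entries and values already present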
+void AudioPolicyManager::AudioPort::importAudioPort(const sp<AudioPort> port) {
+ for (size_t k = 0 ; k < port->mSamplingRates.size() ; k++) {
+ const uint32_t rate = port->mSamplingRates.itemAt(k);
+ if (rate != 0) { // skip "dynamic" rates
+ bool hasRate = false;
+ for (size_t l = 0 ; l < mSamplingRates.size() ; l++) {
+ if (rate == mSamplingRates.itemAt(l)) {
+ hasRate = true;
+ break;
+ }
+ }
+ if (!hasRate) { // never import a sampling rate twice
+ mSamplingRates.add(rate);
+ }
+ }
+ }
+ for (size_t k = 0 ; k < port->mChannelMasks.size() ; k++) {
+ const audio_channel_mask_t mask = port->mChannelMasks.itemAt(k);
+ if (mask != 0) { // skip "dynamic" masks
+ bool hasMask = false;
+ for (size_t l = 0 ; l < mChannelMasks.size() ; l++) {
+ if (mask == mChannelMasks.itemAt(l)) {
+ hasMask = true;
+ break;
+ }
+ }
+ if (!hasMask) { // never import a channel mask twice
+ mChannelMasks.add(mask);
+ }
+ }
+ }
+ for (size_t k = 0 ; k < port->mFormats.size() ; k++) {
+ const audio_format_t format = port->mFormats.itemAt(k);
+ if (format != 0) { // skip "dynamic" formats
+ bool hasFormat = false;
+ for (size_t l = 0 ; l < mFormats.size() ; l++) {
+ if (format == mFormats.itemAt(l)) {
+ hasFormat = true;
+ break;
+ }
+ }
+ if (!hasFormat) { // never import a format twice
+ mFormats.add(format);
+ }
+ }
+ }
+}
+
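+// forgets all sample rates, channel masks and formats previously imported for this port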
+void AudioPolicyManager::AudioPort::clearCapabilities() {
+ mChannelMasks.clear();
+ mFormats.clear();
+ mSamplingRates.clear();
+}
void AudioPolicyManager::AudioPort::loadSamplingRates(char *name)
{
diff --git a/services/audiopolicy/AudioPolicyManager.h b/services/audiopolicy/AudioPolicyManager.h
index 47235f7..f071675 100644
--- a/services/audiopolicy/AudioPolicyManager.h
+++ b/services/audiopolicy/AudioPolicyManager.h
@@ -238,6 +238,9 @@
virtual void toAudioPort(struct audio_port *port) const;
+ void importAudioPort(const sp<AudioPort> port);
+ void clearCapabilities();
+
void loadSamplingRates(char *name);
void loadFormats(char *name);
void loadOutChannels(char *name);
@@ -628,7 +631,7 @@
// when a device is disconnected, checks if an output is not used any more and
// returns its handle if any.
// transfers the audio tracks and effects from one output thread to another accordingly.
- status_t checkOutputsForDevice(audio_devices_t device,
+ status_t checkOutputsForDevice(const sp<DeviceDescriptor> devDesc,
audio_policy_dev_state_t state,
SortedVector<audio_io_handle_t>& outputs,
const String8 address);
@@ -727,6 +730,11 @@
sp<AudioInputDescriptor> getInputFromId(audio_port_handle_t id) const;
sp<HwModule> getModuleForDevice(audio_devices_t device) const;
sp<HwModule> getModuleFromName(const char *name) const;
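+ // devices currently available on the primary output / primary HW module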
+ audio_devices_t availablePrimaryOutputDevices();
+ audio_devices_t availablePrimaryInputDevices();
+
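+ // select and apply the routing for the voice call RX and TX paths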
+ void updateCallRouting(audio_devices_t rxDevice, int delayMs = 0);
+
//
// Audio policy configuration file parsing (audio_policy.conf)
//
@@ -785,6 +793,9 @@
DefaultKeyedVector<audio_session_t, audio_io_handle_t> mSoundTriggerSessions;
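+ // audio patches used to route the voice call TX and RX paths when not handled by the
+ // primary output's legacy routing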
+ sp<AudioPatch> mCallTxPatch;
+ sp<AudioPatch> mCallRxPatch;
+
#ifdef AUDIO_POLICY_TEST
Mutex mLock;
Condition mWaitWorkCV;
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 046988e..5eb5181 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1368,7 +1368,8 @@
ATRACE_CALL();
ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
Mutex::Autolock icl(mBinderSerializationLock);
- if ( checkPid(__FUNCTION__) != OK) return String8();
+ // The camera service can unconditionally get the parameters at all times
+ if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
SharedParameters::ReadLock l(mParameters);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index 517226d..fb6b678 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -556,7 +556,8 @@
// get preview/capture parameters - key/value pairs
String8 CameraClient::getParameters() const {
Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return String8();
+ // The camera service can unconditionally get the parameters at all times
+ if (getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) return String8();
String8 params(mHardware->getParameters().flatten());
LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string());
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index c266213..bf3318e 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -155,7 +155,7 @@
callbackFormat, params.previewFormat);
res = device->createStream(mCallbackWindow,
params.previewWidth, params.previewHeight,
- callbackFormat, 0, &mCallbackStreamId);
+ callbackFormat, &mCallbackStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 964d278..cda98be 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -73,11 +73,10 @@
}
// Find out buffer size for JPEG
- camera_metadata_ro_entry_t maxJpegSize =
- params.staticInfo(ANDROID_JPEG_MAX_SIZE);
- if (maxJpegSize.count == 0) {
- ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
- __FUNCTION__, mId);
+ ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
+ if (maxJpegSize <= 0) {
+ ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
+ __FUNCTION__, mId, maxJpegSize);
return INVALID_OPERATION;
}
@@ -91,8 +90,7 @@
mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
mCaptureWindow = new Surface(producer);
// Create memory for API consumption
- mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
- "Camera2Client::CaptureHeap");
+ mCaptureHeap = new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
if (mCaptureHeap->getSize() == 0) {
ALOGE("%s: Camera %d: Unable to allocate memory for capture",
__FUNCTION__, mId);
@@ -134,8 +132,7 @@
// Create stream for HAL production
res = device->createStream(mCaptureWindow,
params.pictureWidth, params.pictureHeight,
- HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
- &mCaptureStreamId);
+ HAL_PIXEL_FORMAT_BLOB, &mCaptureStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for capture: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 911f55a..ab0af0d 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -181,8 +181,7 @@
if (mPreviewStreamId == NO_STREAM) {
res = device->createStream(mPreviewWindow,
params.previewWidth, params.previewHeight,
- CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0,
- &mPreviewStreamId);
+ CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, &mPreviewStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
@@ -385,7 +384,7 @@
mRecordingFrameCount = 0;
res = device->createStream(mRecordingWindow,
params.videoWidth, params.videoHeight,
- CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId);
+ CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, &mRecordingStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for recording: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 10463c1..8fb876e 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -183,8 +183,7 @@
(int)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
res = device->createStream(mZslWindow,
params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
- streamType, 0,
- &mZslStreamId);
+ streamType, &mZslStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index ab61c44..37de610 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -489,6 +489,22 @@
continue;
}
+ // Make sure the candidate frame has good focus.
+ entry = frame.find(ANDROID_CONTROL_AF_STATE);
+ if (entry.count == 0) {
+ ALOGW("%s: ZSL queue frame has no AF state field!",
+ __FUNCTION__);
+ continue;
+ }
+ uint8_t afState = entry.data.u8[0];
+ if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
+ afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
+ afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
+ ALOGW("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
+ __FUNCTION__, afState);
+ continue;
+ }
+
minTimestamp = frameTimestamp;
idx = j;
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index b8611f8..86f82a3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -384,23 +384,7 @@
// after each call, but only once we are done with all.
int streamId = -1;
- if (format == HAL_PIXEL_FORMAT_BLOB) {
- // JPEG buffers need to be sized for maximum possible compressed size
- CameraMetadata staticInfo = mDevice->info();
- camera_metadata_entry_t entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Can't find maximum JPEG size in "
- "static metadata!", __FUNCTION__, mCameraId);
- return INVALID_OPERATION;
- }
- int32_t maxJpegSize = entry.data.i32[0];
- res = mDevice->createStream(anw, width, height, format, maxJpegSize,
- &streamId);
- } else {
- // All other streams are a known size
- res = mDevice->createStream(anw, width, height, format, /*size*/0,
- &streamId);
- }
+ res = mDevice->createStream(anw, width, height, format, &streamId);
if (res == OK) {
mStreamMap.add(bufferProducer->asBinder(), streamId);
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
index 0f6d278..f8823a3 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -280,7 +280,7 @@
window = new Surface(bufferProducer);
}
- return mDevice->createStream(window, width, height, format, /*size*/1,
+ return mDevice->createStream(window, width, height, format,
streamId);
}
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 037695d..9e124b0 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -109,8 +109,7 @@
* other formats, the size parameter is ignored.
*/
virtual status_t createStream(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size,
- int *id) = 0;
+ uint32_t width, uint32_t height, int format, int *id) = 0;
/**
* Create an input reprocess stream that uses buffers from an existing
@@ -156,6 +155,12 @@
virtual status_t waitUntilDrained() = 0;
/**
+ * Get Jpeg buffer size for a given jpeg resolution.
+ * Negative values are error codes.
+ */
+ virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0;
+
+ /**
* Abstract class for HAL notification listeners
*/
class NotificationListener {
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 8c2520e..d473a76 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -242,13 +242,16 @@
}
status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size, int *id) {
+ uint32_t width, uint32_t height, int format, int *id) {
ATRACE_CALL();
status_t res;
ALOGV("%s: E", __FUNCTION__);
sp<StreamAdapter> stream = new StreamAdapter(mHal2Device);
-
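+ // JPEG (BLOB) streams must be sized for the maximum possible compressed output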
+ size_t size = 0;
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ size = getJpegBufferSize(width, height);
+ }
res = stream->connectToDevice(consumer, width, height, format, size);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to create stream (%d x %d, format %x):"
@@ -263,6 +266,17 @@
return OK;
}
+ssize_t Camera2Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
+ // Always give the max jpeg buffer size regardless of the actual jpeg resolution.
+ camera_metadata_ro_entry jpegBufMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
+ if (jpegBufMaxSize.count == 0) {
+ ALOGE("%s: Camera %d: Can't find maximum JPEG size in static metadata!", __FUNCTION__, mId);
+ return BAD_VALUE;
+ }
+
+ return jpegBufMaxSize.data.i32[0];
+}
+
status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) {
ATRACE_CALL();
status_t res;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 46182f8..d0ca46e 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -57,8 +57,7 @@
virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
virtual status_t createStream(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size,
- int *id);
+ uint32_t width, uint32_t height, int format, int *id);
virtual status_t createReprocessStreamFromStream(int outputId, int *id);
virtual status_t getStreamInfo(int id,
uint32_t *width, uint32_t *height, uint32_t *format);
@@ -79,6 +78,7 @@
// Flush implemented as just a wait
virtual status_t flush(int64_t *lastFrameNumber = NULL);
virtual uint32_t getDeviceVersion();
+ virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
private:
const int mId;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a6214cc..ed350c1 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -793,12 +793,12 @@
}
status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size, int *id) {
+ uint32_t width, uint32_t height, int format, int *id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, size %zu",
- mId, mNextStreamId, width, height, format, size);
+ ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d",
+ mId, mNextStreamId, width, height, format);
status_t res;
bool wasActive = false;
@@ -832,10 +832,7 @@
sp<Camera3OutputStream> newStream;
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t jpegBufferSize = getJpegBufferSize(width, height);
- if (jpegBufferSize > 0) {
- ALOGV("%s: Overwrite Jpeg output buffer size from %zu to %zu",
- __FUNCTION__, size, jpegBufferSize);
- } else {
+ if (jpegBufferSize <= 0) {
SET_ERR_L("Invalid jpeg buffer size %zd", jpegBufferSize);
return BAD_VALUE;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index b1b0033..7656237 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -93,11 +93,9 @@
// Actual stream creation/deletion is delayed until first request is submitted
// If adding streams while actively capturing, will pause device before adding
- // stream, reconfiguring device, and unpausing. Note that, for JPEG stream, the
- // buffer size may be overwritten by an more accurate value calculated by Camera3Device.
+ // stream, reconfiguring device, and unpausing.
virtual status_t createStream(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size,
- int *id);
+ uint32_t width, uint32_t height, int format, int *id);
virtual status_t createInputStream(
uint32_t width, uint32_t height, int format,
int *id);
@@ -137,6 +135,8 @@
virtual uint32_t getDeviceVersion();
+ virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
+
// Methods called by subclasses
void notifyStatus(bool idle); // updates from StatusTracker
@@ -316,12 +316,6 @@
*/
Size getMaxJpegResolution() const;
- /**
- * Get Jpeg buffer size for a given jpeg resolution.
- * Negative values are error codes.
- */
- ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
-
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;