Merge "Added Visualizer effect." into gingerbread
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 33696f4..9a97284 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -7,7 +7,7 @@
SineSource.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libutils libbinder
+ libstagefright libmedia libutils libbinder libstagefright_foundation
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 877b908..b7a3f99 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -38,6 +38,9 @@
#include <media/stagefright/OMXCodec.h>
#include <media/mediametadataretriever.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MPEG4Writer.h>
+
using namespace android;
static long gNumRepetitions;
@@ -45,6 +48,8 @@
static long gReproduceBug; // if not -1.
static bool gPreferSoftwareCodec;
static bool gPlaybackAudio;
+static bool gWriteMP4;
+static String8 gWriteMP4Filename;
static int64_t getNowUs() {
struct timeval tv;
@@ -258,6 +263,21 @@
}
}
+static void writeSourceToMP4(const sp<MediaSource> &source) {
+ sp<MPEG4Writer> writer =
+ new MPEG4Writer(gWriteMP4Filename.string());
+
+ CHECK_EQ(writer->addSource(source), OK);
+
+ sp<MetaData> params = new MetaData;
+ CHECK_EQ(writer->start(), OK);
+
+ while (!writer->reachedEOS()) {
+ usleep(100000);
+ }
+ writer->stop();
+}
+
static void usage(const char *me) {
fprintf(stderr, "usage: %s\n", me);
fprintf(stderr, " -h(elp)\n");
@@ -270,6 +290,7 @@
fprintf(stderr, " -t(humbnail) extract video thumbnail or album art\n");
fprintf(stderr, " -s(oftware) prefer software codec\n");
fprintf(stderr, " -o playback audio\n");
+ fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n");
}
int main(int argc, char **argv) {
@@ -284,9 +305,10 @@
gReproduceBug = -1;
gPreferSoftwareCodec = false;
gPlaybackAudio = false;
+ gWriteMP4 = false;
int res;
- while ((res = getopt(argc, argv, "han:lm:b:ptso")) >= 0) {
+ while ((res = getopt(argc, argv, "han:lm:b:ptsow:")) >= 0) {
switch (res) {
case 'a':
{
@@ -322,6 +344,13 @@
break;
}
+ case 'w':
+ {
+ gWriteMP4 = true;
+ gWriteMP4Filename.setTo(optarg);
+ break;
+ }
+
case 'p':
{
dumpProfiles = true;
@@ -554,7 +583,11 @@
mediaSource = extractor->getTrack(i);
}
- playSource(&client, mediaSource);
+ if (gWriteMP4) {
+ writeSourceToMP4(mediaSource);
+ } else {
+ playSource(&client, mediaSource);
+ }
}
client.disconnect();
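Note on the new -w path above: writeSourceToMP4() starts the writer without passing any parameters, so it relies on the MPEG4Writer change further down in this patch that defaults the track start time to 0 when no kKeyTime is supplied. A minimal sketch (not part of this change) of passing an explicit start time instead, mirroring what StagefrightRecorder does below:

    sp<MetaData> params = new MetaData;
    params->setInt64(kKeyTime, systemTime() / 1000);  // explicit start time in us
    CHECK_EQ(writer->start(params.get()), OK);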
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index a4eea2a..c3cd361 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -48,8 +48,8 @@
static MediaProfiles* getInstance();
/**
- * Returns the value for the given param name at the given quality level,
- * or -1 if error.
+ * Returns the value for the given param name for the given camera at
+ * the given quality level, or -1 if error.
*
* Supported param name are:
* duration - the recording duration.
@@ -64,7 +64,8 @@
* aud.hz - audio sample rate
* aud.ch - number of audio channels
*/
- int getCamcorderProfileParamByName(const char *name, camcorder_quality quality) const;
+ int getCamcorderProfileParamByName(const char *name, int cameraId,
+ camcorder_quality quality) const;
/**
* Returns the output file formats supported.
@@ -124,12 +125,7 @@
/**
* Returns the number of image encoding quality levels supported.
*/
- Vector<int> getImageEncodingQualityLevels() const;
-
- /**
- * Returns the maximum amount of memory in bytes we can use for decoding a JPEG file.
- */
- int getImageDecodingMaxMemory() const;
+ Vector<int> getImageEncodingQualityLevels(int cameraId) const;
private:
MediaProfiles& operator=(const MediaProfiles&); // Don't call me
@@ -171,7 +167,8 @@
struct CamcorderProfile {
CamcorderProfile()
- : mFileFormat(OUTPUT_FORMAT_THREE_GPP),
+ : mCameraId(0),
+ mFileFormat(OUTPUT_FORMAT_THREE_GPP),
mQuality(CAMCORDER_QUALITY_HIGH),
mDuration(0),
mVideoCodec(0),
@@ -182,6 +179,7 @@
delete mAudioCodec;
}
+ int mCameraId;
output_format mFileFormat;
camcorder_quality mQuality;
int mDuration;
@@ -249,6 +247,11 @@
int tag;
};
+ struct ImageEncodingQualityLevels {
+ int mCameraId;
+ Vector<int> mLevels;
+ };
+
// Debug
static void logVideoCodec(const VideoCodec& codec);
static void logAudioCodec(const AudioCodec& codec);
@@ -267,9 +270,11 @@
static VideoDecoderCap* createVideoDecoderCap(const char **atts);
static VideoEncoderCap* createVideoEncoderCap(const char **atts);
static AudioEncoderCap* createAudioEncoderCap(const char **atts);
- static CamcorderProfile* createCamcorderProfile(const char **atts);
- static int getImageEncodingQualityLevel(const char **atts);
- static int getImageDecodingMaxMemory(const char **atts);
+ static CamcorderProfile* createCamcorderProfile(int cameraId, const char **atts);
+ static int getCameraId(const char **atts);
+
+ ImageEncodingQualityLevels* findImageEncodingQualityLevels(int cameraId) const;
+ void addImageEncodingQualityLevel(int cameraId, const char** atts);
// Customized element tag handler for parsing the xml configuration file.
static void startElementHandler(void *userData, const char *name, const char **atts);
@@ -303,6 +308,7 @@
static bool sIsInitialized;
static MediaProfiles *sInstance;
static Mutex sLock;
+ int mCurrentCameraId;
Vector<CamcorderProfile*> mCamcorderProfiles;
Vector<AudioEncoderCap*> mAudioEncoders;
@@ -310,8 +316,7 @@
Vector<AudioDecoderCap*> mAudioDecoders;
Vector<VideoDecoderCap*> mVideoDecoders;
Vector<output_format> mEncoderOutputFileFormats;
- Vector<int> mImageEncodingQualityLevels;
- int mImageDecodingMaxMemory;
+ Vector<ImageEncodingQualityLevels *> mImageEncodingQualityLevels;
};
}; // namespace android
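A minimal usage sketch for the per-camera lookups declared above ("duration" is one of the parameter names listed in the comment block; camera id 1 is illustrative):

    MediaProfiles *profiles = MediaProfiles::getInstance();
    int duration = profiles->getCamcorderProfileParamByName(
            "duration", 1 /* cameraId */, CAMCORDER_QUALITY_HIGH);
    Vector<int> jpegQualities = profiles->getImageEncodingQualityLevels(1 /* cameraId */);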
diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
index 1e341b9..bc3f464 100644
--- a/include/media/stagefright/ColorConverter.h
+++ b/include/media/stagefright/ColorConverter.h
@@ -58,6 +58,11 @@
const void *srcBits, size_t srcSkip,
void *dstBits, size_t dstSkip);
+ void convertYUV420SemiPlanar(
+ size_t width, size_t height,
+ const void *srcBits, size_t srcSkip,
+ void *dstBits, size_t dstSkip);
+
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
};
diff --git a/include/media/stagefright/MediaWriter.h b/include/media/stagefright/MediaWriter.h
index e91d066..8d3a9df 100644
--- a/include/media/stagefright/MediaWriter.h
+++ b/include/media/stagefright/MediaWriter.h
@@ -27,7 +27,10 @@
struct MetaData;
struct MediaWriter : public RefBase {
- MediaWriter() {}
+ MediaWriter()
+ : mMaxFileSizeLimitBytes(0),
+ mMaxFileDurationLimitUs(0) {
+ }
virtual status_t addSource(const sp<MediaSource> &source) = 0;
virtual bool reachedEOS() = 0;
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index 95fe6f6..73f5547 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -69,6 +69,10 @@
kKeyDate = 'date', // cstring
kKeyWriter = 'writ', // cstring
+ // video profile and level
+ kKeyVideoProfile = 'vprf', // int32_t
+ kKeyVideoLevel = 'vlev', // int32_t
+
// Set this key to enable authoring files in 64-bit offset
kKey64BitFileOffset = 'fobt', // int32_t (bool)
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index c95fc02..214f43a 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -27,6 +27,7 @@
class MemoryDealer;
struct OMXCodecObserver;
+struct CodecProfileLevel;
struct OMXCodec : public MediaSource,
public MediaBufferObserver {
@@ -172,9 +173,18 @@
void setVideoInputFormat(
const char *mime, const sp<MetaData>& meta);
+ status_t setupBitRate(int32_t bitRate);
+ status_t setupErrorCorrectionParameters();
+ status_t setupH263EncoderParameters(const sp<MetaData>& meta);
status_t setupMPEG4EncoderParameters(const sp<MetaData>& meta);
status_t setupAVCEncoderParameters(const sp<MetaData>& meta);
+ // If profile/level is set in the meta data, its value in the meta
+ // data will be used; otherwise, the default value will be used.
+ status_t getVideoProfileLevel(const sp<MetaData>& meta,
+ const CodecProfileLevel& defaultProfileLevel,
+ CodecProfileLevel& profileLevel);
+
status_t setVideoOutputFormat(
const char *mime, OMX_U32 width, OMX_U32 height);
diff --git a/include/media/stagefright/foundation/AHandler.h b/include/media/stagefright/foundation/AHandler.h
index 9fccead..b008b54 100644
--- a/include/media/stagefright/foundation/AHandler.h
+++ b/include/media/stagefright/foundation/AHandler.h
@@ -34,6 +34,8 @@
return mID;
}
+ sp<ALooper> looper();
+
protected:
virtual void onMessageReceived(const sp<AMessage> &msg) = 0;
diff --git a/include/media/stagefright/foundation/ALooper.h b/include/media/stagefright/foundation/ALooper.h
index 69ad837..194f1fc 100644
--- a/include/media/stagefright/foundation/ALooper.h
+++ b/include/media/stagefright/foundation/ALooper.h
@@ -39,7 +39,10 @@
handler_id registerHandler(const sp<AHandler> &handler);
void unregisterHandler(handler_id handlerID);
- status_t start(bool runOnCallingThread = false);
+ status_t start(
+ bool runOnCallingThread = false,
+ bool canCallJava = false);
+
status_t stop();
static int64_t GetNowUs();
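A minimal sketch of the extended ALooper::start() above (the handler variable is illustrative; canCallJava is relevant when the handler's callbacks may need to call into the Java VM):

    sp<ALooper> looper = new ALooper;
    ALooper::handler_id id = looper->registerHandler(handler);
    looper->start(false /* runOnCallingThread */, true /* canCallJava */);
    // ... post AMessages targeted at 'id' ...
    looper->stop();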
diff --git a/include/media/stagefright/foundation/ALooperRoster.h b/include/media/stagefright/foundation/ALooperRoster.h
index 1c6869c..c1bd4ed 100644
--- a/include/media/stagefright/foundation/ALooperRoster.h
+++ b/include/media/stagefright/foundation/ALooperRoster.h
@@ -34,10 +34,12 @@
void postMessage(const sp<AMessage> &msg, int64_t delayUs = 0);
void deliverMessage(const sp<AMessage> &msg);
+ sp<ALooper> findLooper(ALooper::handler_id handlerID);
+
private:
struct HandlerInfo {
- sp<ALooper> mLooper;
- sp<AHandler> mHandler;
+ wp<ALooper> mLooper;
+ wp<AHandler> mHandler;
};
Mutex mLock;
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index 139c620..c674cba 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -60,6 +60,8 @@
sp<AMessage> dup() const;
+ AString debugString(int32_t indent = 0) const;
+
protected:
virtual ~AMessage();
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 1263373..3869389 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -272,7 +272,7 @@
}
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createCamcorderProfile(const char **atts)
+MediaProfiles::createCamcorderProfile(int cameraId, const char **atts)
{
CHECK(!strcmp("quality", atts[0]) &&
!strcmp("fileFormat", atts[2]) &&
@@ -287,16 +287,47 @@
CHECK(fileFormat != -1);
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = cameraId;
profile->mFileFormat = static_cast<output_format>(fileFormat);
profile->mQuality = static_cast<camcorder_quality>(quality);
profile->mDuration = atoi(atts[5]);
return profile;
}
-/*static*/ int
-MediaProfiles::getImageEncodingQualityLevel(const char** atts)
+MediaProfiles::ImageEncodingQualityLevels*
+MediaProfiles::findImageEncodingQualityLevels(int cameraId) const
+{
+ int n = mImageEncodingQualityLevels.size();
+ for (int i = 0; i < n; i++) {
+ ImageEncodingQualityLevels *levels = mImageEncodingQualityLevels[i];
+ if (levels->mCameraId == cameraId) {
+ return levels;
+ }
+ }
+ return NULL;
+}
+
+void MediaProfiles::addImageEncodingQualityLevel(int cameraId, const char** atts)
{
CHECK(!strcmp("quality", atts[0]));
+ int quality = atoi(atts[1]);
+ LOGV("%s: cameraId=%d, quality=%d\n", __func__, cameraId, quality);
+ ImageEncodingQualityLevels *levels = findImageEncodingQualityLevels(cameraId);
+
+ if (levels == NULL) {
+ levels = new ImageEncodingQualityLevels();
+ levels->mCameraId = cameraId;
+ mImageEncodingQualityLevels.add(levels);
+ }
+
+ levels->mLevels.add(quality);
+}
+
+/*static*/ int
+MediaProfiles::getCameraId(const char** atts)
+{
+ if (!atts[0]) return 0; // default cameraId = 0
+ CHECK(!strcmp("cameraId", atts[0]));
return atoi(atts[1]);
}
@@ -322,10 +353,13 @@
profiles->mAudioDecoders.add(createAudioDecoderCap(atts));
} else if (strcmp("EncoderOutputFileFormat", name) == 0) {
profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts));
+ } else if (strcmp("CamcorderProfiles", name) == 0) {
+ profiles->mCurrentCameraId = getCameraId(atts);
} else if (strcmp("EncoderProfile", name) == 0) {
- profiles->mCamcorderProfiles.add(createCamcorderProfile(atts));
+ profiles->mCamcorderProfiles.add(
+ createCamcorderProfile(profiles->mCurrentCameraId, atts));
} else if (strcmp("ImageEncoding", name) == 0) {
- profiles->mImageEncodingQualityLevels.add(getImageEncodingQualityLevel(atts));
+ profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
}
}
@@ -383,7 +417,8 @@
new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
- CamcorderProfile *profile = new CamcorderProfile;
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = CAMCORDER_QUALITY_HIGH;
profile->mDuration = 60;
@@ -402,6 +437,7 @@
new MediaProfiles::AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = CAMCORDER_QUALITY_LOW;
profile->mDuration = 30;
@@ -458,9 +494,12 @@
/*static*/ void
MediaProfiles::createDefaultImageEncodingQualityLevels(MediaProfiles *profiles)
{
- profiles->mImageEncodingQualityLevels.add(70);
- profiles->mImageEncodingQualityLevels.add(80);
- profiles->mImageEncodingQualityLevels.add(90);
+ ImageEncodingQualityLevels *levels = new ImageEncodingQualityLevels();
+ levels->mCameraId = 0;
+ levels->mLevels.add(70);
+ levels->mLevels.add(80);
+ levels->mLevels.add(90);
+ profiles->mImageEncodingQualityLevels.add(levels);
}
/*static*/ MediaProfiles*
@@ -629,19 +668,24 @@
return decoders; // copy out
}
-int MediaProfiles::getCamcorderProfileParamByName(const char *name, camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+ int cameraId,
+ camcorder_quality quality) const
{
- LOGV("getCamcorderProfileParamByName: %s for quality %d", name, quality);
+ LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+ name, cameraId, quality);
int index = -1;
for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
- if (mCamcorderProfiles[i]->mQuality == quality) {
+ if (mCamcorderProfiles[i]->mCameraId == cameraId &&
+ mCamcorderProfiles[i]->mQuality == quality) {
index = i;
break;
}
}
if (index == -1) {
- LOGE("The given camcorder profile quality %d is not found", quality);
+ LOGE("The given camcorder profile camera %d quality %d is not found",
+ cameraId, quality);
return -1;
}
@@ -657,13 +701,18 @@
if (!strcmp("aud.ch", name)) return mCamcorderProfiles[index]->mAudioCodec->mChannels;
if (!strcmp("aud.hz", name)) return mCamcorderProfiles[index]->mAudioCodec->mSampleRate;
- LOGE("The given camcorder profile param name %s is not found", name);
+ LOGE("The given camcorder profile param id %d name %s is not found", cameraId, name);
return -1;
}
-Vector<int> MediaProfiles::getImageEncodingQualityLevels() const
+Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
{
- return mImageEncodingQualityLevels; // copy out
+ Vector<int> result;
+ ImageEncodingQualityLevels *levels = findImageEncodingQualityLevels(cameraId);
+ if (levels != NULL) {
+ result = levels->mLevels; // copy out
+ }
+ return result;
}
MediaProfiles::~MediaProfiles()
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index c4aeec3..50f74f2 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -42,11 +42,16 @@
namespace android {
-StagefrightRecorder::StagefrightRecorder() {
+StagefrightRecorder::StagefrightRecorder()
+ : mWriter(NULL),
+ mOutputFd(-1) {
+
+ LOGV("Constructor");
reset();
}
StagefrightRecorder::~StagefrightRecorder() {
+ LOGV("Destructor");
stop();
if (mOutputFd >= 0) {
@@ -56,40 +61,97 @@
}
status_t StagefrightRecorder::init() {
+ LOGV("init");
return OK;
}
status_t StagefrightRecorder::setAudioSource(audio_source as) {
- mAudioSource = as;
+ LOGV("setAudioSource: %d", as);
+ if (as < AUDIO_SOURCE_DEFAULT ||
+ as >= AUDIO_SOURCE_LIST_END) {
+ LOGE("Invalid audio source: %d", as);
+ return BAD_VALUE;
+ }
+
+ if (as == AUDIO_SOURCE_DEFAULT) {
+ mAudioSource = AUDIO_SOURCE_MIC;
+ } else {
+ mAudioSource = as;
+ }
return OK;
}
status_t StagefrightRecorder::setVideoSource(video_source vs) {
- mVideoSource = vs;
+ LOGV("setVideoSource: %d", vs);
+ if (vs < VIDEO_SOURCE_DEFAULT ||
+ vs >= VIDEO_SOURCE_LIST_END) {
+ LOGE("Invalid video source: %d", vs);
+ return BAD_VALUE;
+ }
+
+ if (vs == VIDEO_SOURCE_DEFAULT) {
+ mVideoSource = VIDEO_SOURCE_CAMERA;
+ } else {
+ mVideoSource = vs;
+ }
return OK;
}
status_t StagefrightRecorder::setOutputFormat(output_format of) {
- mOutputFormat = of;
+ LOGV("setOutputFormat: %d", of);
+ if (of < OUTPUT_FORMAT_DEFAULT ||
+ of >= OUTPUT_FORMAT_LIST_END) {
+ LOGE("Invalid output format: %d", of);
+ return BAD_VALUE;
+ }
+
+ if (of == OUTPUT_FORMAT_DEFAULT) {
+ mOutputFormat = OUTPUT_FORMAT_THREE_GPP;
+ } else {
+ mOutputFormat = of;
+ }
return OK;
}
status_t StagefrightRecorder::setAudioEncoder(audio_encoder ae) {
- mAudioEncoder = ae;
+ LOGV("setAudioEncoder: %d", ae);
+ if (ae < AUDIO_ENCODER_DEFAULT ||
+ ae >= AUDIO_ENCODER_LIST_END) {
+ LOGE("Invalid audio encoder: %d", ae);
+ return BAD_VALUE;
+ }
+
+ if (ae == AUDIO_ENCODER_DEFAULT) {
+ mAudioEncoder = AUDIO_ENCODER_AMR_NB;
+ } else {
+ mAudioEncoder = ae;
+ }
return OK;
}
status_t StagefrightRecorder::setVideoEncoder(video_encoder ve) {
- mVideoEncoder = ve;
+ LOGV("setVideoEncoder: %d", ve);
+ if (ve < VIDEO_ENCODER_DEFAULT ||
+ ve >= VIDEO_ENCODER_LIST_END) {
+ LOGE("Invalid video encoder: %d", ve);
+ return BAD_VALUE;
+ }
+
+ if (ve == VIDEO_ENCODER_DEFAULT) {
+ mVideoEncoder = VIDEO_ENCODER_H263;
+ } else {
+ mVideoEncoder = ve;
+ }
return OK;
}
status_t StagefrightRecorder::setVideoSize(int width, int height) {
+ LOGV("setVideoSize: %dx%d", width, height);
if (width <= 0 || height <= 0) {
LOGE("Invalid video size: %dx%d", width, height);
return BAD_VALUE;
@@ -103,6 +165,7 @@
}
status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
+ LOGV("setVideoFrameRate: %d", frames_per_second);
if (frames_per_second <= 0 || frames_per_second > 30) {
LOGE("Invalid video frame rate: %d", frames_per_second);
return BAD_VALUE;
@@ -118,7 +181,7 @@
LOGV("setCamera");
if (camera == 0) {
LOGE("camera is NULL");
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
int64_t token = IPCThreadState::self()->clearCallingIdentity();
@@ -127,7 +190,7 @@
if (mCamera == 0) {
LOGE("Unable to connect to camera");
IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
+ return -EBUSY;
}
LOGV("Connected to camera");
@@ -141,23 +204,31 @@
}
status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+ LOGV("setPreviewSurface: %p", surface.get());
mPreviewSurface = surface;
return OK;
}
status_t StagefrightRecorder::setOutputFile(const char *path) {
+ LOGE("setOutputFile(const char*) must not be called");
// We don't actually support this at all, as the media_server process
// no longer has permissions to create files.
- return UNKNOWN_ERROR;
+ return -EPERM;
}
status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
+ LOGV("setOutputFile: %d, %lld, %lld", fd, offset, length);
// These don't make any sense, do they?
CHECK_EQ(offset, 0);
CHECK_EQ(length, 0);
+ if (fd < 0) {
+ LOGE("Invalid file descriptor: %d", fd);
+ return -EBADF;
+ }
+
if (mOutputFd >= 0) {
::close(mOutputFd);
}
@@ -233,6 +304,7 @@
LOGV("setParamAudioNumberOfChannels: %d", channels);
if (channels <= 0 || channels >= 3) {
LOGE("Invalid number of audio channels: %d", channels);
+ return BAD_VALUE;
}
// Additional check on the number of channels will be performed later.
@@ -270,21 +342,23 @@
return OK;
}
-status_t StagefrightRecorder::setParamMaxDurationOrFileSize(int64_t limit,
- bool limit_is_duration) {
- LOGV("setParamMaxDurationOrFileSize: limit (%lld) for %s",
- limit, limit_is_duration?"duration":"size");
- if (limit_is_duration) { // limit is in ms
- if (limit <= 1000) { // XXX: 1 second
- LOGE("Max file duration is too short: %lld us", limit);
- }
- mMaxFileDurationUs = limit * 1000LL;
- } else {
- if (limit <= 1024) { // XXX: 1 kB
- LOGE("Max file size is too small: %lld bytes", limit);
- }
- mMaxFileSizeBytes = limit;
+status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
+ LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+ if (timeUs <= 1000000LL) { // XXX: 1 second
+ LOGE("Max file duration is too short: %lld us", timeUs);
+ return BAD_VALUE;
}
+ mMaxFileDurationUs = timeUs;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
+ LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
+ if (bytes <= 1024) { // XXX: 1 kB
+ LOGE("Max file size is too small: %lld bytes", bytes);
+ return BAD_VALUE;
+ }
+ mMaxFileSizeBytes = bytes;
return OK;
}
@@ -309,7 +383,7 @@
// If interval < 0, only the first frame is I frame, and rest are all P frames
// If interval == 0, all frames are encoded as I frames. No P frames
-// If interval > 0, it is the time spacing between 2 neighboring I frames
+// If interval > 0, it is the time spacing (seconds) between 2 neighboring I frames
status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t interval) {
LOGV("setParamVideoIFramesInterval: %d seconds", interval);
mIFramesInterval = interval;
@@ -335,6 +409,7 @@
status_t StagefrightRecorder::setParamTrackFrameStatus(int32_t nFrames) {
LOGV("setParamTrackFrameStatus: %d", nFrames);
if (nFrames <= 0) {
+ LOGE("Invalid number of frames to track: %d", nFrames);
return BAD_VALUE;
}
mTrackEveryNumberOfFrames = nFrames;
@@ -344,26 +419,43 @@
status_t StagefrightRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
if (timeDurationUs < 20000) { // Infeasible if shorter than 20 ms?
+ LOGE("Tracking time duration too short: %lld us", timeDurationUs);
return BAD_VALUE;
}
mTrackEveryTimeDurationUs = timeDurationUs;
return OK;
}
+status_t StagefrightRecorder::setParamVideoEncoderProfile(int32_t profile) {
+ LOGV("setParamVideoEncoderProfile: %d", profile);
+
+ // Additional check will be done later when we load the encoder.
+ // For now, we are accepting values defined in OpenMAX IL.
+ mVideoEncoderProfile = profile;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoEncoderLevel(int32_t level) {
+ LOGV("setParamVideoEncoderLevel: %d", level);
+
+ // Additional check will be done later when we load the encoder.
+ // For now, we are accepting values defined in OpenMAX IL.
+ mVideoEncoderLevel = level;
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
if (key == "max-duration") {
int64_t max_duration_ms;
if (safe_strtoi64(value.string(), &max_duration_ms)) {
- return setParamMaxDurationOrFileSize(
- max_duration_ms, true /* limit_is_duration */);
+ return setParamMaxFileDurationUs(1000LL * max_duration_ms);
}
} else if (key == "max-filesize") {
int64_t max_filesize_bytes;
if (safe_strtoi64(value.string(), &max_filesize_bytes)) {
- return setParamMaxDurationOrFileSize(
- max_filesize_bytes, false /* limit is filesize */);
+ return setParamMaxFileSizeBytes(max_filesize_bytes);
}
} else if (key == "interleave-duration-us") {
int32_t durationUs;
@@ -410,6 +502,16 @@
if (safe_strtoi32(value.string(), &interval)) {
return setParamVideoIFramesInterval(interval);
}
+ } else if (key == "video-param-encoder-profile") {
+ int32_t profile;
+ if (safe_strtoi32(value.string(), &profile)) {
+ return setParamVideoEncoderProfile(profile);
+ }
+ } else if (key == "video-param-encoder-level") {
+ int32_t level;
+ if (safe_strtoi32(value.string(), &level)) {
+ return setParamVideoEncoderLevel(level);
+ }
} else if (key == "video-param-camera-id") {
int32_t cameraId;
if (safe_strtoi32(value.string(), &cameraId)) {
@@ -467,7 +569,10 @@
}
status_t StagefrightRecorder::start() {
+ CHECK(mOutputFd >= 0);
+
if (mWriter != NULL) {
+ LOGE("File writer is not avaialble");
return UNKNOWN_ERROR;
}
@@ -486,6 +591,7 @@
return startAACRecording();
default:
+ LOGE("Unsupported output file format: %d", mOutputFormat);
return UNKNOWN_ERROR;
}
}
@@ -549,7 +655,6 @@
CHECK(mAudioEncoder == AUDIO_ENCODER_AAC);
CHECK(mAudioSource != AUDIO_SOURCE_LIST_END);
- CHECK(mOutputFd >= 0);
CHECK(0 == "AACWriter is not implemented yet");
@@ -565,34 +670,34 @@
mAudioEncoder != AUDIO_ENCODER_AMR_NB) {
LOGE("Invalid encoder %d used for AMRNB recording",
mAudioEncoder);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mSampleRate != 8000) {
LOGE("Invalid sampling rate %d used for AMRNB recording",
mSampleRate);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
} else { // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
LOGE("Invlaid encoder %d used for AMRWB recording",
mAudioEncoder);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mSampleRate != 16000) {
LOGE("Invalid sample rate %d used for AMRWB recording",
mSampleRate);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
}
if (mAudioChannels != 1) {
LOGE("Invalid number of audio channels %d used for amr recording",
mAudioChannels);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mAudioSource >= AUDIO_SOURCE_LIST_END) {
LOGE("Invalid audio source: %d", mAudioSource);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
sp<MediaSource> audioEncoder = createAudioSource();
@@ -601,7 +706,6 @@
return UNKNOWN_ERROR;
}
- CHECK(mOutputFd >= 0);
mWriter = new AMRWriter(dup(mOutputFd));
mWriter->addSource(audioEncoder);
@@ -668,6 +772,54 @@
}
}
+status_t StagefrightRecorder::setupCameraSource() {
+ clipVideoBitRate();
+ clipVideoFrameRate();
+ clipVideoFrameWidth();
+ clipVideoFrameHeight();
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mCamera == 0) {
+ mCamera = Camera::connect(mCameraId);
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ mFlags &= ~FLAGS_HOT_CAMERA;
+ mCamera->lock();
+ }
+
+ // Set the actual video recording frame size
+ CameraParameters params(mCamera->getParameters());
+ params.setPreviewSize(mVideoWidth, mVideoHeight);
+ params.setPreviewFrameRate(mFrameRate);
+ String8 s = params.flatten();
+ CHECK_EQ(OK, mCamera->setParameters(s));
+ CameraParameters newCameraParams(mCamera->getParameters());
+
+ // Check on video frame size
+ int frameWidth = 0, frameHeight = 0;
+ newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
+ if (frameWidth < 0 || frameWidth != mVideoWidth ||
+ frameHeight < 0 || frameHeight != mVideoHeight) {
+ LOGE("Failed to set the video frame size to %dx%d",
+ mVideoWidth, mVideoHeight);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check on video frame rate
+ int frameRate = newCameraParams.getPreviewFrameRate();
+ if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
+ LOGE("Failed to set frame rate to %d fps. The actual "
+ "frame rate is %d", mFrameRate, frameRate);
+ }
+
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return OK;
+}
+
void StagefrightRecorder::clipVideoFrameHeight() {
LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
@@ -685,140 +837,116 @@
}
}
+status_t StagefrightRecorder::setupVideoEncoder(const sp<MediaWriter>& writer) {
+ status_t err = setupCameraSource();
+ if (err != OK) return err;
+
+ sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
+ CHECK(cameraSource != NULL);
+
+ sp<MetaData> enc_meta = new MetaData;
+ enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
+ enc_meta->setInt32(kKeySampleRate, mFrameRate);
+
+ switch (mVideoEncoder) {
+ case VIDEO_ENCODER_H263:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ break;
+
+ case VIDEO_ENCODER_MPEG_4_SP:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ break;
+
+ case VIDEO_ENCODER_H264:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
+
+ default:
+ CHECK(!"Should not be here, unsupported video encoding.");
+ break;
+ }
+
+ sp<MetaData> meta = cameraSource->getFormat();
+
+ int32_t width, height, stride, sliceHeight;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+ CHECK(meta->findInt32(kKeyStride, &stride));
+ CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, mIFramesInterval);
+ enc_meta->setInt32(kKeyStride, stride);
+ enc_meta->setInt32(kKeySliceHeight, sliceHeight);
+ if (mVideoEncoderProfile != -1) {
+ enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+ }
+ if (mVideoEncoderLevel != -1) {
+ enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ }
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), OK);
+
+ sp<MediaSource> encoder = OMXCodec::Create(
+ client.interface(), enc_meta,
+ true /* createEncoder */, cameraSource);
+ if (encoder == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ writer->addSource(encoder);
+ return OK;
+}
+
+status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+ sp<MediaSource> audioEncoder;
+ switch(mAudioEncoder) {
+ case AUDIO_ENCODER_AMR_NB:
+ case AUDIO_ENCODER_AMR_WB:
+ case AUDIO_ENCODER_AAC:
+ audioEncoder = createAudioSource();
+ break;
+ default:
+ LOGE("Unsupported audio encoder: %d", mAudioEncoder);
+ return UNKNOWN_ERROR;
+ }
+
+ if (audioEncoder == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ writer->addSource(audioEncoder);
+ return OK;
+}
+
status_t StagefrightRecorder::startMPEG4Recording() {
- mWriter = new MPEG4Writer(dup(mOutputFd));
int32_t totalBitRate = 0;
+ status_t err = OK;
+ sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
// Add audio source first if it exists
if (mAudioSource != AUDIO_SOURCE_LIST_END) {
- sp<MediaSource> audioEncoder;
- switch(mAudioEncoder) {
- case AUDIO_ENCODER_AMR_NB:
- case AUDIO_ENCODER_AMR_WB:
- case AUDIO_ENCODER_AAC:
- audioEncoder = createAudioSource();
- break;
- default:
- LOGE("Unsupported audio encoder: %d", mAudioEncoder);
- return UNKNOWN_ERROR;
- }
-
- if (audioEncoder == NULL) {
- return UNKNOWN_ERROR;
- }
+ err = setupAudioEncoder(writer);
+ if (err != OK) return err;
totalBitRate += mAudioBitRate;
- mWriter->addSource(audioEncoder);
}
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
-
- clipVideoBitRate();
- clipVideoFrameRate();
- clipVideoFrameWidth();
- clipVideoFrameHeight();
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCamera == 0) {
- mCamera = Camera::connect(mCameraId);
- mCamera->lock();
- }
-
- // Set the actual video recording frame size
- CameraParameters params(mCamera->getParameters());
- params.setPreviewSize(mVideoWidth, mVideoHeight);
- params.setPreviewFrameRate(mFrameRate);
- String8 s = params.flatten();
- CHECK_EQ(OK, mCamera->setParameters(s));
- CameraParameters newCameraParams(mCamera->getParameters());
-
- // Check on video frame size
- int frameWidth = 0, frameHeight = 0;
- newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
- if (frameWidth < 0 || frameWidth != mVideoWidth ||
- frameHeight < 0 || frameHeight != mVideoHeight) {
- LOGE("Failed to set the video frame size to %dx%d",
- mVideoWidth, mVideoHeight);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
- }
-
- // Check on video frame rate
- int frameRate = newCameraParams.getPreviewFrameRate();
- if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
- LOGE("Failed to set frame rate to %d fps. The actual "
- "frame rate is %d", mFrameRate, frameRate);
- }
-
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
- IPCThreadState::self()->restoreCallingIdentity(token);
-
- sp<CameraSource> cameraSource =
- CameraSource::CreateFromCamera(mCamera);
-
- CHECK(cameraSource != NULL);
-
- sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
- enc_meta->setInt32(kKeySampleRate, mFrameRate);
-
- switch (mVideoEncoder) {
- case VIDEO_ENCODER_H263:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
- break;
-
- case VIDEO_ENCODER_MPEG_4_SP:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
- break;
-
- case VIDEO_ENCODER_H264:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
- break;
-
- default:
- CHECK(!"Should not be here, unsupported video encoding.");
- break;
- }
-
- sp<MetaData> meta = cameraSource->getFormat();
-
- int32_t width, height, stride, sliceHeight;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
- CHECK(meta->findInt32(kKeyStride, &stride));
- CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
-
- enc_meta->setInt32(kKeyWidth, width);
- enc_meta->setInt32(kKeyHeight, height);
- enc_meta->setInt32(kKeyIFramesInterval, mIFramesInterval);
- enc_meta->setInt32(kKeyStride, stride);
- enc_meta->setInt32(kKeySliceHeight, sliceHeight);
-
- OMXClient client;
- CHECK_EQ(client.connect(), OK);
-
- sp<MediaSource> encoder =
- OMXCodec::Create(
- client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
-
- CHECK(mOutputFd >= 0);
+ err = setupVideoEncoder(writer);
+ if (err != OK) return err;
totalBitRate += mVideoBitRate;
- mWriter->addSource(encoder);
}
- {
- // MPEGWriter specific handling
- MPEG4Writer *writer = ((MPEG4Writer *) mWriter.get());
- writer->setInterleaveDuration(mInterleaveDurationUs);
- }
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setInterleaveDuration(mInterleaveDurationUs);
if (mMaxFileDurationUs != 0) {
- mWriter->setMaxFileDuration(mMaxFileDurationUs);
+ writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
- mWriter->setMaxFileSize(mMaxFileSizeBytes);
+ writer->setMaxFileSize(mMaxFileSizeBytes);
}
- mWriter->setListener(mListener);
sp<MetaData> meta = new MetaData;
meta->setInt64(kKeyTime, systemTime() / 1000);
meta->setInt32(kKeyFileType, mOutputFormat);
@@ -830,11 +958,13 @@
if (mTrackEveryTimeDurationUs > 0) {
meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
- mWriter->start(meta.get());
- return OK;
+ writer->setListener(mListener);
+ mWriter = writer;
+ return mWriter->start(meta.get());
}
status_t StagefrightRecorder::pause() {
+ LOGV("pause");
if (mWriter == NULL) {
return UNKNOWN_ERROR;
}
@@ -843,34 +973,37 @@
}
status_t StagefrightRecorder::stop() {
- if (mWriter == NULL) {
- return UNKNOWN_ERROR;
+ LOGV("stop");
+ if (mWriter != NULL) {
+ mWriter->stop();
+ mWriter.clear();
}
- mWriter->stop();
- mWriter = NULL;
-
- return OK;
-}
-
-status_t StagefrightRecorder::close() {
- stop();
-
if (mCamera != 0) {
+ LOGV("Disconnect camera");
int64_t token = IPCThreadState::self()->clearCallingIdentity();
if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
LOGV("Camera was cold when we started, stopping preview");
mCamera->stopPreview();
}
mCamera->unlock();
- mCamera = NULL;
+ mCamera.clear();
IPCThreadState::self()->restoreCallingIdentity(token);
mFlags = 0;
}
+
+ return OK;
+}
+
+status_t StagefrightRecorder::close() {
+ LOGV("close");
+ stop();
+
return OK;
}
status_t StagefrightRecorder::reset() {
+ LOGV("reset");
stop();
// No audio or video source by default
@@ -893,6 +1026,10 @@
mAudioSourceNode = 0;
mUse64BitFileOffset = false;
mCameraId = 0;
+ mVideoEncoderProfile = -1;
+ mVideoEncoderLevel = -1;
+ mMaxFileDurationUs = 0;
+ mMaxFileSizeBytes = 0;
mTrackEveryNumberOfFrames = 0;
mTrackEveryTimeDurationUs = 0;
mEncoderProfiles = MediaProfiles::getInstance();
@@ -904,6 +1041,13 @@
}
status_t StagefrightRecorder::getMaxAmplitude(int *max) {
+ LOGV("getMaxAmplitude");
+
+ if (max == NULL) {
+ LOGE("Null pointer argument");
+ return BAD_VALUE;
+ }
+
if (mAudioSourceNode != 0) {
*max = mAudioSourceNode->getMaxAmplitude();
} else {
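A hedged sketch (not part of this change) of how the new recorder parameters handled above would typically be passed in from a native client; the numeric values are illustrative and the profile/level numbers are OpenMAX IL enum values. Each key=value pair is routed down to the setParameter() handler shown in this file.

    #include <media/mediarecorder.h>
    using namespace android;

    void setEncoderProfileAndLimits(const sp<MediaRecorder> &recorder) {
        recorder->setParameters(String8("video-param-encoder-profile=1"));
        recorder->setParameters(String8("video-param-encoder-level=1"));
        recorder->setParameters(String8("max-duration=30000"));   // milliseconds
        recorder->setParameters(String8("max-filesize=5000000")); // bytes
    }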
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index f4488b6..85d2557 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -82,6 +82,8 @@
int32_t mInterleaveDurationUs;
int32_t mIFramesInterval;
int32_t mCameraId;
+ int32_t mVideoEncoderProfile;
+ int32_t mVideoEncoderLevel;
int64_t mMaxFileSizeBytes;
int64_t mMaxFileDurationUs;
int32_t mTrackEveryNumberOfFrames;
@@ -97,18 +99,26 @@
status_t startAMRRecording();
status_t startAACRecording();
sp<MediaSource> createAudioSource();
+ status_t setupCameraSource();
+ status_t setupAudioEncoder(const sp<MediaWriter>& writer);
+ status_t setupVideoEncoder(const sp<MediaWriter>& writer);
+
+ // Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
status_t setParamAudioEncodingBitRate(int32_t bitRate);
status_t setParamAudioNumberOfChannels(int32_t channles);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t interval);
+ status_t setParamVideoEncoderProfile(int32_t profile);
+ status_t setParamVideoEncoderLevel(int32_t level);
status_t setParamVideoCameraId(int32_t cameraId);
status_t setParamTrackTimeStatus(int64_t timeDurationUs);
status_t setParamTrackFrameStatus(int32_t nFrames);
status_t setParamInterleaveDuration(int32_t durationUs);
status_t setParam64BitFileOffset(bool use64BitFileOffset);
- status_t setParamMaxDurationOrFileSize(int64_t limit, bool limit_is_duration);
+ status_t setParamMaxFileDurationUs(int64_t timeUs);
+ status_t setParamMaxFileSizeBytes(int64_t bytes);
void clipVideoBitRate();
void clipVideoFrameRate();
void clipVideoFrameWidth();
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 4a1580f..ffed74f 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -371,9 +371,6 @@
}
mAudioSource.clear();
- if (mTimeSource != mAudioPlayer) {
- delete mTimeSource;
- }
mTimeSource = NULL;
delete mAudioPlayer;
@@ -494,22 +491,35 @@
}
mStreamDoneEventPending = false;
- if (mStreamDoneStatus == ERROR_END_OF_STREAM && (mFlags & LOOPING)) {
+ if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
+ LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
+
+ notifyListener_l(
+ MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
+
+ pause_l();
+
+ mFlags |= AT_EOS;
+ return;
+ }
+
+ const bool allDone =
+ (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
+ && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
+
+ if (!allDone) {
+ return;
+ }
+
+ if (mFlags & LOOPING) {
seekTo_l(0);
if (mVideoSource != NULL) {
postVideoEvent_l();
}
} else {
- if (mStreamDoneStatus == ERROR_END_OF_STREAM) {
- LOGV("MEDIA_PLAYBACK_COMPLETE");
- notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
- } else {
- LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
-
- notifyListener_l(
- MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
- }
+ LOGV("MEDIA_PLAYBACK_COMPLETE");
+ notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
pause_l();
@@ -563,7 +573,6 @@
return err;
}
- delete mTimeSource;
mTimeSource = mAudioPlayer;
deferredAudioSeek = true;
@@ -579,7 +588,7 @@
}
if (mTimeSource == NULL && mAudioPlayer == NULL) {
- mTimeSource = new SystemTimeSource;
+ mTimeSource = &mSystemTimeSource;
}
if (mVideoSource != NULL) {
@@ -744,7 +753,7 @@
mSeeking = true;
mSeekNotificationSent = false;
mSeekTimeUs = timeUs;
- mFlags &= ~AT_EOS;
+ mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
seekAudioIfNecessary_l();
@@ -924,6 +933,7 @@
continue;
}
+ mFlags |= VIDEO_AT_EOS;
postStreamDoneEvent_l(err);
return;
}
@@ -968,19 +978,21 @@
mSeekNotificationSent = false;
}
+ TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
+
if (mFlags & FIRST_FRAME) {
mFlags &= ~FIRST_FRAME;
- mTimeSourceDeltaUs = mTimeSource->getRealTimeUs() - timeUs;
+ mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
}
int64_t realTimeUs, mediaTimeUs;
- if (mAudioPlayer != NULL
+ if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
}
- int64_t nowUs = mTimeSource->getRealTimeUs() - mTimeSourceDeltaUs;
+ int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
int64_t latenessUs = nowUs - timeUs;
@@ -1081,6 +1093,8 @@
status_t finalStatus;
if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
mWatchForAudioEOS = false;
+ mFlags |= AUDIO_AT_EOS;
+ mFlags |= FIRST_FRAME;
postStreamDoneEvent_l(finalStatus);
}
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a52c888..6a4a131 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -34,6 +34,8 @@
#include <media/mediarecorder.h>
#include <cutils/properties.h>
+#include "include/ESDS.h"
+
namespace android {
class MPEG4Writer::Track {
@@ -126,6 +128,8 @@
int32_t *min, int32_t *avg, int32_t *max);
void findMinMaxChunkDurations(int64_t *min, int64_t *max);
+ void getCodecSpecificDataFromInputFormatIfPossible();
+
Track(const Track &);
Track &operator=(const Track &);
};
@@ -678,6 +682,38 @@
mCodecSpecificDataSize(0),
mGotAllCodecSpecificData(false),
mReachedEOS(false) {
+ getCodecSpecificDataFromInputFormatIfPossible();
+}
+
+void MPEG4Writer::Track::getCodecSpecificDataFromInputFormatIfPossible() {
+ const char *mime;
+ CHECK(mMeta->findCString(kKeyMIMEType, &mime));
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (mMeta->findData(kKeyAVCC, &type, &data, &size)) {
+ mCodecSpecificData = malloc(size);
+ mCodecSpecificDataSize = size;
+ memcpy(mCodecSpecificData, data, size);
+ mGotAllCodecSpecificData = true;
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
+ || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
+ ESDS esds(data, size);
+ if (esds.getCodecSpecificInfo(&data, &size) == OK) {
+ mCodecSpecificData = malloc(size);
+ mCodecSpecificDataSize = size;
+ memcpy(mCodecSpecificData, data, size);
+ mGotAllCodecSpecificData = true;
+ }
+ }
+ }
}
MPEG4Writer::Track::~Track() {
@@ -721,7 +757,10 @@
}
int64_t startTimeUs;
- CHECK(params && params->findInt64(kKeyTime, &startTimeUs));
+ if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
+ startTimeUs = 0;
+ }
+
initTrackingProgressStatus(params);
sp<MetaData> meta = new MetaData;
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 02a073e..83f7040 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -149,13 +149,16 @@
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
@@ -171,16 +174,19 @@
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
+// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
+// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
+// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
+// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
};
#undef OPTIONAL
@@ -334,10 +340,17 @@
quirks |= kRequiresAllocateBufferOnInputPorts;
quirks |= kRequiresAllocateBufferOnOutputPorts;
}
+ if (!strncmp(componentName, "OMX.qcom.7x30.video.encoder.", 28)) {
+ }
if (!strncmp(componentName, "OMX.qcom.video.decoder.", 23)) {
quirks |= kRequiresAllocateBufferOnOutputPorts;
quirks |= kDefersOutputBufferAllocation;
}
+ if (!strncmp(componentName, "OMX.qcom.7x30.video.decoder.", 28)) {
+ quirks |= kRequiresAllocateBufferOnInputPorts;
+ quirks |= kRequiresAllocateBufferOnOutputPorts;
+ quirks |= kDefersOutputBufferAllocation;
+ }
if (!strncmp(componentName, "OMX.TI.", 7)) {
// Apparently I must not use OMX_UseBuffer on either input or
@@ -818,7 +831,7 @@
video_def->nFrameWidth = width;
video_def->nFrameHeight = height;
- video_def->xFramerate = (frameRate << 16); // Q16 format
+ video_def->xFramerate = 0; // No need for output port
video_def->nBitrate = bitRate; // Q16 format
video_def->eCompressionFormat = compressionFormat;
video_def->eColorFormat = OMX_COLOR_FormatUnused;
@@ -836,6 +849,7 @@
}
case OMX_VIDEO_CodingH263:
+ CHECK_EQ(setupH263EncoderParameters(meta), OK);
break;
case OMX_VIDEO_CodingAVC:
@@ -861,6 +875,142 @@
return ret;
}
+status_t OMXCodec::setupErrorCorrectionParameters() {
+ OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
+ InitOMXParams(&errorCorrectionType);
+ errorCorrectionType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+ CHECK_EQ(err, OK);
+
+ errorCorrectionType.bEnableHEC = OMX_FALSE;
+ errorCorrectionType.bEnableResync = OMX_TRUE;
+ errorCorrectionType.nResynchMarkerSpacing = 256;
+ errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
+ errorCorrectionType.bEnableRVLC = OMX_FALSE;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+ CHECK_EQ(err, OK);
+ return OK;
+}
+
+status_t OMXCodec::setupBitRate(int32_t bitRate) {
+ OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
+ InitOMXParams(&bitrateType);
+ bitrateType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+ CHECK_EQ(err, OK);
+
+ bitrateType.eControlRate = OMX_Video_ControlRateVariable;
+ bitrateType.nTargetBitrate = bitRate;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+ CHECK_EQ(err, OK);
+ return OK;
+}
+
+status_t OMXCodec::getVideoProfileLevel(
+ const sp<MetaData>& meta,
+ const CodecProfileLevel& defaultProfileLevel,
+ CodecProfileLevel &profileLevel) {
+ CODEC_LOGV("Default profile: %ld, level %ld",
+ defaultProfileLevel.mProfile, defaultProfileLevel.mLevel);
+
+ // Are the default profile and level overwritten?
+ int32_t profile, level;
+ if (!meta->findInt32(kKeyVideoProfile, &profile)) {
+ profile = defaultProfileLevel.mProfile;
+ }
+ if (!meta->findInt32(kKeyVideoLevel, &level)) {
+ level = defaultProfileLevel.mLevel;
+ }
+ CODEC_LOGV("Target profile: %d, level: %d", profile, level);
+
+ // Are the target profile and level supported by the encoder?
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+ InitOMXParams(&param);
+ param.nPortIndex = kPortIndexOutput;
+ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoProfileLevelQuerySupported,
+ &param, sizeof(param));
+
+ if (err != OK) return err;
+
+ int32_t supportedProfile = static_cast<int32_t>(param.eProfile);
+ int32_t supportedLevel = static_cast<int32_t>(param.eLevel);
+ CODEC_LOGV("Supported profile: %ld, level %ld",
+ supportedProfile, supportedLevel);
+
+ if (profile == supportedProfile &&
+ level == supportedLevel) {
+ profileLevel.mProfile = profile;
+ profileLevel.mLevel = level;
+ return OK;
+ }
+ }
+
+ CODEC_LOGE("Target profile (%d) and level (%d) is not supported",
+ profile, level);
+ return BAD_VALUE;
+}
+
+status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
+ int32_t iFramesInterval, frameRate, bitRate;
+ bool success = meta->findInt32(kKeyBitRate, &bitRate);
+ success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
+ CHECK(success);
+ OMX_VIDEO_PARAM_H263TYPE h263type;
+ InitOMXParams(&h263type);
+ h263type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+ CHECK_EQ(err, OK);
+
+ h263type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ h263type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
+ if (h263type.nPFrames == 0) {
+ h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ h263type.nBFrames = 0;
+
+ // Check profile and level parameters
+ CodecProfileLevel defaultProfileLevel, profileLevel;
+ defaultProfileLevel.mProfile = OMX_VIDEO_H263ProfileBaseline;
+ defaultProfileLevel.mLevel = OMX_VIDEO_H263Level45;
+ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+ if (err != OK) return err;
+ h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profileLevel.mProfile);
+ h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(profileLevel.mLevel);
+
+ h263type.bPLUSPTYPEAllowed = OMX_FALSE;
+ h263type.bForceRoundingTypeToZero = OMX_FALSE;
+ h263type.nPictureHeaderRepetition = 0;
+ h263type.nGOBHeaderInterval = 0;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+ CHECK_EQ(err, OK);
+
+ CHECK_EQ(setupBitRate(bitRate), OK);
+ CHECK_EQ(setupErrorCorrectionParameters(), OK);
+
+ return OK;
+}
+
status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
@@ -894,53 +1044,21 @@
mpeg4type.nHeaderExtension = 0;
mpeg4type.bReversibleVLC = OMX_FALSE;
- mpeg4type.eProfile = OMX_VIDEO_MPEG4ProfileCore;
- mpeg4type.eLevel = OMX_VIDEO_MPEG4Level2;
+ // Check profile and level parameters
+ CodecProfileLevel defaultProfileLevel, profileLevel;
+ defaultProfileLevel.mProfile = OMX_VIDEO_MPEG4ProfileSimple;
+ defaultProfileLevel.mLevel = OMX_VIDEO_MPEG4Level2;
+ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+ if (err != OK) return err;
+ mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profileLevel.mProfile);
+ mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(profileLevel.mLevel);
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
CHECK_EQ(err, OK);
- // ----------------
-
- OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
- InitOMXParams(&bitrateType);
- bitrateType.nPortIndex = kPortIndexOutput;
-
- err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
-
- bitrateType.eControlRate = OMX_Video_ControlRateVariable;
- bitrateType.nTargetBitrate = bitRate;
-
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
-
- // ----------------
-
- OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
- InitOMXParams(&errorCorrectionType);
- errorCorrectionType.nPortIndex = kPortIndexOutput;
-
- err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoErrorCorrection,
- &errorCorrectionType, sizeof(errorCorrectionType));
- CHECK_EQ(err, OK);
-
- errorCorrectionType.bEnableHEC = OMX_FALSE;
- errorCorrectionType.bEnableResync = OMX_TRUE;
- errorCorrectionType.nResynchMarkerSpacing = 256;
- errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
- errorCorrectionType.bEnableRVLC = OMX_FALSE;
-
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoErrorCorrection,
- &errorCorrectionType, sizeof(errorCorrectionType));
- CHECK_EQ(err, OK);
+ CHECK_EQ(setupBitRate(bitRate), OK);
+ CHECK_EQ(setupErrorCorrectionParameters(), OK);
return OK;
}
@@ -969,44 +1087,46 @@
if (h264type.nPFrames == 0) {
h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
}
- h264type.bUseHadamard = OMX_TRUE;
- h264type.nRefFrames = 1;
- h264type.nRefIdx10ActiveMinus1 = 0;
- h264type.nRefIdx11ActiveMinus1 = 0;
+
+ // Check profile and level parameters
+ CodecProfileLevel defaultProfileLevel, profileLevel;
+ defaultProfileLevel.mProfile = h264type.eProfile;
+ defaultProfileLevel.mLevel = h264type.eLevel;
+ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+ if (err != OK) return err;
+ h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profileLevel.mProfile);
+ h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(profileLevel.mLevel);
+
+ if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
+ h264type.bUseHadamard = OMX_TRUE;
+ h264type.nRefFrames = 1;
+ h264type.nRefIdx10ActiveMinus1 = 0;
+ h264type.nRefIdx11ActiveMinus1 = 0;
+ h264type.bEntropyCodingCABAC = OMX_FALSE;
+ h264type.bWeightedPPrediction = OMX_FALSE;
+ h264type.bconstIpred = OMX_FALSE;
+ h264type.bDirect8x8Inference = OMX_FALSE;
+ h264type.bDirectSpatialTemporal = OMX_FALSE;
+ h264type.nCabacInitIdc = 0;
+ }
+
+ if (h264type.nBFrames != 0) {
+ h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
+ }
+
h264type.bEnableUEP = OMX_FALSE;
h264type.bEnableFMO = OMX_FALSE;
h264type.bEnableASO = OMX_FALSE;
h264type.bEnableRS = OMX_FALSE;
h264type.bFrameMBsOnly = OMX_TRUE;
h264type.bMBAFF = OMX_FALSE;
- h264type.bEntropyCodingCABAC = OMX_FALSE;
- h264type.bWeightedPPrediction = OMX_FALSE;
- h264type.bconstIpred = OMX_FALSE;
- h264type.bDirect8x8Inference = OMX_FALSE;
- h264type.bDirectSpatialTemporal = OMX_FALSE;
- h264type.nCabacInitIdc = 0;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
CHECK_EQ(err, OK);
- OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
- InitOMXParams(&bitrateType);
- bitrateType.nPortIndex = kPortIndexOutput;
-
- err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
-
- bitrateType.eControlRate = OMX_Video_ControlRateVariable;
- bitrateType.nTargetBitrate = bitRate;
-
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
+ CHECK_EQ(setupBitRate(bitRate), OK);
return OK;
}
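A minimal sketch of overriding an encoder's default profile/level through the new metadata keys read by getVideoProfileLevel() above; the OMX_VIDEO_* values come from the OpenMAX IL headers, and client/cameraSource are assumed to be set up as usual:

    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    enc_meta->setInt32(kKeyVideoProfile, OMX_VIDEO_AVCProfileBaseline);
    enc_meta->setInt32(kKeyVideoLevel, OMX_VIDEO_AVCLevel13);
    // ... plus kKeyWidth, kKeyHeight, kKeyBitRate, kKeySampleRate (frame rate),
    // kKeyIFramesInterval, kKeyStride and kKeySliceHeight as in setupVideoEncoder() ...
    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta, true /* createEncoder */, cameraSource);

The requested pair must match one of the combinations the component reports via OMX_IndexParamVideoProfileLevelQuerySupported; otherwise getVideoProfileLevel() returns BAD_VALUE and encoder configuration fails.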
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index b914023..2317de6 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -132,7 +132,10 @@
}
status_t AACEncoder::start(MetaData *params) {
- CHECK(!mStarted);
+ if (mStarted) {
+ LOGW("Call start() when encoder already started");
+ return OK;
+ }
mBufferGroup = new MediaBufferGroup;
mBufferGroup->add_buffer(new MediaBuffer(2048));
@@ -150,7 +153,10 @@
}
status_t AACEncoder::stop() {
- CHECK(mStarted);
+ if (!mStarted) {
+ LOGW("Call stop() when encoder has not started");
+ return OK;
+ }
if (mInputBuffer) {
mInputBuffer->release();
diff --git a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
index 445438f..4c02fe9 100644
--- a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
@@ -70,7 +70,10 @@
}
status_t AMRNBEncoder::start(MetaData *params) {
- CHECK(!mStarted);
+ if (mStarted) {
+ LOGW("Call start() when encoder already started");
+ return OK;
+ }
mBufferGroup = new MediaBufferGroup;
mBufferGroup->add_buffer(new MediaBuffer(32));
@@ -97,7 +100,10 @@
}
status_t AMRNBEncoder::stop() {
- CHECK(mStarted);
+ if (!mStarted) {
+ LOGW("Call stop() when encoder has not started.");
+ return OK;
+ }
if (mInputBuffer) {
mInputBuffer->release();
diff --git a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
index b70cff1..4257c6a 100644
--- a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
+++ b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
@@ -124,7 +124,10 @@
}
status_t AMRWBEncoder::start(MetaData *params) {
- CHECK(!mStarted);
+ if (mStarted) {
+ LOGW("Call start() when encoder already started");
+ return OK;
+ }
mBufferGroup = new MediaBufferGroup;
@@ -142,8 +145,10 @@
}
status_t AMRWBEncoder::stop() {
- CHECK(mStarted);
-
+ if (!mStarted) {
+ LOGW("Call stop() when encoder has not started");
+ return OK;
+ }
if (mInputBuffer) {
mInputBuffer->release();
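Note: all three software audio encoders (AAC, AMR-NB, AMR-WB) get the same idempotence fix: a redundant start() or stop() now logs a warning and returns OK instead of tripping a CHECK. A caller-side sketch of the new contract; 'encoder' stands for any of the patched encoders, already constructed and configured, and the helper name is illustrative:

    void exerciseStartStop(const sp<MediaSource> &encoder) {
        CHECK_EQ(encoder->start(), OK);
        CHECK_EQ(encoder->start(), OK);  // duplicate start: warning only, still OK
        CHECK_EQ(encoder->stop(), OK);
        CHECK_EQ(encoder->stop(), OK);   // duplicate stop: warning only, still OK
    }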
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index e74782f..5b16997 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -42,6 +42,7 @@
case OMX_COLOR_FormatYUV420Planar:
case OMX_COLOR_FormatCbYCrY:
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
return true;
default:
@@ -71,6 +72,11 @@
width, height, srcBits, srcSkip, dstBits, dstSkip);
break;
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ convertYUV420SemiPlanar(
+ width, height, srcBits, srcSkip, dstBits, dstSkip);
+ break;
+
default:
{
CHECK(!"Should not be here. Unknown color conversion.");
@@ -279,6 +285,68 @@
}
}
+void ColorConverter::convertYUV420SemiPlanar(
+ size_t width, size_t height,
+ const void *srcBits, size_t srcSkip,
+ void *dstBits, size_t dstSkip) {
+ CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
+ CHECK(dstSkip >= width * 2);
+ CHECK((dstSkip & 3) == 0);
+
+ uint8_t *kAdjustedClip = initClip();
+
+ uint32_t *dst_ptr = (uint32_t *)dstBits;
+ const uint8_t *src_y = (const uint8_t *)srcBits;
+
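+ // Chroma is stored as a single interleaved plane immediately after the
+ // width x height luma plane.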
+ const uint8_t *src_u =
+ (const uint8_t *)src_y + width * height;
+
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; x += 2) {
+ signed y1 = (signed)src_y[x] - 16;
+ signed y2 = (signed)src_y[x + 1] - 16;
+
+ signed v = (signed)src_u[x & ~1] - 128;
+ signed u = (signed)src_u[(x & ~1) + 1] - 128;
+
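+ // Fixed-point YCbCr-to-RGB conversion; coefficients are pre-scaled by 256
+ // (e.g. 298 ~= 1.164 * 256, 409 ~= 1.596 * 256).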
+ signed u_b = u * 517;
+ signed u_g = -u * 100;
+ signed v_g = -v * 208;
+ signed v_r = v * 409;
+
+ signed tmp1 = y1 * 298;
+ signed b1 = (tmp1 + u_b) / 256;
+ signed g1 = (tmp1 + v_g + u_g) / 256;
+ signed r1 = (tmp1 + v_r) / 256;
+
+ signed tmp2 = y2 * 298;
+ signed b2 = (tmp2 + u_b) / 256;
+ signed g2 = (tmp2 + v_g + u_g) / 256;
+ signed r2 = (tmp2 + v_r) / 256;
+
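+ // Clamp each component via the lookup table and pack two 5-6-5 pixels
+ // into one 32-bit word.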
+ uint32_t rgb1 =
+ ((kAdjustedClip[b1] >> 3) << 11)
+ | ((kAdjustedClip[g1] >> 2) << 5)
+ | (kAdjustedClip[r1] >> 3);
+
+ uint32_t rgb2 =
+ ((kAdjustedClip[b2] >> 3) << 11)
+ | ((kAdjustedClip[g2] >> 2) << 5)
+ | (kAdjustedClip[r2] >> 3);
+
+ dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
+ }
+
+ src_y += width;
+
+ if (y & 1) {
+ src_u += width;
+ }
+
+ dst_ptr += dstSkip / 4;
+ }
+}
+
uint8_t *ColorConverter::initClip() {
static const signed kClipMin = -278;
static const signed kClipMax = 535;
diff --git a/media/libstagefright/foundation/AHandler.cpp b/media/libstagefright/foundation/AHandler.cpp
new file mode 100644
index 0000000..bd5f7e9
--- /dev/null
+++ b/media/libstagefright/foundation/AHandler.cpp
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AHandler"
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/AHandler.h>
+
+#include <media/stagefright/foundation/ALooperRoster.h>
+
+namespace android {
+
+sp<ALooper> AHandler::looper() {
+ extern ALooperRoster gLooperRoster;
+
+ return gLooperRoster.findLooper(id());
+}
+
+} // namespace android
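Note: with looper() resolving through the global ALooperRoster, a handler can reach the looper it is registered on from inside its message callback. A minimal usage sketch; MyHandler, runExample and the 'ping' what-code are made-up illustration names, not part of the patch:

    #define LOG_TAG "AHandlerExample"
    #include <utils/Log.h>

    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    struct MyHandler : public AHandler {
    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            // looper() resolves through the global ALooperRoster added above.
            LOGI("msg 0x%08x handled on looper %p", msg->what(), looper().get());
        }
    };

    void runExample() {
        sp<ALooper> looper = new ALooper;
        looper->start();                            // spawns the looper thread
        sp<MyHandler> handler = new MyHandler;
        ALooper::handler_id id = looper->registerHandler(handler);
        (new AMessage('ping', id))->post();         // delivered on the looper thread
    }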
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp
index 831fa2a..cd4f349 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/libstagefright/foundation/ALooper.cpp
@@ -31,8 +31,9 @@
ALooperRoster gLooperRoster;
struct ALooper::LooperThread : public Thread {
- LooperThread(ALooper *looper)
- : mLooper(looper) {
+ LooperThread(ALooper *looper, bool canCallJava)
+ : Thread(canCallJava),
+ mLooper(looper) {
}
virtual bool threadLoop() {
@@ -72,7 +73,7 @@
gLooperRoster.unregisterHandler(handlerID);
}
-status_t ALooper::start(bool runOnCallingThread) {
+status_t ALooper::start(bool runOnCallingThread, bool canCallJava) {
if (runOnCallingThread) {
{
Mutex::Autolock autoLock(mLock);
@@ -96,7 +97,7 @@
return INVALID_OPERATION;
}
- mThread = new LooperThread(this);
+ mThread = new LooperThread(this, canCallJava);
status_t err = mThread->run("ALooper");
if (err != OK) {
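Note: the new canCallJava argument is simply forwarded to the Thread base class, so a looper whose handlers call back into Java can run on a VM-attached thread. A sketch of the new call (the helper name is illustrative):

    sp<ALooper> startJavaCapableLooper() {
        sp<ALooper> looper = new ALooper;
        // canCallJava = true maps to Thread(true), so the looper thread is
        // registered with the Java VM and JNI callbacks are safe from handlers.
        CHECK_EQ(looper->start(false /* runOnCallingThread */, true /* canCallJava */), OK);
        return looper;
    }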
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 5bb1cf9..65f7593 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -54,10 +54,15 @@
Mutex::Autolock autoLock(mLock);
ssize_t index = mHandlers.indexOfKey(handlerID);
- CHECK(index >= 0);
+ CHECK_GE(index, 0);
const HandlerInfo &info = mHandlers.valueAt(index);
- info.mHandler->setID(0);
+
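+ // Handlers are now held weakly; promote() returns NULL if the AHandler
+ // has already been destroyed.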
+ sp<AHandler> handler = info.mHandler.promote();
+
+ if (handler != NULL) {
+ handler->setID(0);
+ }
mHandlers.removeItemsAt(index);
}
@@ -74,7 +79,18 @@
}
const HandlerInfo &info = mHandlers.valueAt(index);
- info.mLooper->post(msg, delayUs);
+
+ sp<ALooper> looper = info.mLooper.promote();
+
+ if (looper == NULL) {
+ LOG(WARNING) << "failed to post message. "
+ "Target handler still registered, but object gone.";
+
+ mHandlers.removeItemsAt(index);
+ return;
+ }
+
+ looper->post(msg, delayUs);
}
void ALooperRoster::deliverMessage(const sp<AMessage> &msg) {
@@ -86,15 +102,43 @@
ssize_t index = mHandlers.indexOfKey(msg->target());
if (index < 0) {
- LOG(WARNING) << "failed to deliver message. Target handler not registered.";
+ LOG(WARNING) << "failed to deliver message. "
+ << "Target handler not registered.";
return;
}
const HandlerInfo &info = mHandlers.valueAt(index);
- handler = info.mHandler;
+ handler = info.mHandler.promote();
+
+ if (handler == NULL) {
+ LOG(WARNING) << "failed to deliver message. "
+ "Target handler registered, but object gone.";
+
+ mHandlers.removeItemsAt(index);
+ return;
+ }
}
handler->onMessageReceived(msg);
}
+sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) {
+ Mutex::Autolock autoLock(mLock);
+
+ ssize_t index = mHandlers.indexOfKey(handlerID);
+
+ if (index < 0) {
+ return NULL;
+ }
+
+ sp<ALooper> looper = mHandlers.valueAt(index).mLooper.promote();
+
+ if (looper == NULL) {
+ mHandlers.removeItemsAt(index);
+ return NULL;
+ }
+
+ return looper;
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index dfd1ae3..26c6d42 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -16,6 +16,8 @@
#include "AMessage.h"
+#include <ctype.h>
+
#include "AAtomizer.h"
#include "ADebug.h"
#include "ALooperRoster.h"
@@ -238,4 +240,105 @@
return msg;
}
+static void appendIndent(AString *s, int32_t indent) {
+ static const char kWhitespace[] =
+ " "
+ " ";
+
+ CHECK_LT((size_t)indent, sizeof(kWhitespace));
+
+ s->append(kWhitespace, indent);
+}
+
+static bool isFourcc(uint32_t what) {
+ return isprint(what & 0xff)
+ && isprint((what >> 8) & 0xff)
+ && isprint((what >> 16) & 0xff)
+ && isprint((what >> 24) & 0xff);
+}
+
+AString AMessage::debugString(int32_t indent) const {
+ AString s = "AMessage(what = ";
+
+ AString tmp;
+ if (isFourcc(mWhat)) {
+ tmp = StringPrintf(
+ "'%c%c%c%c'",
+ (char)(mWhat >> 24),
+ (char)((mWhat >> 16) & 0xff),
+ (char)((mWhat >> 8) & 0xff),
+ (char)(mWhat & 0xff));
+ } else {
+ tmp = StringPrintf("0x%08x", mWhat);
+ }
+ s.append(tmp);
+
+ if (mTarget != 0) {
+ tmp = StringPrintf(", target = %d", mTarget);
+ s.append(tmp);
+ }
+ s.append(") = {\n");
+
+ for (size_t i = 0; i < mNumItems; ++i) {
+ const Item &item = mItems[i];
+
+ switch (item.mType) {
+ case kTypeInt32:
+ tmp = StringPrintf(
+ "int32_t %s = %d", item.mName, item.u.int32Value);
+ break;
+ case kTypeInt64:
+ tmp = StringPrintf(
+ "int64_t %s = %lld", item.mName, item.u.int64Value);
+ break;
+ case kTypeSize:
+ tmp = StringPrintf(
+ "size_t %s = %d", item.mName, item.u.sizeValue);
+ break;
+ case kTypeFloat:
+ tmp = StringPrintf(
+ "float %s = %f", item.mName, item.u.floatValue);
+ break;
+ case kTypeDouble:
+ tmp = StringPrintf(
+ "double %s = %f", item.mName, item.u.doubleValue);
+ break;
+ case kTypePointer:
+ tmp = StringPrintf(
+ "void *%s = %p", item.mName, item.u.ptrValue);
+ break;
+ case kTypeString:
+ tmp = StringPrintf(
+ "string %s = \"%s\"",
+ item.mName,
+ item.u.stringValue->c_str());
+ break;
+ case kTypeObject:
+ tmp = StringPrintf(
+ "RefBase *%s = %p", item.mName, item.u.refValue);
+ break;
+ case kTypeMessage:
+ tmp = StringPrintf(
+ "AMessage %s = %s",
+ item.mName,
+ static_cast<AMessage *>(
+ item.u.refValue)->debugString(
+ indent + strlen(item.mName) + 14).c_str());
+ break;
+ default:
+ TRESPASS();
+ }
+
+ appendIndent(&s, indent);
+ s.append(" ");
+ s.append(tmp);
+ s.append("\n");
+ }
+
+ appendIndent(&s, indent);
+ s.append("}");
+
+ return s;
+}
+
} // namespace android
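Note: debugString() produces a recursive, human-readable dump of a message and its payload. A small illustration; the field names and the 'stat' what-code are arbitrary, and the printed form (which follows the code above) is shown only approximately:

    void dumpExample(ALooper::handler_id target) {
        sp<AMessage> msg = new AMessage('stat', target);
        msg->setInt32("width", 1280);
        msg->setString("uri", "rtsp://example.com/stream");
        LOGI("%s", msg->debugString().c_str());
        // Output is along the lines of:
        // AMessage(what = 'stat', target = 1) = {
        //   int32_t width = 1280
        //   string uri = "rtsp://example.com/stream"
        // }
    }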
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index 73047e7..35eea7e 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -5,6 +5,7 @@
AAtomizer.cpp \
ABuffer.cpp \
ADebug.cpp \
+ AHandler.cpp \
ALooper.cpp \
ALooperRoster.cpp \
AMessage.cpp \
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 2a9f21b..8d0877c 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -24,6 +24,7 @@
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/TimeSource.h>
#include <utils/threads.h>
namespace android {
@@ -33,7 +34,6 @@
struct MediaBuffer;
struct MediaExtractor;
struct MediaSource;
-struct TimeSource;
struct NuCachedSource2;
struct ALooper;
@@ -102,6 +102,8 @@
AT_EOS = 32,
PREPARE_CANCELLED = 64,
CACHE_UNDERRUN = 128,
+ AUDIO_AT_EOS = 256,
+ VIDEO_AT_EOS = 512,
};
mutable Mutex mLock;
@@ -115,6 +117,7 @@
sp<ISurface> mISurface;
sp<MediaPlayerBase::AudioSink> mAudioSink;
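+ // Fallback clock, used when the audio track cannot drive the playback position.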
+ SystemTimeSource mSystemTimeSource;
TimeSource *mTimeSource;
String8 mUri;
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index ad5b0f9..6de761f 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -164,6 +164,10 @@
instance = mLiveNodes.editValueAt(index);
mLiveNodes.removeItemsAt(index);
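+ // Also drop the node's dispatcher entry so a dead client does not leave
+ // a stale dispatcher behind.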
+ index = mDispatchers.indexOfKey(instance->nodeID());
+ CHECK(index >= 0);
+ mDispatchers.removeItemsAt(index);
+
invalidateNodeID_l(instance->nodeID());
}
@@ -240,6 +244,11 @@
ssize_t index = mLiveNodes.indexOfKey(instance->observer()->asBinder());
CHECK(index >= 0);
mLiveNodes.removeItemsAt(index);
+
+ index = mDispatchers.indexOfKey(node);
+ CHECK(index >= 0);
+ mDispatchers.removeItemsAt(index);
+
instance->observer()->asBinder()->unlinkToDeath(this);
return instance->freeNode(mMaster);