Merge "HAVE_ANDROID_OS AUDIOFLINGER_SECURITY_ENABLED dead"
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 214cd4d..c295315 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -68,6 +68,10 @@
const char CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION[] = "max-exposure-compensation";
const char CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION[] = "min-exposure-compensation";
const char CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP[] = "exposure-compensation-step";
+const char CameraParameters::KEY_AUTO_EXPOSURE_LOCK[] = "auto-exposure-lock";
+const char CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED[] = "auto-exposure-lock-supported";
+const char CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK[] = "auto-whitebalance-lock";
+const char CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "auto-whitebalance-lock-supported";
const char CameraParameters::KEY_MAX_NUM_METERING_AREAS[] = "max-num-metering-areas";
const char CameraParameters::KEY_METERING_AREAS[] = "metering-areas";
const char CameraParameters::KEY_ZOOM[] = "zoom";
@@ -82,6 +86,7 @@
const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
const char CameraParameters::TRUE[] = "true";
+const char CameraParameters::FALSE[] = "false";
const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity";
// Values for white balance settings.
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index ceb254f..858681f 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -11,7 +11,7 @@
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
using namespace android;
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index 3c6dccc..7106bfa 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -20,122 +20,28 @@
#include <utils/Timers.h>
#include <camera/ICameraClient.h>
#include <gui/ISurfaceTexture.h>
+#include <system/camera.h>
namespace android {
-/*
- * A set of bit masks for specifying how the received preview frames are
- * handled before the previewCallback() call.
- *
- * The least significant 3 bits of an "int" value are used for this purpose:
- *
- * ..... 0 0 0
- * ^ ^ ^
- * | | |---------> determine whether the callback is enabled or not
- * | |-----------> determine whether the callback is one-shot or not
- * |-------------> determine whether the frame is copied out or not
- *
- * WARNING:
- * When a frame is sent directly without copying, it is the frame receiver's
- * responsiblity to make sure that the frame data won't get corrupted by
- * subsequent preview frames filled by the camera. This flag is recommended
- * only when copying out data brings significant performance price and the
- * handling/processing of the received frame data is always faster than
- * the preview frame rate so that data corruption won't occur.
- *
- * For instance,
- * 1. 0x00 disables the callback. In this case, copy out and one shot bits
- * are ignored.
- * 2. 0x01 enables a callback without copying out the received frames. A
- * typical use case is the Camcorder application to avoid making costly
- * frame copies.
- * 3. 0x05 is enabling a callback with frame copied out repeatedly. A typical
- * use case is the Camera application.
- * 4. 0x07 is enabling a callback with frame copied out only once. A typical use
- * case is the Barcode scanner application.
- */
-#define FRAME_CALLBACK_FLAG_ENABLE_MASK 0x01
-#define FRAME_CALLBACK_FLAG_ONE_SHOT_MASK 0x02
-#define FRAME_CALLBACK_FLAG_COPY_OUT_MASK 0x04
-
-// Typical use cases
-#define FRAME_CALLBACK_FLAG_NOOP 0x00
-#define FRAME_CALLBACK_FLAG_CAMCORDER 0x01
-#define FRAME_CALLBACK_FLAG_CAMERA 0x05
-#define FRAME_CALLBACK_FLAG_BARCODE_SCANNER 0x07
-
-// msgType in notifyCallback and dataCallback functions
-enum {
- CAMERA_MSG_ERROR = 0x0001,
- CAMERA_MSG_SHUTTER = 0x0002,
- CAMERA_MSG_FOCUS = 0x0004,
- CAMERA_MSG_ZOOM = 0x0008,
- CAMERA_MSG_PREVIEW_FRAME = 0x0010,
- CAMERA_MSG_VIDEO_FRAME = 0x0020,
- CAMERA_MSG_POSTVIEW_FRAME = 0x0040,
- CAMERA_MSG_RAW_IMAGE = 0x0080,
- CAMERA_MSG_COMPRESSED_IMAGE = 0x0100,
- CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x0200,
- CAMERA_MSG_ALL_MSGS = 0xFFFF
-};
-
-// cmdType in sendCommand functions
-enum {
- CAMERA_CMD_START_SMOOTH_ZOOM = 1,
- CAMERA_CMD_STOP_SMOOTH_ZOOM = 2,
- // Set the clockwise rotation of preview display (setPreviewDisplay) in
- // degrees. This affects the preview frames and the picture displayed after
- // snapshot. This method is useful for portrait mode applications. Note that
- // preview display of front-facing cameras is flipped horizontally before
- // the rotation, that is, the image is reflected along the central vertical
- // axis of the camera sensor. So the users can see themselves as looking
- // into a mirror.
- //
- // This does not affect the order of byte array of CAMERA_MSG_PREVIEW_FRAME,
- // CAMERA_MSG_VIDEO_FRAME, CAMERA_MSG_POSTVIEW_FRAME, CAMERA_MSG_RAW_IMAGE,
- // or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during
- // preview.
- CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
-
- // cmdType to disable/enable shutter sound.
- // In sendCommand passing arg1 = 0 will disable,
- // while passing arg1 = 1 will enable the shutter sound.
- CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
-
- // cmdType to play recording sound.
- CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
-};
-
-// camera fatal errors
-enum {
- CAMERA_ERROR_UNKNOWN = 1,
- CAMERA_ERROR_SERVER_DIED = 100
-};
-
-enum {
- CAMERA_FACING_BACK = 0, /* The facing of the camera is opposite to that of the screen. */
- CAMERA_FACING_FRONT = 1 /* The facing of the camera is the same as that of the screen. */
-};
-
struct CameraInfo {
-
/**
- * The direction that the camera faces to. It should be
- * CAMERA_FACING_BACK or CAMERA_FACING_FRONT.
+ * The direction that the camera faces to. It should be CAMERA_FACING_BACK
+ * or CAMERA_FACING_FRONT.
*/
int facing;
/**
* The orientation of the camera image. The value is the angle that the
- * camera image needs to be rotated clockwise so it shows correctly on
- * the display in its natural orientation. It should be 0, 90, 180, or 270.
+ * camera image needs to be rotated clockwise so it shows correctly on the
+ * display in its natural orientation. It should be 0, 90, 180, or 270.
*
* For example, suppose a device has a naturally tall screen. The
* back-facing camera sensor is mounted in landscape. You are looking at
* the screen. If the top side of the camera sensor is aligned with the
* right edge of the screen in natural orientation, the value should be
- * 90. If the top side of a front-facing camera sensor is aligned with
- * the right of the screen, the value should be 270.
+ * 90. If the top side of a front-facing camera sensor is aligned with the
+ * right of the screen, the value should be 270.
*/
int orientation;
};
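
Illustrative sketch, not part of the diff above: the CameraInfo block just kept here documents how `orientation` and `facing` interact, and the usual compensation (as described for the platform's setDisplayOrientation) turns them into a preview display rotation. The `degrees` device-rotation input is an assumption supplied by the caller; the facing constants now come from the newly included system/camera.h.

```cpp
#include <camera/Camera.h>   // pulls in system/camera.h for CAMERA_FACING_*

// Compute the clockwise rotation to apply to the preview display, given the
// current UI rotation in degrees (0, 90, 180 or 270).
static int previewDisplayOrientation(const android::CameraInfo &info, int degrees) {
    int result;
    if (info.facing == CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360;   // compensate for the front-camera mirror
    } else {                             // CAMERA_FACING_BACK
        result = (info.orientation - degrees + 360) % 360;
    }
    return result;
}
```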
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
deleted file mode 100644
index 3f34120..0000000
--- a/include/camera/CameraHardwareInterface.h
+++ /dev/null
@@ -1,266 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-
-#include <binder/IMemory.h>
-#include <ui/egl/android_natives.h>
-#include <utils/RefBase.h>
-#include <surfaceflinger/ISurface.h>
-#include <ui/android_native_buffer.h>
-#include <ui/GraphicBuffer.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-
-namespace android {
-
-typedef void (*notify_callback)(int32_t msgType,
- int32_t ext1,
- int32_t ext2,
- void* user);
-
-typedef void (*data_callback)(int32_t msgType,
- const sp<IMemory>& dataPtr,
- void* user);
-
-typedef void (*data_callback_timestamp)(nsecs_t timestamp,
- int32_t msgType,
- const sp<IMemory>& dataPtr,
- void* user);
-
-/**
- * CameraHardwareInterface.h defines the interface to the
- * camera hardware abstraction layer, used for setting and getting
- * parameters, live previewing, and taking pictures.
- *
- * It is a referenced counted interface with RefBase as its base class.
- * CameraService calls openCameraHardware() to retrieve a strong pointer to the
- * instance of this interface and may be called multiple times. The
- * following steps describe a typical sequence:
- *
- * -# After CameraService calls openCameraHardware(), getParameters() and
- * setParameters() are used to initialize the camera instance.
- * CameraService calls getPreviewHeap() to establish access to the
- * preview heap so it can be registered with SurfaceFlinger for
- * efficient display updating while in preview mode.
- * -# startPreview() is called. The camera instance then periodically
- * sends the message CAMERA_MSG_PREVIEW_FRAME (if enabled) each time
- * a new preview frame is available. If data callback code needs to use
- * this memory after returning, it must copy the data.
- *
- * Prior to taking a picture, CameraService calls autofocus(). When auto
- * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
- * which informs the application whether focusing was successful. The camera instance
- * only sends this message once and it is up to the application to call autoFocus()
- * again if refocusing is desired.
- *
- * CameraService calls takePicture() to request the camera instance take a
- * picture. At this point, if a shutter, postview, raw, and/or compressed callback
- * is desired, the corresponding message must be enabled. As with CAMERA_MSG_PREVIEW_FRAME,
- * any memory provided in a data callback must be copied if it's needed after returning.
- */
-class CameraHardwareInterface : public virtual RefBase {
-public:
- virtual ~CameraHardwareInterface() { }
-
- /** Set the ANativeWindow to which preview frames are sent */
- virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
-
- /** Set the notification and data callbacks */
- virtual void setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- void* user) = 0;
-
- /**
- * The following three functions all take a msgtype,
- * which is a bitmask of the messages defined in
- * include/ui/Camera.h
- */
-
- /**
- * Enable a message, or set of messages.
- */
- virtual void enableMsgType(int32_t msgType) = 0;
-
- /**
- * Disable a message, or a set of messages.
- *
- * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal
- * should not rely on its client to call releaseRecordingFrame() to release
- * video recording frames sent out by the cameral hal before and after the
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
- * modify/access any video recording frame after calling
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
- */
- virtual void disableMsgType(int32_t msgType) = 0;
-
- /**
- * Query whether a message, or a set of messages, is enabled.
- * Note that this is operates as an AND, if any of the messages
- * queried are off, this will return false.
- */
- virtual bool msgTypeEnabled(int32_t msgType) = 0;
-
- /**
- * Start preview mode.
- */
- virtual status_t startPreview() = 0;
-
- /**
- * Stop a previously started preview.
- */
- virtual void stopPreview() = 0;
-
- /**
- * Returns true if preview is enabled.
- */
- virtual bool previewEnabled() = 0;
-
- /**
- * Request the camera hal to store meta data or real YUV data in
- * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a
- * recording session. If it is not called, the default camera
- * hal behavior is to store real YUV data in the video buffers.
- *
- * This method should be called before startRecording() in order
- * to be effective.
- *
- * If meta data is stored in the video buffers, it is up to the
- * receiver of the video buffers to interpret the contents and
- * to find the actual frame data with the help of the meta data
- * in the buffer. How this is done is outside of the scope of
- * this method.
- *
- * Some camera hal may not support storing meta data in the video
- * buffers, but all camera hal should support storing real YUV data
- * in the video buffers. If the camera hal does not support storing
- * the meta data in the video buffers when it is requested to do
- * do, INVALID_OPERATION must be returned. It is very useful for
- * the camera hal to pass meta data rather than the actual frame
- * data directly to the video encoder, since the amount of the
- * uncompressed frame data can be very large if video size is large.
- *
- * @param enable if true to instruct the camera hal to store
- * meta data in the video buffers; false to instruct
- * the camera hal to store real YUV data in the video
- * buffers.
- *
- * @return OK on success.
- */
- virtual status_t storeMetaDataInBuffers(bool enable) {
- return enable? INVALID_OPERATION: OK;
- }
-
- /**
- * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
- * message is sent with the corresponding frame. Every record frame must be released
- * by a cameral hal client via releaseRecordingFrame() before the client calls
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
- * to manage the life-cycle of the video recording frames, and the client must
- * not modify/access any video recording frames.
- */
- virtual status_t startRecording() = 0;
-
- /**
- * Stop a previously started recording.
- */
- virtual void stopRecording() = 0;
-
- /**
- * Returns true if recording is enabled.
- */
- virtual bool recordingEnabled() = 0;
-
- /**
- * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
- *
- * It is camera hal client's responsibility to release video recording
- * frames sent out by the camera hal before the camera hal receives
- * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
- * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
- * responsibility of managing the life-cycle of the video recording
- * frames.
- */
- virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0;
-
- /**
- * Start auto focus, the notification callback routine is called
- * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
- * will be called again if another auto focus is needed.
- */
- virtual status_t autoFocus() = 0;
-
- /**
- * Cancels auto-focus function. If the auto-focus is still in progress,
- * this function will cancel it. Whether the auto-focus is in progress
- * or not, this function will return the focus position to the default.
- * If the camera does not support auto-focus, this is a no-op.
- */
- virtual status_t cancelAutoFocus() = 0;
-
- /**
- * Take a picture.
- */
- virtual status_t takePicture() = 0;
-
- /**
- * Cancel a picture that was started with takePicture. Calling this
- * method when no picture is being taken is a no-op.
- */
- virtual status_t cancelPicture() = 0;
-
- /**
- * Set the camera parameters. This returns BAD_VALUE if any parameter is
- * invalid or not supported. */
- virtual status_t setParameters(const CameraParameters& params) = 0;
-
- /** Return the camera parameters. */
- virtual CameraParameters getParameters() const = 0;
-
- /**
- * Send command to camera driver.
- */
- virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
-
- /**
- * Release the hardware resources owned by this object. Note that this is
- * *not* done in the destructor.
- */
- virtual void release() = 0;
-
- /**
- * Dump state of the camera hardware
- */
- virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
-};
-
-/**
- * The functions need to be provided by the camera HAL.
- *
- * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
- * and openCameraHardware() is 0 to N-1.
- */
-extern "C" int HAL_getNumberOfCameras();
-extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
-/* HAL should return NULL if it fails to open camera hardware. */
-extern "C" sp<CameraHardwareInterface> HAL_openCameraHardware(int cameraId);
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index db81721..dc5fc84 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -309,6 +309,54 @@
// 0.3333, EV is -2.
// Example value: "0.333333333" or "0.5". Read only.
static const char KEY_EXPOSURE_COMPENSATION_STEP[];
+ // The state of the auto-exposure lock. "true" means that
+ // auto-exposure is locked to its current value and will not
+ // change. "false" means the auto-exposure routine is free to
+ // change exposure values. If auto-exposure is already locked,
+ // setting this to true again has no effect (the driver will not
+ // recalculate exposure values). Changing exposure compensation
+ // settings will still affect the exposure settings while
+ // auto-exposure is locked. Stopping preview or taking a still
+ // image will release the lock. However, the lock can be
+ // re-enabled prior to preview being re-started, to keep the
+ // exposure values from the previous lock. In conjunction with
+ // exposure compensation, this allows for capturing multi-exposure
+ // brackets with known relative exposure values. Locking
+ // auto-exposure after open but before the first call to
+ // startPreview may result in severely over- or under-exposed
+ // images. The driver may independently enable the AE lock after
+ // auto-focus completes. If it does so, this key must have its
+ // value updated to reflect the lock's existence. Applications are
+ // free to release such a lock, to re-enable AE without restarting
+ // preview.
+ static const char KEY_AUTO_EXPOSURE_LOCK[];
+ // Whether locking the auto-exposure is supported. "true" means it is, and
+ // "false" or this key not existing means it is not supported.
+ static const char KEY_AUTO_EXPOSURE_LOCK_SUPPORTED[];
+ // The state of the auto-white balance lock. "true" means that
+ // auto-white balance is locked to its current value and will not
+ // change. "false" means the auto-white balance routine is free to
+ // change white balance values. If auto-white balance is already
+ // locked, setting this to true again has no effect (the driver
+ // will not recalculate white balance values). Stopping preview or
+ // taking a still image will release the lock. However, the lock
+ // can be re-enabled prior to preview being re-started, to keep
+ // the white balance values from the previous lock. In conjunction
+ // with exposure compensation, this allows for capturing
+ // multi-exposure brackets with fixed white balance. Locking
+ // auto-white balance after open but before the first call to
+ // startPreview may result in severely incorrect color. The
+ // driver may independently enable the AWB lock after auto-focus
+ // completes. If it does so, this key must have its value updated
+ // to reflect the lock's existence. Applications are free to
+ // release such a lock, to re-enable AWB without restarting
+ // preview.
+ static const char KEY_AUTO_WHITEBALANCE_LOCK[];
+ // Whether locking the auto-white balance is supported. "true"
+ // means it is, and "false" or this key not existing means it is
+ // not supported.
+ static const char KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED[];
+
// The maximum number of metering areas supported. This is the maximum
// length of KEY_METERING_AREAS.
// Example value: "0" or "2". Read only.
@@ -428,6 +476,7 @@
// Value for KEY_ZOOM_SUPPORTED or KEY_SMOOTH_ZOOM_SUPPORTED.
static const char TRUE[];
+ static const char FALSE[];
// Value for KEY_FOCUS_DISTANCES.
static const char FOCUS_DISTANCE_INFINITY[];
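
Illustrative sketch, not part of the diff above: the new AE/AWB lock keys are plain string parameters, so they can be driven through the existing CameraParameters accessors together with the TRUE/FALSE constants added in this change. The `params` object is assumed to have been obtained from the camera via getParameters(), and the result still has to be pushed back with setParameters().

```cpp
#include <string.h>
#include <camera/CameraParameters.h>

using namespace android;

static void lockExposureAndWhiteBalance(CameraParameters &params) {
    const char *aeSupported =
            params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
    if (aeSupported != NULL && strcmp(aeSupported, CameraParameters::TRUE) == 0) {
        // "true" freezes the current exposure; "false" releases the lock.
        params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::TRUE);
    }

    const char *awbSupported =
            params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
    if (awbSupported != NULL && strcmp(awbSupported, CameraParameters::TRUE) == 0) {
        params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::TRUE);
    }
}
```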
diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h
index 3330ebc..2632cbd 100644
--- a/include/drm/drm_framework_common.h
+++ b/include/drm/drm_framework_common.h
@@ -42,6 +42,7 @@
DRM_ERROR_DECRYPT_UNIT_NOT_INITIALIZED = ERROR_BASE - 4,
DRM_ERROR_DECRYPT = ERROR_BASE - 5,
DRM_ERROR_CANNOT_HANDLE = ERROR_BASE - 6,
+ DRM_ERROR_TAMPER_DETECTED = ERROR_BASE - 7,
DRM_NO_ERROR = NO_ERROR
};
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index def3612..baab2e8 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -30,7 +30,7 @@
#include <binder/IMemory.h>
#include <utils/threads.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index eb61a87..68cd188 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -21,7 +21,7 @@
#include <utils/threads.h>
#include <media/IAudioFlinger.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_policy.h>
/* XXX: Should be include by all the users instead */
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index f2107ec..ed26e63 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -150,6 +150,12 @@
*/
Vector<int> getImageEncodingQualityLevels(int cameraId) const;
+ /**
+ * Returns the start time offset (in ms) for the given camera Id.
+ * If the given camera Id does not exist, -1 will be returned.
+ */
+ int getStartTimeOffsetMs(int cameraId) const;
+
private:
enum {
// Camcorder profiles (high/low) and timelapse profiles (high/low)
@@ -332,6 +338,8 @@
static int getCameraId(const char **atts);
+ void addStartTimeOffset(int cameraId, const char **atts);
+
ImageEncodingQualityLevels* findImageEncodingQualityLevels(int cameraId) const;
void addImageEncodingQualityLevel(int cameraId, const char** atts);
@@ -408,6 +416,7 @@
Vector<VideoDecoderCap*> mVideoDecoders;
Vector<output_format> mEncoderOutputFileFormats;
Vector<ImageEncodingQualityLevels *> mImageEncodingQualityLevels;
+ KeyedVector<int, int> mStartTimeOffsets;
typedef struct {
bool mHasRefProfile; // Refers to an existing profile
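
Illustrative sketch, not part of the diff above: reading the per-camera start time offset declared above through the existing MediaProfiles singleton. The camera id is an assumed input.

```cpp
#include <media/MediaProfiles.h>

using namespace android;

static int startTimeOffsetForCamera(int cameraId) {
    MediaProfiles *profiles = MediaProfiles::getInstance();
    // Returns -1 when the camera id has no startOffsetMs entry in
    // media_profiles.xml (see the parser change in MediaProfiles.cpp below).
    return profiles->getStartTimeOffsetMs(cameraId);
}
```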
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5fe7722..7e22a24 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -20,7 +20,7 @@
#include <media/mediarecorder.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/include/media/mediametadataretriever.h b/include/media/mediametadataretriever.h
index 3e343e0..28f305d 100644
--- a/include/media/mediametadataretriever.h
+++ b/include/media/mediametadataretriever.h
@@ -52,6 +52,7 @@
METADATA_KEY_VIDEO_WIDTH = 18,
METADATA_KEY_VIDEO_HEIGHT = 19,
METADATA_KEY_BITRATE = 20,
+ METADATA_KEY_TIMED_TEXT_LANGUAGES = 21,
// Add more here...
};
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 241626c..cfa4cfd 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -125,6 +125,9 @@
MEDIA_PLAYER_PLAYBACK_COMPLETE = 1 << 7
};
+enum media_set_parameter_keys {
+ KEY_PARAMETER_TIMED_TEXT_TRACK_INDEX = 1000,
+};
// ----------------------------------------------------------------------------
// ref-counted object for callbacks
class MediaPlayerListener: virtual public RefBase
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 18a3c6a..36bf34e 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -159,6 +159,29 @@
MEDIA_RECORDER_TRACK_INFO_LIST_START = 1000,
MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS = 1000,
MEDIA_RECORDER_TRACK_INFO_PROGRESS_IN_TIME = 1001,
+ MEDIA_RECORDER_TRACK_INFO_TYPE = 1002,
+ MEDIA_RECORDER_TRACK_INFO_DURATION_MS = 1003,
+
+ // The time to measure the max chunk duration
+ MEDIA_RECORDER_TRACK_INFO_MAX_CHUNK_DUR_MS = 1004,
+
+ MEDIA_RECORDER_TRACK_INFO_ENCODED_FRAMES = 1005,
+
+ // The time to measure how well the audio and video
+ // track data is interleaved.
+ MEDIA_RECORDER_TRACK_INTER_CHUNK_TIME_MS = 1006,
+
+ // The time to measure system response. Note that
+ // the delay does not include the intentional delay
+ // we use to eliminate the recording sound.
+ MEDIA_RECORDER_TRACK_INFO_INITIAL_DELAY_MS = 1007,
+
+ // The time used to compensate for initial A/V sync.
+ MEDIA_RECORDER_TRACK_INFO_START_OFFSET_MS = 1008,
+
+ // Total number of bytes of the media data.
+ MEDIA_RECORDER_TRACK_INFO_DATA_KBYTES = 1009,
+
MEDIA_RECORDER_TRACK_INFO_LIST_END = 2000,
};
diff --git a/include/media/stagefright/AACWriter.h b/include/media/stagefright/AACWriter.h
new file mode 100644
index 0000000..fa3ab8a
--- /dev/null
+++ b/include/media/stagefright/AACWriter.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AAC_WRITER_H_
+#define AAC_WRITER_H_
+
+#include <media/stagefright/MediaWriter.h>
+#include <utils/threads.h>
+
+namespace android {
+
+struct MediaSource;
+struct MetaData;
+
+struct AACWriter : public MediaWriter {
+ AACWriter(const char *filename);
+ AACWriter(int fd);
+
+ status_t initCheck() const;
+
+ virtual status_t addSource(const sp<MediaSource> &source);
+ virtual bool reachedEOS();
+ virtual status_t start(MetaData *params = NULL);
+ virtual status_t stop();
+ virtual status_t pause();
+
+protected:
+ virtual ~AACWriter();
+
+private:
+ enum {
+ kAdtsHeaderLength = 7, // # of bytes for the adts header
+ kSamplesPerFrame = 1024, // # of samples in a frame
+ };
+
+ int mFd;
+ status_t mInitCheck;
+ sp<MediaSource> mSource;
+ bool mStarted;
+ volatile bool mPaused;
+ volatile bool mResumed;
+ volatile bool mDone;
+ volatile bool mReachedEOS;
+ pthread_t mThread;
+ int64_t mEstimatedSizeBytes;
+ int64_t mEstimatedDurationUs;
+ int32_t mChannelCount;
+ int32_t mSampleRate;
+ int32_t mFrameDurationUs;
+
+ static void *ThreadWrapper(void *);
+ status_t threadFunc();
+ bool exceedsFileSizeLimit();
+ bool exceedsFileDurationLimit();
+ status_t writeAdtsHeader(uint32_t frameLength);
+
+ DISALLOW_EVIL_CONSTRUCTORS(AACWriter);
+};
+
+} // namespace android
+
+#endif // AAC_WRITER_H_
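
Illustrative sketch mirroring how StagefrightRecorder drives the new writer later in this change: wrap an AAC encoder MediaSource and stream ADTS frames to an already-open file descriptor. The `fd` and `aacEncoder` inputs are assumed to be set up by the caller.

```cpp
#include <media/stagefright/AACWriter.h>
#include <media/stagefright/MediaSource.h>
#include <utils/Errors.h>

using namespace android;

static status_t recordAdts(int fd, const sp<MediaSource> &aacEncoder) {
    sp<AACWriter> writer = new AACWriter(fd);
    status_t err = writer->initCheck();
    if (err != OK) {
        return err;
    }
    err = writer->addSource(aacEncoder);
    if (err != OK) {
        return err;
    }
    err = writer->start();           // spawns the writer thread
    if (err != OK) {
        return err;
    }
    // ... record until the caller decides to stop ...
    return writer->stop();
}
```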
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index 20a9e16..19bd31b 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -24,7 +24,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <utils/List.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index d1ecaaf..946a0aa 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -84,7 +84,7 @@
OMX_U32 nPortIndex;
OMX_PTR pAppPrivate;
OMX_BUFFERHEADERTYPE **bufferHeader;
- const sp<android_native_buffer_t>& nativeBuffer;
+ const sp<ANativeWindowBuffer>& nativeBuffer;
};
// A pointer to this struct is passed to OMX_GetParameter when the extension
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 15f86ea..904ce2a 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -55,6 +55,10 @@
status_t setInterleaveDuration(uint32_t duration);
int32_t getTimeScale() const { return mTimeScale; }
+ status_t setGeoData(int latitudex10000, int longitudex10000);
+ void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
+ int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
+
protected:
virtual ~MPEG4Writer();
@@ -79,6 +83,10 @@
uint32_t mInterleaveDurationUs;
int32_t mTimeScale;
int64_t mStartTimestampUs;
+ int mLatitudex10000;
+ int mLongitudex10000;
+ bool mAreGeoTagsAvailable;
+ int32_t mStartTimeOffsetMs;
Mutex mLock;
@@ -108,6 +116,13 @@
struct ChunkInfo {
Track *mTrack; // Owner
List<Chunk> mChunks; // Remaining chunks to be written
+
+ // Previous chunk timestamp that has been written
+ int64_t mPrevChunkTimestampUs;
+
+ // Max time interval between neighboring chunks
+ int64_t mMaxInterChunkDurUs;
+
};
bool mIsFirstChunk;
@@ -159,6 +174,14 @@
bool isFileStreamable() const;
void trackProgressStatus(size_t trackId, int64_t timeUs, status_t err = OK);
void writeCompositionMatrix(int32_t degrees);
+ void writeMvhdBox(int64_t durationUs);
+ void writeMoovBox(int64_t durationUs);
+ void writeFtypBox(MetaData *param);
+ void writeUdtaBox();
+ void writeGeoDataBox();
+ void writeLatitude(int degreex10000);
+ void writeLongitude(int degreex10000);
+ void sendSessionSummary();
MPEG4Writer(const MPEG4Writer &);
MPEG4Writer &operator=(const MPEG4Writer &);
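
Illustrative sketch, not part of the diff above: setGeoData() takes latitude and longitude in degrees scaled by 10000 (fixed point), matching the range checks added in StagefrightRecorder (latitude within +/-900000, longitude within +/-1800000). The conversion helper below is hypothetical; the writer instance is assumed to be configured elsewhere.

```cpp
#include <media/stagefright/MPEG4Writer.h>
#include <utils/Errors.h>

using namespace android;

static status_t tagLocation(const sp<MPEG4Writer> &writer,
                            float latDegrees, float longDegrees) {
    // Degrees scaled by 10000, truncated toward zero.
    int latitudex10000  = static_cast<int>(latDegrees  * 10000);
    int longitudex10000 = static_cast<int>(longDegrees * 10000);
    return writer->setGeoData(latitudex10000, longitudex10000);
}
```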
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 6a21627..5e471c1 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -49,6 +49,8 @@
extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
+extern const char *MEDIA_MIMETYPE_TEXT_3GPP;
+
} // namespace android
#endif // MEDIA_DEFS_H_
diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h
index 7cc993c..21d00b8 100644
--- a/include/media/stagefright/MediaErrors.h
+++ b/include/media/stagefright/MediaErrors.h
@@ -52,6 +52,7 @@
ERROR_DRM_DECRYPT_UNIT_NOT_INITIALIZED = DRM_ERROR_BASE - 4,
ERROR_DRM_DECRYPT = DRM_ERROR_BASE - 5,
ERROR_DRM_CANNOT_HANDLE = DRM_ERROR_BASE - 6,
+ ERROR_DRM_TAMPER_DETECTED = DRM_ERROR_BASE - 7,
// Heartbeat Error Codes
HEARTBEAT_ERROR_BASE = -3000,
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index f7f2235..4044c5d 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -114,6 +114,9 @@
// An indication that a video buffer has been rendered.
kKeyRendered = 'rend', // bool (int32_t)
+
+ // The language code for this media
+ kKeyMediaLanguage = 'lang', // cstring
};
enum {
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 93b5d24..70daafa 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -214,6 +214,7 @@
void setAMRFormat(bool isWAMR, int32_t bitRate);
void setAACFormat(int32_t numChannels, int32_t sampleRate, int32_t bitRate);
+ void setG711Format(int32_t numChannels);
status_t setVideoPortFormatType(
OMX_U32 portIndex,
diff --git a/include/media/stagefright/openmax/OMX_Video.h b/include/media/stagefright/openmax/OMX_Video.h
index 2738bdc..4f8485d 100644
--- a/include/media/stagefright/openmax/OMX_Video.h
+++ b/include/media/stagefright/openmax/OMX_Video.h
@@ -85,6 +85,7 @@
OMX_VIDEO_CodingRV, /**< all versions of Real Video */
OMX_VIDEO_CodingAVC, /**< H.264/AVC */
OMX_VIDEO_CodingMJPEG, /**< Motion JPEG */
+ OMX_VIDEO_CodingVPX, /**< Google VPX, formerly known as On2 VP8 */
OMX_VIDEO_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_CodingVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_CodingMax = 0x7FFFFFFF
diff --git a/include/private/opengles/gl_context.h b/include/private/opengles/gl_context.h
index 72416c1..6b1fa77 100644
--- a/include/private/opengles/gl_context.h
+++ b/include/private/opengles/gl_context.h
@@ -31,8 +31,6 @@
#include <GLES/gl.h>
#include <GLES/glext.h>
-struct android_native_buffer_t;
-
namespace android {
@@ -603,13 +601,6 @@
void (*renderTriangle)(GL, vertex_t*, vertex_t*, vertex_t*);
};
-struct copybits_context_t {
- // A handle to the blit engine, if it exists, else NULL.
- int32_t minScale;
- int32_t maxScale;
- android_native_buffer_t* drawSurfaceBuffer;
-};
-
struct ogles_context_t {
context_t rasterizer;
array_machine_t arrays __attribute__((aligned(32)));
@@ -634,13 +625,6 @@
EGLSurfaceManager* surfaceManager;
EGLBufferObjectManager* bufferObjectManager;
- // copybits is only used if LIBAGL_USE_GRALLOC_COPYBITS is
- // defined, but it is always present because ogles_context_t is a public
- // struct that is used by clients of libagl. We want the size and offsets
- // to stay the same, whether or not LIBAGL_USE_GRALLOC_COPYBITS is defined.
-
- copybits_context_t copybits;
-
GLenum error;
static inline ogles_context_t* get() {
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 8438714..446e3df 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -37,7 +37,7 @@
#include <utils/Timers.h>
#include <utils/Atomic.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <cutils/bitops.h>
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index e08a55b..8a180d8 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -23,7 +23,7 @@
#include <media/IAudioPolicyService.h>
#include <math.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
// ----------------------------------------------------------------------------
// the sim build doesn't have gettid
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 2673df9..7520ed9 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -39,7 +39,7 @@
#include <cutils/bitops.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_policy.h>
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 88a9ae0..9fbcee0 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -25,7 +25,7 @@
#include <media/IAudioPolicyService.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index e6f3a33..069bbb7 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -356,6 +356,18 @@
return atoi(atts[1]);
}
+void MediaProfiles::addStartTimeOffset(int cameraId, const char** atts)
+{
+ int offsetTimeMs = 700;
+ if (atts[2]) {
+ CHECK(!strcmp("startOffsetMs", atts[2]));
+ offsetTimeMs = atoi(atts[3]);
+ }
+
+ LOGV("%s: cameraId=%d, offset=%d ms", __func__, cameraId, offsetTimeMs);
+ mStartTimeOffsets.replaceValueFor(cameraId, offsetTimeMs);
+}
+
/*static*/ void
MediaProfiles::startElementHandler(void *userData, const char *name, const char **atts)
{
@@ -380,6 +392,7 @@
profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts));
} else if (strcmp("CamcorderProfiles", name) == 0) {
profiles->mCurrentCameraId = getCameraId(atts);
+ profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts);
} else if (strcmp("EncoderProfile", name) == 0) {
profiles->mCamcorderProfiles.add(
createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
@@ -997,6 +1010,16 @@
return result;
}
+int MediaProfiles::getStartTimeOffsetMs(int cameraId) const {
+ int offsetTimeMs = -1;
+ ssize_t index = mStartTimeOffsets.indexOfKey(cameraId);
+ if (index >= 0) {
+ offsetTimeMs = mStartTimeOffsets.valueFor(cameraId);
+ }
+ LOGV("%s: offsetTime=%d ms and cameraId=%d", offsetTimeMs, cameraId);
+ return offsetTimeMs;
+}
+
MediaProfiles::~MediaProfiles()
{
CHECK("destructor should never be called" == 0);
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index 4e22175..28c8642 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -135,20 +135,21 @@
}
if (type == DT_REG || type == DT_DIR) {
if (type == DT_DIR) {
+ bool childNoMedia = noMedia;
// set noMedia flag on directories with a name that starts with '.'
// for example, the Mac ".Trashes" directory
if (name[0] == '.')
- noMedia = true;
+ childNoMedia = true;
// report the directory to the client
if (stat(path, &statbuf) == 0) {
- client.scanFile(path, statbuf.st_mtime, 0, true, noMedia);
+ client.scanFile(path, statbuf.st_mtime, 0, true, childNoMedia);
}
// and now process its contents
strcat(fileSpot, "/");
int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client,
- noMedia, exceptionCheck, exceptionEnv);
+ childNoMedia, exceptionCheck, exceptionEnv);
if (err) {
// pass exceptions up - ignore other errors
if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 28e07ff..7b7ba74 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -37,7 +37,7 @@
#include <utils/KeyedVector.h>
#include <utils/String8.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 3b2cf10..d51c946 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -53,7 +53,7 @@
#include <media/AudioTrack.h>
#include <media/MemoryLeakTrackUtil.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <private/android_filesystem_config.h>
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 6c4071f..8bab471 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -30,7 +30,7 @@
#include <media/MediaPlayerInterface.h>
#include <media/Metadata.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 5a47384..29cc019 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -35,7 +35,7 @@
#include <media/AudioTrack.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include "MediaRecorderClient.h"
#include "MediaPlayerService.h"
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
index 37a3db3..589c625 100644
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ b/media/libmediaplayerservice/MidiFile.cpp
@@ -30,7 +30,7 @@
#include <sys/types.h>
#include <sys/stat.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include "MidiFile.h"
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 01fbea1..978571c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -26,6 +26,7 @@
#include <media/IMediaPlayerService.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
+#include <media/stagefright/AACWriter.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/VideoSourceDownSampler.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
@@ -46,7 +47,7 @@
#include <ctype.h>
#include <unistd.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include "ARTPWriter.h"
@@ -592,6 +593,26 @@
return OK;
}
+status_t StagefrightRecorder::setParamGeoDataLongitude(
+ int32_t longitudex10000) {
+
+ if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
+ return BAD_VALUE;
+ }
+ mLongitudex10000 = longitudex10000;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamGeoDataLatitude(
+ int32_t latitudex10000) {
+
+ if (latitudex10000 > 900000 || latitudex10000 < -900000) {
+ return BAD_VALUE;
+ }
+ mLatitudex10000 = latitudex10000;
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -620,6 +641,16 @@
if (safe_strtoi32(value.string(), &use64BitOffset)) {
return setParam64BitFileOffset(use64BitOffset != 0);
}
+ } else if (key == "param-geotag-longitude") {
+ int32_t longitudex10000;
+ if (safe_strtoi32(value.string(), &longitudex10000)) {
+ return setParamGeoDataLongitude(longitudex10000);
+ }
+ } else if (key == "param-geotag-latitude") {
+ int32_t latitudex10000;
+ if (safe_strtoi32(value.string(), &latitudex10000)) {
+ return setParamGeoDataLatitude(latitudex10000);
+ }
} else if (key == "param-track-time-status") {
int64_t timeDurationUs;
if (safe_strtoi64(value.string(), &timeDurationUs)) {
@@ -872,15 +903,21 @@
}
status_t StagefrightRecorder::startAACRecording() {
- CHECK(mOutputFormat == OUTPUT_FORMAT_AAC_ADIF ||
- mOutputFormat == OUTPUT_FORMAT_AAC_ADTS);
+ // FIXME:
+ // Add support for OUTPUT_FORMAT_AAC_ADIF
+ CHECK(mOutputFormat == OUTPUT_FORMAT_AAC_ADTS);
CHECK(mAudioEncoder == AUDIO_ENCODER_AAC);
CHECK(mAudioSource != AUDIO_SOURCE_CNT);
- CHECK(0 == "AACWriter is not implemented yet");
+ mWriter = new AACWriter(mOutputFd);
+ status_t status = startRawAudioRecording();
+ if (status != OK) {
+ mWriter.clear();
+ mWriter = NULL;
+ }
- return OK;
+ return status;
}
status_t StagefrightRecorder::startAMRRecording() {
@@ -902,6 +939,16 @@
}
}
+ mWriter = new AMRWriter(mOutputFd);
+ status_t status = startRawAudioRecording();
+ if (status != OK) {
+ mWriter.clear();
+ mWriter = NULL;
+ }
+ return status;
+}
+
+status_t StagefrightRecorder::startRawAudioRecording() {
if (mAudioSource >= AUDIO_SOURCE_CNT) {
LOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
@@ -917,7 +964,7 @@
return UNKNOWN_ERROR;
}
- mWriter = new AMRWriter(mOutputFd);
+ CHECK(mWriter != 0);
mWriter->addSource(audioEncoder);
if (mMaxFileDurationUs != 0) {
@@ -1395,6 +1442,10 @@
reinterpret_cast<MPEG4Writer *>(writer.get())->
setInterleaveDuration(mInterleaveDurationUs);
}
+ if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) {
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setGeoData(mLatitudex10000, mLongitudex10000);
+ }
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
}
@@ -1402,6 +1453,12 @@
writer->setMaxFileSize(mMaxFileSizeBytes);
}
+ mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+ if (mStartTimeOffsetMs > 0) {
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setStartTimeOffsetMs(mStartTimeOffsetMs);
+ }
+
writer->setListener(mListener);
*mediaWriter = writer;
return OK;
@@ -1608,6 +1665,7 @@
mAudioTimeScale = -1;
mVideoTimeScale = -1;
mCameraId = 0;
+ mStartTimeOffsetMs = -1;
mVideoEncoderProfile = -1;
mVideoEncoderLevel = -1;
mMaxFileDurationUs = 0;
@@ -1621,6 +1679,8 @@
mIsMetaDataStoredInVideoBuffers = false;
mEncoderProfiles = MediaProfiles::getInstance();
mRotationDegrees = 0;
+ mLatitudex10000 = -3600000;
+ mLongitudex10000 = -3600000;
mOutputFd = -1;
mOutputFdAux = -1;
@@ -1694,6 +1754,8 @@
result.append(buffer);
snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId);
result.append(buffer);
+ snprintf(buffer, SIZE, " Start time offset (ms): %d\n", mStartTimeOffsetMs);
+ result.append(buffer);
snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder);
result.append(buffer);
snprintf(buffer, SIZE, " Encoder profile: %d\n", mVideoEncoderProfile);
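
Illustrative sketch, not part of the diff above: the key names come from the setParameter() change earlier in this file, while the "key=value" parameter-string form is an assumption about the existing MediaRecorder::setParameters contract. It tags a recording with a location of 37.4220 N, -122.0840 E, scaled by 10000 as the new setters require.

```cpp
#include <media/mediarecorder.h>
#include <utils/String8.h>

using namespace android;

static void setRecordingLocation(const sp<MediaRecorder> &recorder) {
    recorder->setParameters(String8("param-geotag-latitude=374220"));
    recorder->setParameters(String8("param-geotag-longitude=-1220840"));
}
```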
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 3d463ea..aa67aa7 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -22,7 +22,7 @@
#include <camera/CameraParameters.h>
#include <utils/String8.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
@@ -69,6 +69,7 @@
sp<Surface> mPreviewSurface;
sp<IMediaRecorderClient> mListener;
sp<MediaWriter> mWriter, mWriterAux;
+ int mOutputFd, mOutputFdAux;
sp<AudioSource> mAudioSourceNode;
audio_source_t mAudioSource;
@@ -96,6 +97,9 @@
int64_t mMaxFileDurationUs;
int64_t mTrackEveryTimeDurationUs;
int32_t mRotationDegrees; // Clockwise
+ int32_t mLatitudex10000;
+ int32_t mLongitudex10000;
+ int32_t mStartTimeOffsetMs;
bool mCaptureTimeLapse;
int64_t mTimeBetweenTimeLapseFrameCaptureUs;
@@ -104,7 +108,6 @@
sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
String8 mParams;
- int mOutputFd, mOutputFdAux;
bool mIsMetaDataStoredInVideoBuffers;
MediaProfiles *mEncoderProfiles;
@@ -123,6 +126,7 @@
status_t startMPEG4Recording();
status_t startAMRRecording();
status_t startAACRecording();
+ status_t startRawAudioRecording();
status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
@@ -159,6 +163,8 @@
status_t setParamMaxFileDurationUs(int64_t timeUs);
status_t setParamMaxFileSizeBytes(int64_t bytes);
status_t setParamMovieTimeScale(int32_t timeScale);
+ status_t setParamGeoDataLongitude(int32_t longitudex10000);
+ status_t setParamGeoDataLatitude(int32_t latitudex10000);
void clipVideoBitRate();
void clipVideoFrameRate();
void clipVideoFrameWidth();
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index c20e279..e761509 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -8,7 +8,6 @@
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
NuPlayerStreamListener.cpp \
- DecoderWrapper.cpp \
StreamingSource.cpp \
LOCAL_C_INCLUDES := \
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
deleted file mode 100644
index 802d1fb..0000000
--- a/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
+++ /dev/null
@@ -1,576 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DecoderWrapper"
-#include <utils/Log.h>
-
-#include "DecoderWrapper.h"
-
-#include "AACDecoder.h"
-
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-struct DecoderWrapper::WrapperSource : public MediaSource {
- WrapperSource(
- const sp<MetaData> &meta,
- const sp<AMessage> ¬ify);
-
- virtual status_t start(MetaData *params);
- virtual status_t stop();
- virtual sp<MetaData> getFormat();
-
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options);
-
- void queueBuffer(const sp<ABuffer> &buffer);
- void queueEOS(status_t finalResult);
- void clear();
-
-protected:
- virtual ~WrapperSource();
-
-private:
- Mutex mLock;
- Condition mCondition;
-
- sp<MetaData> mMeta;
- sp<AMessage> mNotify;
-
- List<sp<ABuffer> > mQueue;
- status_t mFinalResult;
-
- DISALLOW_EVIL_CONSTRUCTORS(WrapperSource);
-};
-
-DecoderWrapper::WrapperSource::WrapperSource(
- const sp<MetaData> &meta, const sp<AMessage> ¬ify)
- : mMeta(meta),
- mNotify(notify),
- mFinalResult(OK) {
-}
-
-DecoderWrapper::WrapperSource::~WrapperSource() {
-}
-
-status_t DecoderWrapper::WrapperSource::start(MetaData *params) {
- return OK;
-}
-
-status_t DecoderWrapper::WrapperSource::stop() {
- return OK;
-}
-
-sp<MetaData> DecoderWrapper::WrapperSource::getFormat() {
- return mMeta;
-}
-
-status_t DecoderWrapper::WrapperSource::read(
- MediaBuffer **out, const ReadOptions *options) {
- Mutex::Autolock autoLock(mLock);
-
- bool requestedBuffer = false;
-
- while (mQueue.empty() && mFinalResult == OK) {
- if (!requestedBuffer) {
- mNotify->dup()->post();
- requestedBuffer = true;
- }
-
- mCondition.wait(mLock);
- }
-
- if (mQueue.empty()) {
- return mFinalResult;
- }
-
- sp<ABuffer> src = *mQueue.begin();
- mQueue.erase(mQueue.begin());
-
- MediaBuffer *dst = new MediaBuffer(src->size());
- memcpy(dst->data(), src->data(), src->size());
-
- int64_t timeUs;
- CHECK(src->meta()->findInt64("timeUs", &timeUs));
-
- dst->meta_data()->setInt64(kKeyTime, timeUs);
-
- *out = dst;
-
- return OK;
-}
-
-void DecoderWrapper::WrapperSource::queueBuffer(const sp<ABuffer> &buffer) {
- Mutex::Autolock autoLock(mLock);
- mQueue.push_back(buffer);
- mCondition.broadcast();
-}
-
-void DecoderWrapper::WrapperSource::queueEOS(status_t finalResult) {
- CHECK_NE(finalResult, (status_t)OK);
-
- Mutex::Autolock autoLock(mLock);
- mFinalResult = finalResult;
- mCondition.broadcast();
-}
-
-void DecoderWrapper::WrapperSource::clear() {
- Mutex::Autolock autoLock(mLock);
- mQueue.clear();
- mFinalResult = OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-struct DecoderWrapper::WrapperReader : public AHandler {
- WrapperReader(
- const sp<MediaSource> &decoder,
- const sp<AMessage> ¬ify);
-
- void start();
- void stop();
- void readMore(bool flush = false);
-
-protected:
- virtual ~WrapperReader();
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
- enum {
- kWhatRead
- };
-
- sp<MediaSource> mDecoder;
- sp<AMessage> mNotify;
- bool mEOS;
- bool mSentFormat;
-
- void sendFormatChange();
-
- DISALLOW_EVIL_CONSTRUCTORS(WrapperReader);
-};
-
-DecoderWrapper::WrapperReader::WrapperReader(
- const sp<MediaSource> &decoder, const sp<AMessage> ¬ify)
- : mDecoder(decoder),
- mNotify(notify),
- mEOS(false),
- mSentFormat(false) {
-}
-
-DecoderWrapper::WrapperReader::~WrapperReader() {
-}
-
-void DecoderWrapper::WrapperReader::start() {
- CHECK_EQ(mDecoder->start(), (status_t)OK);
- readMore();
-}
-
-void DecoderWrapper::WrapperReader::stop() {
- CHECK_EQ(mDecoder->stop(), (status_t)OK);
-}
-
-void DecoderWrapper::WrapperReader::readMore(bool flush) {
- if (!flush && mEOS) {
- return;
- }
-
- sp<AMessage> msg = new AMessage(kWhatRead, id());
- msg->setInt32("flush", static_cast<int32_t>(flush));
- msg->post();
-}
-
-void DecoderWrapper::WrapperReader::onMessageReceived(
- const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatRead:
- {
- int32_t flush;
- CHECK(msg->findInt32("flush", &flush));
-
- MediaSource::ReadOptions options;
- if (flush) {
- // Dummy seek
- options.setSeekTo(0);
- mEOS = false;
- }
-
- CHECK(!mEOS);
-
- MediaBuffer *src;
- status_t err = mDecoder->read(&src, &options);
-
- if (err == OK) {
- if (!mSentFormat) {
- sendFormatChange();
- mSentFormat = true;
- }
-
- sp<AMessage> notify = mNotify->dup();
-
- sp<AMessage> realNotify;
- CHECK(notify->findMessage("real-notify", &realNotify));
-
- realNotify->setInt32("what", ACodec::kWhatDrainThisBuffer);
-
- sp<ABuffer> dst = new ABuffer(src->range_length());
- memcpy(dst->data(),
- (const uint8_t *)src->data() + src->range_offset(),
- src->range_length());
-
- int64_t timeUs;
- CHECK(src->meta_data()->findInt64(kKeyTime, &timeUs));
- src->release();
- src = NULL;
-
- dst->meta()->setInt64("timeUs", timeUs);
-
- realNotify->setObject("buffer", dst);
-
- notify->post();
- } else if (err == INFO_FORMAT_CHANGED) {
- sendFormatChange();
-
- readMore(false /* flush */);
- } else {
- sp<AMessage> notify = mNotify->dup();
-
- sp<AMessage> realNotify;
- CHECK(notify->findMessage("real-notify", &realNotify));
-
- realNotify->setInt32("what", ACodec::kWhatEOS);
- mEOS = true;
-
- notify->post();
- }
- break;
- }
-
- default:
- TRESPASS();
- break;
- }
-}
-
-void DecoderWrapper::WrapperReader::sendFormatChange() {
- sp<AMessage> notify = mNotify->dup();
-
- sp<AMessage> realNotify;
- CHECK(notify->findMessage("real-notify", &realNotify));
-
- realNotify->setInt32("what", ACodec::kWhatOutputFormatChanged);
-
- sp<MetaData> meta = mDecoder->getFormat();
-
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- realNotify->setString("mime", mime);
-
- if (!strncasecmp("audio/", mime, 6)) {
- int32_t numChannels;
- CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
-
- int32_t sampleRate;
- CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
-
- realNotify->setInt32("channel-count", numChannels);
- realNotify->setInt32("sample-rate", sampleRate);
- } else {
- CHECK(!strncasecmp("video/", mime, 6));
-
- int32_t width, height;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
-
- realNotify->setInt32("width", width);
- realNotify->setInt32("height", height);
-
- int32_t cropLeft, cropTop, cropRight, cropBottom;
- if (!meta->findRect(
- kKeyCropRect,
- &cropLeft, &cropTop, &cropRight, &cropBottom)) {
- cropLeft = 0;
- cropTop = 0;
- cropRight = width - 1;
- cropBottom = height - 1;
- }
-
- realNotify->setRect("crop", cropLeft, cropTop, cropRight, cropBottom);
- }
-
- notify->post();
-
- mSentFormat = true;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DecoderWrapper::DecoderWrapper()
- : mNumOutstandingInputBuffers(0),
- mNumOutstandingOutputBuffers(0),
- mNumPendingDecodes(0),
- mFlushing(false) {
-}
-
-DecoderWrapper::~DecoderWrapper() {
-}
-
-void DecoderWrapper::setNotificationMessage(const sp<AMessage> &msg) {
- mNotify = msg;
-}
-
-void DecoderWrapper::initiateSetup(const sp<AMessage> &msg) {
- msg->setWhat(kWhatSetup);
- msg->setTarget(id());
- msg->post();
-}
-
-void DecoderWrapper::initiateShutdown() {
- (new AMessage(kWhatShutdown, id()))->post();
-}
-
-void DecoderWrapper::signalFlush() {
- (new AMessage(kWhatFlush, id()))->post();
-}
-
-void DecoderWrapper::signalResume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
-void DecoderWrapper::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatSetup:
- onSetup(msg);
- break;
-
- case kWhatShutdown:
- onShutdown();
- break;
-
- case kWhatInputDataRequested:
- {
- postFillBuffer();
- ++mNumOutstandingInputBuffers;
- break;
- }
-
- case kWhatInputBufferFilled:
- {
- CHECK_GT(mNumOutstandingInputBuffers, 0);
- --mNumOutstandingInputBuffers;
-
- if (mFlushing) {
- mSource->queueEOS(INFO_DISCONTINUITY);
-
- completeFlushIfPossible();
- break;
- }
-
- sp<RefBase> obj;
- if (!msg->findObject("buffer", &obj)) {
- int32_t err = OK;
- CHECK(msg->findInt32("err", &err));
-
- mSource->queueEOS(err);
- break;
- }
-
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
-
- mSource->queueBuffer(buffer);
- break;
- }
-
- case kWhatFillBufferDone:
- {
- sp<AMessage> notify;
- CHECK(msg->findMessage("real-notify", ¬ify));
-
- int32_t what;
- CHECK(notify->findInt32("what", &what));
-
- if (what == ACodec::kWhatDrainThisBuffer) {
- CHECK_GT(mNumPendingDecodes, 0);
- --mNumPendingDecodes;
-
- sp<AMessage> reply =
- new AMessage(kWhatOutputBufferDrained, id());
-
- notify->setMessage("reply", reply);
-
- ++mNumOutstandingOutputBuffers;
- } else if (what == ACodec::kWhatEOS) {
- CHECK_GT(mNumPendingDecodes, 0);
- --mNumPendingDecodes;
-
- if (mFlushing) {
- completeFlushIfPossible();
- break;
- }
- }
-
- notify->post();
- break;
- }
-
- case kWhatOutputBufferDrained:
- {
- CHECK_GT(mNumOutstandingOutputBuffers, 0);
- --mNumOutstandingOutputBuffers;
-
- if (mFlushing) {
- completeFlushIfPossible();
- break;
- }
-
- ++mNumPendingDecodes;
- mReader->readMore();
- break;
- }
-
- case kWhatFlush:
- {
- onFlush();
- break;
- }
-
- case kWhatResume:
- {
- onResume();
- break;
- }
-
- default:
- TRESPASS();
- break;
- }
-}
-
-void DecoderWrapper::onSetup(const sp<AMessage> &msg) {
- AString mime;
- CHECK(msg->findString("mime", &mime));
-
- CHECK(!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC));
-
- int32_t numChannels, sampleRate;
- CHECK(msg->findInt32("channel-count", &numChannels));
- CHECK(msg->findInt32("sample-rate", &sampleRate));
-
- sp<RefBase> obj;
- CHECK(msg->findObject("esds", &obj));
- sp<ABuffer> esds = static_cast<ABuffer *>(obj.get());
-
- sp<MetaData> meta = new MetaData;
- meta->setCString(kKeyMIMEType, mime.c_str());
- meta->setInt32(kKeySampleRate, sampleRate);
- meta->setInt32(kKeyChannelCount, numChannels);
- meta->setData(kKeyESDS, 0, esds->data(), esds->size());
-
- mSource = new WrapperSource(
- meta, new AMessage(kWhatInputDataRequested, id()));
-
- sp<MediaSource> decoder = new AACDecoder(mSource);
-
- mReaderLooper = new ALooper;
- mReaderLooper->setName("DecoderWrapper looper");
-
- mReaderLooper->start(
- false, /* runOnCallingThread */
- false, /* canCallJava */
- PRIORITY_AUDIO);
-
- sp<AMessage> notify = new AMessage(kWhatFillBufferDone, id());
- notify->setMessage("real-notify", mNotify);
-
- mReader = new WrapperReader(decoder, notify);
- mReaderLooper->registerHandler(mReader);
-
- mReader->start();
- ++mNumPendingDecodes;
-}
-
-void DecoderWrapper::onShutdown() {
- mReaderLooper->stop();
- mReaderLooper.clear();
-
- mReader->stop();
- mReader.clear();
-
- mSource.clear();
-
- mNumOutstandingInputBuffers = 0;
- mNumOutstandingOutputBuffers = 0;
- mNumPendingDecodes = 0;
- mFlushing = false;
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatShutdownCompleted);
- notify->post();
-}
-
-void DecoderWrapper::postFillBuffer() {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFillThisBuffer);
- sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
- notify->setMessage("reply", reply);
- notify->post();
-}
-
-void DecoderWrapper::onFlush() {
- CHECK(!mFlushing);
- mFlushing = true;
-
- completeFlushIfPossible();
-}
-
-void DecoderWrapper::completeFlushIfPossible() {
- CHECK(mFlushing);
-
- if (mNumOutstandingInputBuffers > 0
- || mNumOutstandingOutputBuffers > 0
- || mNumPendingDecodes > 0) {
- return;
- }
-
- mFlushing = false;
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
- notify->post();
-}
-
-void DecoderWrapper::onResume() {
- CHECK(!mFlushing);
-
- ++mNumPendingDecodes;
-
- mSource->clear();
- mReader->readMore(true /* flush */);
-}
-
-} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.h b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
deleted file mode 100644
index b9be12c..0000000
--- a/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DECODER_WRAPPER_H_
-
-#define DECODER_WRAPPER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct MediaSource;
-
-struct DecoderWrapper : public AHandler {
- DecoderWrapper();
-
- void setNotificationMessage(const sp<AMessage> &msg);
- void initiateSetup(const sp<AMessage> &msg);
- void initiateShutdown();
- void signalFlush();
- void signalResume();
-
-protected:
- virtual ~DecoderWrapper();
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
- struct WrapperSource;
- struct WrapperReader;
-
- enum {
- kWhatSetup,
- kWhatInputBufferFilled,
- kWhatOutputBufferDrained,
- kWhatShutdown,
- kWhatFillBufferDone,
- kWhatInputDataRequested,
- kWhatFlush,
- kWhatResume,
- };
-
- sp<AMessage> mNotify;
-
- sp<WrapperSource> mSource;
-
- sp<ALooper> mReaderLooper;
- sp<WrapperReader> mReader;
-
- int32_t mNumOutstandingInputBuffers;
- int32_t mNumOutstandingOutputBuffers;
- int32_t mNumPendingDecodes;
- bool mFlushing;
-
- void onSetup(const sp<AMessage> &msg);
- void onShutdown();
- void onFlush();
- void onResume();
-
- void postFillBuffer();
- void completeFlushIfPossible();
-
- DISALLOW_EVIL_CONSTRUCTORS(DecoderWrapper);
-};
-
-} // namespace android
-
-#endif // DECODER_WRAPPER_H_
-
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 517acc9..81b41ef 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -20,7 +20,6 @@
#include "NuPlayerDecoder.h"
-#include "DecoderWrapper.h"
#include "ESDS.h"
#include <media/stagefright/foundation/ABuffer.h>
@@ -47,7 +46,6 @@
void NuPlayer::Decoder::configure(const sp<MetaData> &meta) {
CHECK(mCodec == NULL);
- CHECK(mWrapper == NULL);
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -61,19 +59,11 @@
format->setObject("native-window", mNativeWindow);
}
- if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
- mWrapper = new DecoderWrapper;
- looper()->registerHandler(mWrapper);
+ mCodec = new ACodec;
+ looper()->registerHandler(mCodec);
- mWrapper->setNotificationMessage(notifyMsg);
- mWrapper->initiateSetup(format);
- } else {
- mCodec = new ACodec;
- looper()->registerHandler(mCodec);
-
- mCodec->setNotificationMessage(notifyMsg);
- mCodec->initiateSetup(format);
- }
+ mCodec->setNotificationMessage(notifyMsg);
+ mCodec->initiateSetup(format);
}
void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
@@ -214,7 +204,6 @@
msg->setObject("csd", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
-#if 0
ESDS esds((const char *)data, size);
CHECK_EQ(esds.InitCheck(), (status_t)OK);
@@ -230,12 +219,6 @@
buffer->meta()->setInt32("csd", true);
mCSD.push(buffer);
-#else
- sp<ABuffer> buffer = new ABuffer(size);
- memcpy(buffer->data(), data, size);
-
- msg->setObject("esds", buffer);
-#endif
}
return msg;
@@ -270,27 +253,18 @@
void NuPlayer::Decoder::signalFlush() {
if (mCodec != NULL) {
mCodec->signalFlush();
- } else {
- CHECK(mWrapper != NULL);
- mWrapper->signalFlush();
}
}
void NuPlayer::Decoder::signalResume() {
if (mCodec != NULL) {
mCodec->signalResume();
- } else {
- CHECK(mWrapper != NULL);
- mWrapper->signalResume();
}
}
void NuPlayer::Decoder::initiateShutdown() {
if (mCodec != NULL) {
mCodec->initiateShutdown();
- } else {
- CHECK(mWrapper != NULL);
- mWrapper->initiateShutdown();
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 732f090..fabc606 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -25,7 +25,6 @@
namespace android {
struct ABuffer;
-struct DecoderWrapper;
struct NuPlayer::Decoder : public AHandler {
Decoder(const sp<AMessage> &notify,
@@ -51,7 +50,6 @@
sp<NativeWindowWrapper> mNativeWindow;
sp<ACodec> mCodec;
- sp<DecoderWrapper> mWrapper;
Vector<sp<ABuffer> > mCSD;
size_t mCSDIndex;
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
new file mode 100644
index 0000000..8413208
--- /dev/null
+++ b/media/libstagefright/AACWriter.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AACWriter"
+#include <utils/Log.h>
+
+#include <media/stagefright/AACWriter.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/mediarecorder.h>
+#include <sys/prctl.h>
+#include <sys/resource.h>
+#include <fcntl.h>
+
+namespace android {
+
+AACWriter::AACWriter(const char *filename)
+ : mFd(-1),
+ mInitCheck(NO_INIT),
+ mStarted(false),
+ mPaused(false),
+ mResumed(false),
+ mChannelCount(-1),
+ mSampleRate(-1) {
+
+ LOGV("AACWriter Constructor");
+
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR);
+ if (mFd >= 0) {
+ mInitCheck = OK;
+ }
+}
+
+AACWriter::AACWriter(int fd)
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0? NO_INIT: OK),
+ mStarted(false),
+ mPaused(false),
+ mResumed(false),
+ mChannelCount(-1),
+ mSampleRate(-1) {
+}
+
+AACWriter::~AACWriter() {
+ if (mStarted) {
+ stop();
+ }
+
+ if (mFd != -1) {
+ close(mFd);
+ mFd = -1;
+ }
+}
+
+status_t AACWriter::initCheck() const {
+ return mInitCheck;
+}
+
+static int writeInt8(int fd, uint8_t x) {
+ return ::write(fd, &x, 1);
+}
+
+
+status_t AACWriter::addSource(const sp<MediaSource> &source) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mSource != NULL) {
+ LOGE("AAC files only support a single track of audio.");
+ return UNKNOWN_ERROR;
+ }
+
+ sp<MetaData> meta = source->getFormat();
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC));
+ CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
+ CHECK(meta->findInt32(kKeySampleRate, &mSampleRate));
+ CHECK(mChannelCount >= 1 && mChannelCount <= 2);
+
+ mSource = source;
+ return OK;
+}
+
+status_t AACWriter::start(MetaData *params) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mSource == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (mStarted && mPaused) {
+ mPaused = false;
+ mResumed = true;
+ return OK;
+ } else if (mStarted) {
+        // Already started; nothing to do
+ return OK;
+ }
+
+ mFrameDurationUs = (kSamplesPerFrame * 1000000LL + (mSampleRate >> 1))
+ / mSampleRate;
+
+ status_t err = mSource->start();
+
+ if (err != OK) {
+ return err;
+ }
+
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ mReachedEOS = false;
+ mDone = false;
+
+ pthread_create(&mThread, &attr, ThreadWrapper, this);
+ pthread_attr_destroy(&attr);
+
+ mStarted = true;
+
+ return OK;
+}
+
+status_t AACWriter::pause() {
+ if (!mStarted) {
+ return OK;
+ }
+ mPaused = true;
+ return OK;
+}
+
+status_t AACWriter::stop() {
+ if (!mStarted) {
+ return OK;
+ }
+
+ mDone = true;
+
+ void *dummy;
+ pthread_join(mThread, &dummy);
+
+ status_t err = (status_t) dummy;
+ {
+ status_t status = mSource->stop();
+ if (err == OK &&
+ (status != OK && status != ERROR_END_OF_STREAM)) {
+ err = status;
+ }
+ }
+
+ mStarted = false;
+ return err;
+}
+
+bool AACWriter::exceedsFileSizeLimit() {
+ if (mMaxFileSizeLimitBytes == 0) {
+ return false;
+ }
+ return mEstimatedSizeBytes >= mMaxFileSizeLimitBytes;
+}
+
+bool AACWriter::exceedsFileDurationLimit() {
+ if (mMaxFileDurationLimitUs == 0) {
+ return false;
+ }
+ return mEstimatedDurationUs >= mMaxFileDurationLimitUs;
+}
+
+// static
+void *AACWriter::ThreadWrapper(void *me) {
+ return (void *) static_cast<AACWriter *>(me)->threadFunc();
+}
+
+/*
+* Looks up the given sample rate in the sample rate table.
+* If found, stores the table index in *tableIndex and returns true;
+* otherwise, returns false.
+*/
+static bool getSampleRateTableIndex(int sampleRate, uint8_t* tableIndex) {
+ static const int kSampleRateTable[] = {
+ 96000, 88200, 64000, 48000, 44100, 32000,
+ 24000, 22050, 16000, 12000, 11025, 8000
+ };
+ const int tableSize =
+ sizeof(kSampleRateTable) / sizeof(kSampleRateTable[0]);
+
+ *tableIndex = 0;
+ for (int index = 0; index < tableSize; ++index) {
+ if (sampleRate == kSampleRateTable[index]) {
+ LOGV("Sample rate: %d and index: %d",
+ sampleRate, index);
+ *tableIndex = index;
+ return true;
+ }
+ }
+
+ LOGE("Sampling rate %d bps is not supported", sampleRate);
+ return false;
+}
+
+/*
+ * ADTS (Audio Data Transport Stream) header structure.
+ * It consists of 7 or 9 bytes (without or with CRC, respectively):
+ * 12 bits of syncword 0xFFF, all bits must be 1
+ * 1 bit of MPEG version ID. 0 for MPEG-4, and 1 for MPEG-2
+ * 2 bits of MPEG layer. If in MPEG-TS, set to 0
+ * 1 bit of protection absence. Set to 1 if no CRC.
+ * 2 bits of profile code. Set to 1 (the MPEG-4 Audio
+ * object type minus 1; we are using AAC-LC = 2)
+ * 4 bits of sampling frequency index code (15 is not allowed)
+ * 1 bit of private stream. Set to 0.
+ * 3 bits of channel configuration code. 0 reserved for inband PCM
+ * 1 bit of originality. Set to 0.
+ * 1 bit of home. Set to 0.
+ * 1 bit of copyrighted stream. Set to 0.
+ * 1 bit of copyright start. Set to 0.
+ * 13 bits of frame length. It includes the 7- or 9-byte header length;
+ * it is set to (protection absent ? 7 : 9) + size(AAC frame)
+ * 11 bits of buffer fullness. 0x7FF for VBR.
+ * 2 bits of frame count in one packet. Set to 0.
+ */
+status_t AACWriter::writeAdtsHeader(uint32_t frameLength) {
+ uint8_t data = 0xFF;
+ write(mFd, &data, 1);
+
+ const uint8_t kFieldId = 0;
+ const uint8_t kMpegLayer = 0;
+ const uint8_t kProtectionAbsense = 1; // 1: kAdtsHeaderLength = 7
+ data = 0xF0;
+ data |= (kFieldId << 3);
+ data |= (kMpegLayer << 1);
+ data |= kProtectionAbsense;
+ write(mFd, &data, 1);
+
+ const uint8_t kProfileCode = 1; // AAC-LC
+ uint8_t kSampleFreqIndex;
+ CHECK(getSampleRateTableIndex(mSampleRate, &kSampleFreqIndex));
+ const uint8_t kPrivateStream = 0;
+ const uint8_t kChannelConfigCode = mChannelCount;
+ data = (kProfileCode << 6);
+ data |= (kSampleFreqIndex << 2);
+ data |= (kPrivateStream << 1);
+ data |= (kChannelConfigCode >> 2);
+ write(mFd, &data, 1);
+
+ // 4 bits from originality to copyright start
+ const uint8_t kCopyright = 0;
+ const uint32_t kFrameLength = frameLength;
+ data = ((kChannelConfigCode & 3) << 6);
+ data |= (kCopyright << 2);
+ data |= ((kFrameLength & 0x1800) >> 11);
+ write(mFd, &data, 1);
+
+ data = ((kFrameLength & 0x07F8) >> 3);
+ write(mFd, &data, 1);
+
+ const uint32_t kBufferFullness = 0x7FF; // VBR
+ data = ((kFrameLength & 0x07) << 5);
+ data |= ((kBufferFullness & 0x07C0) >> 6);
+ write(mFd, &data, 1);
+
+ const uint8_t kFrameCount = 0;
+ data = ((kBufferFullness & 0x03F) << 2);
+ data |= kFrameCount;
+ write(mFd, &data, 1);
+
+ return OK;
+}
+
+status_t AACWriter::threadFunc() {
+ mEstimatedDurationUs = 0;
+ mEstimatedSizeBytes = 0;
+ int64_t previousPausedDurationUs = 0;
+ int64_t maxTimestampUs = 0;
+ status_t err = OK;
+
+ prctl(PR_SET_NAME, (unsigned long)"AACWriterThread", 0, 0, 0);
+
+ while (!mDone && err == OK) {
+ MediaBuffer *buffer;
+ err = mSource->read(&buffer);
+
+ if (err != OK) {
+ break;
+ }
+
+ if (mPaused) {
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+
+ mEstimatedSizeBytes += kAdtsHeaderLength + buffer->range_length();
+ if (exceedsFileSizeLimit()) {
+ buffer->release();
+ buffer = NULL;
+ notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
+ break;
+ }
+
+ int32_t isCodecSpecific = 0;
+ if (buffer->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecSpecific) && isCodecSpecific) {
+ LOGV("Drop codec specific info buffer");
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+
+ int64_t timestampUs;
+        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
+ if (timestampUs > mEstimatedDurationUs) {
+ mEstimatedDurationUs = timestampUs;
+ }
+ if (mResumed) {
+ previousPausedDurationUs += (timestampUs - maxTimestampUs - mFrameDurationUs);
+ mResumed = false;
+ }
+ timestampUs -= previousPausedDurationUs;
+ LOGV("time stamp: %lld, previous paused duration: %lld",
+ timestampUs, previousPausedDurationUs);
+ if (timestampUs > maxTimestampUs) {
+ maxTimestampUs = timestampUs;
+ }
+
+ if (exceedsFileDurationLimit()) {
+ buffer->release();
+ buffer = NULL;
+ notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
+ break;
+ }
+
+ // Each output AAC audio frame to the file contains
+ // 1. an ADTS header, followed by
+ // 2. the compressed audio data.
+ ssize_t dataLength = buffer->range_length();
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ if (writeAdtsHeader(kAdtsHeaderLength + dataLength) != OK ||
+ dataLength != write(mFd, data, dataLength)) {
+ err = ERROR_IO;
+ }
+
+ buffer->release();
+ buffer = NULL;
+ }
+
+ close(mFd);
+ mFd = -1;
+ mReachedEOS = true;
+ if (err == ERROR_END_OF_STREAM) {
+ return OK;
+ }
+ return err;
+}
+
+bool AACWriter::reachedEOS() {
+ return mReachedEOS;
+}
+
+} // namespace android
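
For reference, the bit packing performed by writeAdtsHeader() above follows the ADTS layout documented in its comment block. Below is a minimal standalone sketch of the same 7-byte, CRC-less header done in one place; the helper name packAdtsHeader and the fixed AAC-LC profile are illustrative assumptions, not part of this change.

    // Sketch only: packs the 7-byte (no CRC) ADTS header described above
    // AACWriter::writeAdtsHeader(). The helper name is hypothetical.
    #include <stdint.h>
    #include <stddef.h>

    static void packAdtsHeader(uint8_t header[7], size_t aacFrameLength,
                               uint8_t sampleRateIndex, uint8_t channelConfig) {
        const size_t frameLength = aacFrameLength + 7;   // 13-bit length includes the header
        header[0] = 0xFF;                                // syncword, high 8 bits
        header[1] = 0xF1;                                // syncword low 4 bits, MPEG-4, layer 0, no CRC
        header[2] = (1 << 6)                             // profile: AAC-LC (audio object type 2, minus 1)
                  | (sampleRateIndex << 2)               // sampling frequency index
                  | (channelConfig >> 2);                // channel config, high bit
        header[3] = ((channelConfig & 0x03) << 6)        // channel config, low 2 bits
                  | ((frameLength >> 11) & 0x03);        // frame length, bits 12..11
        header[4] = (frameLength >> 3) & 0xFF;           // frame length, bits 10..3
        header[5] = ((frameLength & 0x07) << 5)          // frame length, bits 2..0
                  | 0x1F;                                // buffer fullness 0x7FF, high 5 bits
        header[6] = 0xFC;                                // buffer fullness low 6 bits, 0 extra frames
    }

As a worked example of the frame-duration math in start(): assuming kSamplesPerFrame is the usual 1024-sample AAC frame size, a 44100 Hz track gives (1024 * 1000000 + 22050) / 44100 = 23220 us per frame from the rounded division above.
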
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9928f44..4189354 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -505,7 +505,7 @@
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
- android_native_buffer_t *buf;
+ ANativeWindowBuffer *buf;
err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
if (err != 0) {
LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
@@ -574,7 +574,7 @@
}
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
- android_native_buffer_t *buf;
+ ANativeWindowBuffer *buf;
CHECK_EQ(mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf), 0);
for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
@@ -1644,7 +1644,7 @@
if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
componentName = "OMX.Nvidia.h264.decode";
} else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
- componentName = "OMX.Nvidia.aac.decoder";
+ componentName = "OMX.google.aac.decoder";
} else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_MPEG)) {
componentName = "OMX.Nvidia.mp3.decoder";
} else {
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 2f3e141..f731dfb 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -3,9 +3,12 @@
include frameworks/base/media/libstagefright/codecs/common/Config.mk
+BUILD_WITH_SOFTWARE_DECODERS := false
+
LOCAL_SRC_FILES:= \
ACodec.cpp \
AACExtractor.cpp \
+ AACWriter.cpp \
AMRExtractor.cpp \
AMRWriter.cpp \
AVIExtractor.cpp \
@@ -44,10 +47,10 @@
ShoutcastSource.cpp \
StagefrightMediaScanner.cpp \
StagefrightMetadataRetriever.cpp \
- ThreadedSource.cpp \
ThrottledSource.cpp \
TimeSource.cpp \
TimedEventQueue.cpp \
+ TimedTextPlayer.cpp \
Utils.cpp \
VBRISeeker.cpp \
WAVExtractor.cpp \
@@ -80,28 +83,39 @@
LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
- libstagefright_aacdec \
libstagefright_aacenc \
- libstagefright_amrnbdec \
libstagefright_amrnbenc \
- libstagefright_amrwbdec \
libstagefright_amrwbenc \
- libstagefright_avcdec \
libstagefright_avcenc \
- libstagefright_m4vh263dec \
libstagefright_m4vh263enc \
- libstagefright_mp3dec \
- libstagefright_vorbisdec \
libstagefright_matroska \
- libstagefright_vpxdec \
libvpx \
libstagefright_mpeg2ts \
libstagefright_httplive \
libstagefright_rtsp \
libstagefright_id3 \
- libstagefright_g711dec \
libFLAC \
+ifeq ($(BUILD_WITH_SOFTWARE_DECODERS),true)
+
+LOCAL_SRC_FILES += \
+ ThreadedSource.cpp \
+
+LOCAL_STATIC_LIBRARIES += \
+ libstagefright_aacdec \
+ libstagefright_amrnbdec \
+ libstagefright_amrwbdec \
+ libstagefright_avcdec \
+ libstagefright_g711dec \
+ libstagefright_mp3dec \
+ libstagefright_m4vh263dec \
+ libstagefright_vorbisdec \
+ libstagefright_vpxdec \
+ libvpx \
+
+endif
+
+
################################################################################
# The following was shamelessly copied from external/webkit/Android.mk and
@@ -178,6 +192,10 @@
LOCAL_CFLAGS += -Wno-multichar
+ifeq ($(BUILD_WITH_SOFTWARE_DECODERS),true)
+ LOCAL_CFLAGS += -DHAVE_SOFTWARE_DECODERS
+endif
+
LOCAL_MODULE:= libstagefright
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index 69f9c23..dd69e6b 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -84,7 +84,13 @@
CHECK(mFirstBuffer == NULL);
- mFirstBufferResult = mSource->read(&mFirstBuffer);
+ MediaSource::ReadOptions options;
+ if (mSeeking) {
+ options.setSeekTo(mSeekTimeUs);
+ mSeeking = false;
+ }
+
+ mFirstBufferResult = mSource->read(&mFirstBuffer, &options);
if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
LOGV("INFO_FORMAT_CHANGED!!!");
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 974efa7..fb7a871 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -24,10 +24,13 @@
#include "include/ARTSPController.h"
#include "include/AwesomePlayer.h"
+#include "include/DRMExtractor.h"
#include "include/SoftwareRenderer.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"
#include "include/MPEG2TSExtractor.h"
+#include "include/TimedTextPlayer.h"
+#include "include/WVMExtractor.h"
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
@@ -185,7 +188,8 @@
mExtractorFlags(0),
mVideoBuffer(NULL),
mDecryptHandle(NULL),
- mLastVideoTimeUs(-1) {
+ mLastVideoTimeUs(-1),
+ mTextPlayer(NULL) {
CHECK_EQ(mClient.connect(), (status_t)OK);
DataSource::RegisterDefaultSniffers();
@@ -381,10 +385,8 @@
mFlags |= AUTO_LOOPING;
}
}
- }
-
- if (haveAudio && haveVideo) {
- break;
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ addTextSource(extractor->getTrack(i));
}
}
@@ -447,6 +449,7 @@
cancelPlayerEvents();
+ mWVMExtractor.clear();
mCachedSource.clear();
mAudioTrack.clear();
mVideoTrack.clear();
@@ -469,6 +472,11 @@
delete mAudioPlayer;
mAudioPlayer = NULL;
+ if (mTextPlayer != NULL) {
+ delete mTextPlayer;
+ mTextPlayer = NULL;
+ }
+
mVideoRenderer.clear();
if (mRTSPController != NULL) {
@@ -540,6 +548,11 @@
*durationUs = cachedDataRemaining * 8000000ll / bitrate;
*eos = (finalStatus != OK);
return true;
+ } else if (mWVMExtractor != NULL) {
+ status_t finalStatus;
+ *durationUs = mWVMExtractor->getCachedDurationUs(&finalStatus);
+ *eos = (finalStatus != OK);
+ return true;
}
return false;
@@ -632,6 +645,30 @@
}
}
}
+ } else if (mWVMExtractor != NULL) {
+ status_t finalStatus;
+
+ int64_t cachedDurationUs
+ = mWVMExtractor->getCachedDurationUs(&finalStatus);
+
+ bool eos = (finalStatus != OK);
+
+ if (eos) {
+ if (finalStatus == ERROR_END_OF_STREAM) {
+ notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
+ }
+ if (mFlags & PREPARING) {
+ LOGV("cache has reached EOS, prepare is done.");
+ finishAsyncPrepare_l();
+ }
+ } else {
+ int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
+ if (percentage > 100) {
+ percentage = 100;
+ }
+
+ notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
+ }
}
int64_t cachedDurationUs;
@@ -826,6 +863,8 @@
if (!(mFlags & AUDIOPLAYER_STARTED)) {
mFlags |= AUDIOPLAYER_STARTED;
+ bool wasSeeking = mAudioPlayer->isSeeking();
+
// We've already started the MediaSource in order to enable
// the prefetcher to read its data.
status_t err = mAudioPlayer->start(
@@ -835,6 +874,13 @@
notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
return err;
}
+
+ if (wasSeeking) {
+ CHECK(!mAudioPlayer->isSeeking());
+
+ // We will have finished the seek while starting the audio player.
+ postAudioSeekComplete_l();
+ }
} else {
mAudioPlayer->resume();
}
@@ -928,7 +974,9 @@
// before creating a new one.
IPCThreadState::self()->flushCommands();
- if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+ if (USE_SURFACE_ALLOC
+ && !strncmp(component, "OMX.", 4)
+ && strncmp(component, "OMX.google.", 11)) {
// Hardware decoders avoid the CPU color conversion by decoding
// directly to ANativeBuffers, so we must use a renderer that
// just pushes those buffers to the ANativeWindow.
@@ -971,6 +1019,11 @@
mFlags &= ~AUDIO_RUNNING;
}
+ if (mFlags & TEXTPLAYER_STARTED) {
+ mTextPlayer->pause();
+ mFlags &= ~TEXT_RUNNING;
+ }
+
mFlags &= ~PLAYING;
if (mDecryptHandle != NULL) {
@@ -1119,6 +1172,32 @@
return OK;
}
+status_t AwesomePlayer::setTimedTextTrackIndex(int32_t index) {
+ if (mTextPlayer != NULL) {
+ if (index >= 0) { // to turn on a text track
+ status_t err = mTextPlayer->setTimedTextTrackIndex(index);
+ if (err != OK) {
+ return err;
+ }
+
+ mFlags |= TEXT_RUNNING;
+ mFlags |= TEXTPLAYER_STARTED;
+ return OK;
+ } else { // to turn off the text track display
+ if (mFlags & TEXT_RUNNING) {
+ mFlags &= ~TEXT_RUNNING;
+ }
+ if (mFlags & TEXTPLAYER_STARTED) {
+ mFlags &= ~TEXTPLAYER_STARTED;
+ }
+
+ return mTextPlayer->setTimedTextTrackIndex(index);
+ }
+ } else {
+ return INVALID_OPERATION;
+ }
+}
+
// static
void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
@@ -1155,6 +1234,10 @@
seekAudioIfNecessary_l();
+ if (mFlags & TEXTPLAYER_STARTED) {
+ mTextPlayer->seekTo(mSeekTimeUs);
+ }
+
if (!(mFlags & PLAYING)) {
LOGV("seeking while paused, sending SEEK_COMPLETE notification"
" immediately.");
@@ -1193,6 +1276,16 @@
mAudioTrack = source;
}
+void AwesomePlayer::addTextSource(sp<MediaSource> source) {
+ CHECK(source != NULL);
+
+ if (mTextPlayer == NULL) {
+ mTextPlayer = new TimedTextPlayer(this, mListener, &mQueue);
+ }
+
+ mTextPlayer->addTextSource(source);
+}
+
status_t AwesomePlayer::initAudioDecoder() {
sp<MetaData> meta = mAudioTrack->getFormat();
@@ -1364,7 +1457,7 @@
mVideoBuffer = NULL;
}
- if (mSeeking == SEEK && mCachedSource != NULL && mAudioSource != NULL
+ if (mSeeking == SEEK && isStreamingHTTP() && mAudioSource != NULL
&& !(mFlags & SEEK_PREVIEW)) {
// We're going to seek the video source first, followed by
// the audio source.
@@ -1472,6 +1565,11 @@
}
}
+ if ((mFlags & TEXTPLAYER_STARTED) && !(mFlags & (TEXT_RUNNING | SEEK_PREVIEW))) {
+ mTextPlayer->resume();
+ mFlags |= TEXT_RUNNING;
+ }
+
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
if (mFlags & FIRST_FRAME) {
@@ -1694,8 +1792,19 @@
status_t AwesomePlayer::finishSetDataSource_l() {
sp<DataSource> dataSource;
+ bool isWidevineStreaming = false;
+ if (!strncasecmp("widevine://", mUri.string(), 11)) {
+ isWidevineStreaming = true;
+
+ String8 newURI = String8("http://");
+ newURI.append(mUri.string() + 11);
+
+ mUri = newURI;
+ }
+
if (!strncasecmp("http://", mUri.string(), 7)
- || !strncasecmp("https://", mUri.string(), 8)) {
+ || !strncasecmp("https://", mUri.string(), 8)
+ || isWidevineStreaming) {
mConnectingDataSource = HTTPBase::Create(
(mFlags & INCOGNITO)
? HTTPBase::kFlagIncognito
@@ -1712,16 +1821,24 @@
return err;
}
+ if (!isWidevineStreaming) {
+ // The widevine extractor does its own caching.
+
#if 0
- mCachedSource = new NuCachedSource2(
- new ThrottledSource(
- mConnectingDataSource, 50 * 1024 /* bytes/sec */));
+ mCachedSource = new NuCachedSource2(
+ new ThrottledSource(
+ mConnectingDataSource, 50 * 1024 /* bytes/sec */));
#else
- mCachedSource = new NuCachedSource2(mConnectingDataSource);
+ mCachedSource = new NuCachedSource2(mConnectingDataSource);
#endif
+
+ dataSource = mCachedSource;
+ } else {
+ dataSource = mConnectingDataSource;
+ }
+
mConnectingDataSource.clear();
- dataSource = mCachedSource;
String8 contentType = dataSource->getMIMEType();
@@ -1735,28 +1852,35 @@
// could block on the datasource for a significant amount of time.
// During that time we'd be unable to abort the preparation phase
// without this prefill.
+ if (mCachedSource != NULL) {
+ // We're going to prefill the cache before trying to instantiate
+ // the extractor below, as the latter is an operation that otherwise
+ // could block on the datasource for a significant amount of time.
+ // During that time we'd be unable to abort the preparation phase
+ // without this prefill.
- mLock.unlock();
+ mLock.unlock();
- for (;;) {
- status_t finalStatus;
- size_t cachedDataRemaining =
- mCachedSource->approxDataRemaining(&finalStatus);
+ for (;;) {
+ status_t finalStatus;
+ size_t cachedDataRemaining =
+ mCachedSource->approxDataRemaining(&finalStatus);
- if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
- || (mFlags & PREPARE_CANCELLED)) {
- break;
+ if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
+ || (mFlags & PREPARE_CANCELLED)) {
+ break;
+ }
+
+ usleep(200000);
}
- usleep(200000);
+ mLock.lock();
}
- mLock.lock();
- }
-
- if (mFlags & PREPARE_CANCELLED) {
- LOGI("Prepare cancelled while waiting for initial cache fill.");
- return UNKNOWN_ERROR;
+ if (mFlags & PREPARE_CANCELLED) {
+ LOGI("Prepare cancelled while waiting for initial cache fill.");
+ return UNKNOWN_ERROR;
+ }
}
} else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
if (mLooper == NULL) {
@@ -1790,10 +1914,29 @@
return UNKNOWN_ERROR;
}
- sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+ sp<MediaExtractor> extractor;
- if (extractor == NULL) {
- return UNKNOWN_ERROR;
+ if (isWidevineStreaming) {
+ String8 mimeType;
+ float confidence;
+ sp<AMessage> dummy;
+ bool success = SniffDRM(dataSource, &mimeType, &confidence, &dummy);
+
+ if (!success
+ || strcasecmp(
+ mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ mWVMExtractor = new WVMExtractor(dataSource);
+ mWVMExtractor->setAdaptiveStreamingMode(true);
+ extractor = mWVMExtractor;
+ } else {
+ extractor = MediaExtractor::Create(dataSource);
+
+ if (extractor == NULL) {
+ return UNKNOWN_ERROR;
+ }
}
dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
@@ -1805,7 +1948,15 @@
}
}
- return setDataSource_l(extractor);
+ status_t err = setDataSource_l(extractor);
+
+ if (err != OK) {
+ mWVMExtractor.clear();
+
+ return err;
+ }
+
+ return OK;
}
void AwesomePlayer::abortPrepare(status_t err) {
@@ -1866,7 +2017,7 @@
mFlags |= PREPARING_CONNECTED;
- if (mCachedSource != NULL || mRTSPController != NULL) {
+ if (isStreamingHTTP() || mRTSPController != NULL) {
postBufferingEvent_l();
} else {
finishAsyncPrepare_l();
@@ -1902,14 +2053,26 @@
void AwesomePlayer::postAudioSeekComplete() {
Mutex::Autolock autoLock(mLock);
+ postAudioSeekComplete_l();
+}
+
+void AwesomePlayer::postAudioSeekComplete_l() {
postCheckAudioStatusEvent_l(0 /* delayUs */);
}
status_t AwesomePlayer::setParameter(int key, const Parcel &request) {
- return OK;
+ if (key == KEY_PARAMETER_TIMED_TEXT_TRACK_INDEX) {
+ return setTimedTextTrackIndex(request.readInt32());
+ }
+ return ERROR_UNSUPPORTED;
}
status_t AwesomePlayer::getParameter(int key, Parcel *reply) {
return OK;
}
+
+bool AwesomePlayer::isStreamingHTTP() const {
+ return mCachedSource != NULL || mWVMExtractor != NULL;
+}
+
} // namespace android
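
The setParameter() change above is the entry point for the timed-text support added in this patch: the caller packs the track index into a Parcel, and a negative index turns the text display off via setTimedTextTrackIndex(). The following is a rough sketch of the calling side; the client-level entry point and the visibility of KEY_PARAMETER_TIMED_TEXT_TRACK_INDEX to the caller are assumptions, not shown in this patch.

    // Sketch only: how a client might drive the new timed-text parameter.
    // The exact client API surface is assumed, not part of this change.
    #include <binder/Parcel.h>
    #include <media/mediaplayer.h>

    using namespace android;

    // Pass index >= 0 to select a text track, or a negative value to turn
    // the text display off (mirroring AwesomePlayer::setTimedTextTrackIndex).
    static status_t selectTimedTextTrack(const sp<MediaPlayer> &player, int32_t index) {
        Parcel request;
        request.writeInt32(index);
        return player->setParameter(KEY_PARAMETER_TIMED_TEXT_TRACK_INDEX, request);
    }
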
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 8787214..6692809 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -19,6 +19,8 @@
#include "include/MPEG4Extractor.h"
#include "include/SampleTable.h"
+#include "include/ESDS.h"
+#include "include/TimedTextPlayer.h"
#include <arpa/inet.h>
@@ -29,7 +31,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/DataSource.h>
-#include "include/ESDS.h"
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
@@ -832,6 +833,33 @@
mLastTrack->meta->setInt64(
kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
+ uint8_t lang[2];
+ off64_t lang_offset;
+ if (version == 1) {
+ lang_offset = timescale_offset + 4 + 8;
+ } else if (version == 0) {
+ lang_offset = timescale_offset + 4 + 4;
+ } else {
+ return ERROR_IO;
+ }
+
+ if (mDataSource->readAt(lang_offset, &lang, sizeof(lang))
+ < (ssize_t)sizeof(lang)) {
+ return ERROR_IO;
+ }
+
+            // The ISO-639-2/T three-character language code is packed as
+            // a 1-bit pad followed by three 5-bit characters. Each character
+            // is stored as the difference between its ASCII value and 0x60.
+ char lang_code[4];
+ lang_code[0] = ((lang[0] >> 2) & 0x1f) + 0x60;
+ lang_code[1] = ((lang[0] & 0x3) << 3 | (lang[1] >> 5)) + 0x60;
+ lang_code[2] = (lang[1] & 0x1f) + 0x60;
+ lang_code[3] = '\0';
+
+ mLastTrack->meta->setCString(
+ kKeyMediaLanguage, lang_code);
+
*offset += chunk_size;
break;
}
@@ -1295,6 +1323,14 @@
return parseDrmSINF(offset, data_offset);
}
+ case FOURCC('t', 'x', '3', 'g'):
+ {
+ mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP);
+
+ *offset += chunk_size;
+ break;
+ }
+
default:
{
*offset += chunk_size;
@@ -2135,6 +2171,9 @@
// Just give these file types a chance.
FOURCC('q', 't', ' ', ' '), // Apple's QuickTime
FOURCC('M', 'S', 'N', 'V'), // Sony's PSP
+
+ FOURCC('3', 'g', '2', 'a'), // 3GPP2
+ FOURCC('3', 'g', '2', 'b'),
};
for (size_t i = 0;
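
The 'mdhd' handling added above packs each ISO-639-2/T character into 5 bits. A small standalone illustration of the same unpacking with a concrete value follows; the helper name is made up for the example.

    // Sketch only: unpack the 15-bit language code exactly as the new
    // parseChunk() code does for the 'lang' bytes. Hypothetical helper.
    #include <stdint.h>

    static void unpackIso639Lang(const uint8_t lang[2], char out[4]) {
        // Layout: 1 pad bit (MSB), then three 5-bit values,
        // each stored as (ASCII value - 0x60).
        out[0] = ((lang[0] >> 2) & 0x1f) + 0x60;
        out[1] = (((lang[0] & 0x03) << 3) | (lang[1] >> 5)) + 0x60;
        out[2] = (lang[1] & 0x1f) + 0x60;
        out[3] = '\0';
    }
    // Example: {0x15, 0xC7} (i.e. 0 00101 01110 00111) unpacks to "eng".
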
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e13b67e..f6a8b17 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/Utils.h>
#include <media/mediarecorder.h>
+#include <cutils/properties.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
@@ -45,6 +46,7 @@
static const int64_t kMax32BitFileSize = 0x007fffffffLL;
static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
static const uint8_t kNalUnitTypePicParamSet = 0x08;
+static const int64_t kInitialDelayTimeUs = 700000LL;
// Using longer adjustment period to suppress fluctuations in
// the audio encoding paths
@@ -63,12 +65,13 @@
int64_t getDurationUs() const;
int64_t getEstimatedTrackSizeBytes() const;
- void writeTrackHeader(int32_t trackID, bool use32BitOffset = true);
+ void writeTrackHeader(bool use32BitOffset = true);
void bufferChunk(int64_t timestampUs);
bool isAvc() const { return mIsAvc; }
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
void addChunkOffset(off64_t offset);
+ int32_t getTrackId() const { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
private:
@@ -84,6 +87,7 @@
bool mIsMPEG4;
int32_t mTrackId;
int64_t mTrackDurationUs;
+ int64_t mMaxChunkDurationUs;
// For realtime applications, we need to adjust the media clock
// for video track based on the audio media clock
@@ -156,6 +160,8 @@
bool mReachedEOS;
int64_t mStartTimestampUs;
+ int64_t mStartTimeRealUs;
+ int64_t mFirstSampleTimeRealUs;
int64_t mPreviousTrackTimeUs;
int64_t mTrackEveryTimeDurationUs;
@@ -187,12 +193,9 @@
const uint8_t *parseParamSet(
const uint8_t *data, size_t length, int type, size_t *paramSetLen);
- status_t makeAVCCodecSpecificData(
- const uint8_t *data, size_t size);
- status_t copyAVCCodecSpecificData(
- const uint8_t *data, size_t size);
- status_t parseAVCCodecSpecificData(
- const uint8_t *data, size_t size);
+ status_t makeAVCCodecSpecificData(const uint8_t *data, size_t size);
+ status_t copyAVCCodecSpecificData(const uint8_t *data, size_t size);
+ status_t parseAVCCodecSpecificData(const uint8_t *data, size_t size);
// Track authoring progress status
void trackProgressStatus(int64_t timeUs, status_t err = OK);
@@ -214,6 +217,31 @@
void addOneStscTableEntry(size_t chunkId, size_t sampleId);
void addOneStssTableEntry(size_t sampleId);
void addOneSttsTableEntry(size_t sampleCount, int64_t durationUs);
+ void sendTrackSummary(bool hasMultipleTracks);
+
+ // Write the boxes
+ void writeStcoBox(bool use32BitOffset);
+ void writeStscBox();
+ void writeStszBox();
+ void writeStssBox();
+ void writeSttsBox();
+ void writeD263Box();
+ void writePaspBox();
+ void writeAvccBox();
+ void writeUrlBox();
+ void writeDrefBox();
+ void writeDinfBox();
+ void writeDamrBox();
+ void writeMdhdBox(time_t now);
+ void writeSmhdBox();
+ void writeVmhdBox();
+ void writeHdlrBox();
+ void writeTkhdBox(time_t now);
+ void writeMp4aEsdsBox();
+ void writeMp4vEsdsBox();
+ void writeAudioFourCCBox();
+ void writeVideoFourCCBox();
+ void writeStblBox(bool use32BitOffset);
Track(const Track &);
Track &operator=(const Track &);
@@ -230,7 +258,11 @@
mOffset(0),
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
- mInterleaveDurationUs(1000000) {
+ mInterleaveDurationUs(1000000),
+ mLatitudex10000(0),
+ mLongitudex10000(0),
+ mAreGeoTagsAvailable(false),
+ mStartTimeOffsetMs(-1) {
mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR);
if (mFd >= 0) {
@@ -249,7 +281,11 @@
mOffset(0),
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
- mInterleaveDurationUs(1000000) {
+ mInterleaveDurationUs(1000000),
+ mLatitudex10000(0),
+ mLongitudex10000(0),
+ mAreGeoTagsAvailable(false),
+ mStartTimeOffsetMs(-1) {
}
MPEG4Writer::~MPEG4Writer() {
@@ -450,20 +486,7 @@
mMoovBoxBuffer = NULL;
mMoovBoxBufferOffset = 0;
- beginBox("ftyp");
- {
- int32_t fileType;
- if (param && param->findInt32(kKeyFileType, &fileType) &&
- fileType != OUTPUT_FORMAT_MPEG_4) {
- writeFourcc("3gp4");
- } else {
- writeFourcc("isom");
- }
- }
- writeInt32(0);
- writeFourcc("isom");
- writeFourcc("3gp4");
- endBox();
+ writeFtypBox(param);
mFreeBoxOffset = mOffset;
@@ -643,43 +666,12 @@
}
lseek64(mFd, mOffset, SEEK_SET);
- time_t now = time(NULL);
const off64_t moovOffset = mOffset;
mWriteMoovBoxToMemory = true;
mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
mMoovBoxBufferOffset = 0;
CHECK(mMoovBoxBuffer != NULL);
- int32_t duration = (maxDurationUs * mTimeScale + 5E5) / 1E6;
-
- beginBox("moov");
-
- beginBox("mvhd");
- writeInt32(0); // version=0, flags=0
- writeInt32(now); // creation time
- writeInt32(now); // modification time
- writeInt32(mTimeScale); // mvhd timescale
- writeInt32(duration);
- writeInt32(0x10000); // rate: 1.0
- writeInt16(0x100); // volume
- writeInt16(0); // reserved
- writeInt32(0); // reserved
- writeInt32(0); // reserved
- writeCompositionMatrix(0); // matrix
- writeInt32(0); // predefined
- writeInt32(0); // predefined
- writeInt32(0); // predefined
- writeInt32(0); // predefined
- writeInt32(0); // predefined
- writeInt32(0); // predefined
- writeInt32(mTracks.size() + 1); // nextTrackID
- endBox(); // mvhd
-
- int32_t id = 1;
- for (List<Track *>::iterator it = mTracks.begin();
- it != mTracks.end(); ++it, ++id) {
- (*it)->writeTrackHeader(id, mUse32BitOffset);
- }
- endBox(); // moov
+ writeMoovBox(maxDurationUs);
mWriteMoovBoxToMemory = false;
if (mStreamableFile) {
@@ -709,9 +701,96 @@
mFd = -1;
mInitCheck = NO_INIT;
mStarted = false;
+
return err;
}
+void MPEG4Writer::writeMvhdBox(int64_t durationUs) {
+ time_t now = time(NULL);
+ beginBox("mvhd");
+ writeInt32(0); // version=0, flags=0
+ writeInt32(now); // creation time
+ writeInt32(now); // modification time
+ writeInt32(mTimeScale); // mvhd timescale
+ int32_t duration = (durationUs * mTimeScale + 5E5) / 1E6;
+ writeInt32(duration);
+ writeInt32(0x10000); // rate: 1.0
+ writeInt16(0x100); // volume
+ writeInt16(0); // reserved
+ writeInt32(0); // reserved
+ writeInt32(0); // reserved
+ writeCompositionMatrix(0); // matrix
+ writeInt32(0); // predefined
+ writeInt32(0); // predefined
+ writeInt32(0); // predefined
+ writeInt32(0); // predefined
+ writeInt32(0); // predefined
+ writeInt32(0); // predefined
+ writeInt32(mTracks.size() + 1); // nextTrackID
+ endBox(); // mvhd
+}
+
+void MPEG4Writer::writeMoovBox(int64_t durationUs) {
+ beginBox("moov");
+ writeMvhdBox(durationUs);
+ if (mAreGeoTagsAvailable) {
+ writeUdtaBox();
+ }
+ int32_t id = 1;
+ for (List<Track *>::iterator it = mTracks.begin();
+ it != mTracks.end(); ++it, ++id) {
+ (*it)->writeTrackHeader(mUse32BitOffset);
+ }
+ endBox(); // moov
+}
+
+void MPEG4Writer::writeFtypBox(MetaData *param) {
+ beginBox("ftyp");
+
+ int32_t fileType;
+ if (param && param->findInt32(kKeyFileType, &fileType) &&
+ fileType != OUTPUT_FORMAT_MPEG_4) {
+ writeFourcc("3gp4");
+ } else {
+ writeFourcc("isom");
+ }
+
+ writeInt32(0);
+ writeFourcc("isom");
+ writeFourcc("3gp4");
+ endBox();
+}
+
+static bool isTestModeEnabled() {
+#if (PROPERTY_VALUE_MAX < 5)
+#error "PROPERTY_VALUE_MAX must be at least 5"
+#endif
+
+ // Test mode is enabled only if rw.media.record.test system
+ // property is enabled.
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("rw.media.record.test", value, NULL) &&
+ (!strcasecmp(value, "true") || !strcasecmp(value, "1"))) {
+ return true;
+ }
+ return false;
+}
+
+void MPEG4Writer::sendSessionSummary() {
+ // Send session summary only if test mode is enabled
+ if (!isTestModeEnabled()) {
+ return;
+ }
+
+ for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
+ it != mChunkInfos.end(); ++it) {
+ int trackNum = it->mTrack->getTrackId() << 28;
+ notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INTER_CHUNK_TIME_MS,
+ it->mMaxInterChunkDurUs);
+ }
+}
+
status_t MPEG4Writer::setInterleaveDuration(uint32_t durationUs) {
mInterleaveDurationUs = durationUs;
return OK;
@@ -874,6 +953,77 @@
write(s, 1, 4);
}
+
+// Written in +/-DD.DDDD format
+void MPEG4Writer::writeLatitude(int degreex10000) {
+ bool isNegative = (degreex10000 < 0);
+ char sign = isNegative? '-': '+';
+
+ // Handle the whole part
+ char str[9];
+ int wholePart = degreex10000 / 10000;
+ if (wholePart == 0) {
+ snprintf(str, 5, "%c%.2d.", sign, wholePart);
+ } else {
+ snprintf(str, 5, "%+.2d.", wholePart);
+ }
+
+ // Handle the fractional part
+ int fractionalPart = degreex10000 - (wholePart * 10000);
+ if (fractionalPart < 0) {
+ fractionalPart = -fractionalPart;
+ }
+ snprintf(&str[4], 5, "%.4d", fractionalPart);
+
+ // Do not write the null terminator
+ write(str, 1, 8);
+}
+
+// Written in +/- DDD.DDDD format
+void MPEG4Writer::writeLongitude(int degreex10000) {
+ bool isNegative = (degreex10000 < 0);
+ char sign = isNegative? '-': '+';
+
+ // Handle the whole part
+ char str[10];
+ int wholePart = degreex10000 / 10000;
+ if (wholePart == 0) {
+ snprintf(str, 6, "%c%.3d.", sign, wholePart);
+ } else {
+ snprintf(str, 6, "%+.3d.", wholePart);
+ }
+
+ // Handle the fractional part
+ int fractionalPart = degreex10000 - (wholePart * 10000);
+ if (fractionalPart < 0) {
+ fractionalPart = -fractionalPart;
+ }
+ snprintf(&str[5], 5, "%.4d", fractionalPart);
+
+ // Do not write the null terminator
+ write(str, 1, 9);
+}
+
+/*
+ * Geodata is stored according to the ISO-6709 standard.
+ * latitudex10000 is the latitude in degrees times 10000, and
+ * longitudex10000 is the longitude in degrees times 10000.
+ * The valid range for the latitude is [-90, +90], and
+ * the valid range for the longitude is [-180, +180].
+ */
+status_t MPEG4Writer::setGeoData(int latitudex10000, int longitudex10000) {
+ // Is latitude or longitude out of range?
+ if (latitudex10000 < -900000 || latitudex10000 > 900000 ||
+ longitudex10000 < -1800000 || longitudex10000 > 1800000) {
+ return BAD_VALUE;
+ }
+
+ mLatitudex10000 = latitudex10000;
+ mLongitudex10000 = longitudex10000;
+ mAreGeoTagsAvailable = true;
+ return OK;
+}
+
void MPEG4Writer::write(const void *data, size_t size) {
write(data, 1, size);
}
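
writeLatitude() and writeLongitude() above emit fixed-width ISO-6709 coordinate fields ("+DD.DDDD" and "+DDD.DDDD") from the x10000 fixed-point values accepted by setGeoData(). Below is a compact sketch of the combined string; it is illustrative only, the consuming udta box layout is not shown in this hunk, and the trailing '/' is the ISO-6709 terminator rather than something guaranteed by this code.

    // Sketch only: format an ISO-6709 "+DD.DDDD+DDD.DDDD/" string from the
    // same x10000 fixed-point convention used by setGeoData(). Hypothetical helper.
    #include <stdio.h>

    static void formatIso6709(int latitudex10000, int longitudex10000, char out[20]) {
        // 8 chars latitude + 9 chars longitude + '/' + NUL = 19 bytes.
        snprintf(out, 20, "%+08.4f%+09.4f/",
                 latitudex10000 / 10000.0, longitudex10000 / 10000.0);
    }
    // Example: (374221, -1220842) -> "+37.4221-122.0842/"
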
@@ -1156,18 +1306,13 @@
void MPEG4Writer::writeAllChunks() {
LOGV("writeAllChunks");
size_t outstandingChunks = 0;
- while (!mChunkInfos.empty()) {
- List<ChunkInfo>::iterator it = mChunkInfos.begin();
- while (!it->mChunks.empty()) {
- Chunk chunk;
- if (findChunkToWrite(&chunk)) {
- writeChunkToFile(&chunk);
- ++outstandingChunks;
- }
- }
- it->mTrack = NULL;
- mChunkInfos.erase(it);
+ Chunk chunk;
+ while (findChunkToWrite(&chunk)) {
+ ++outstandingChunks;
}
+
+ sendSessionSummary();
+
mChunkInfos.clear();
LOGD("%d chunks are written in the last batch", outstandingChunks);
}
@@ -1175,8 +1320,6 @@
bool MPEG4Writer::findChunkToWrite(Chunk *chunk) {
LOGV("findChunkToWrite");
- // Find the smallest timestamp, and write that chunk out
- // XXX: What if some track is just too slow?
int64_t minTimestampUs = 0x7FFFFFFFFFFFFFFFLL;
Track *track = NULL;
for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
@@ -1205,6 +1348,13 @@
*chunk = *(it->mChunks.begin());
it->mChunks.erase(it->mChunks.begin());
CHECK_EQ(chunk->mTrack, track);
+
+ int64_t interChunkTimeUs =
+ chunk->mTimeStampUs - it->mPrevChunkTimestampUs;
+ if (interChunkTimeUs > it->mPrevChunkTimestampUs) {
+ it->mMaxInterChunkDurUs = interChunkTimeUs;
+ }
+
return true;
}
}
@@ -1248,6 +1398,8 @@
it != mTracks.end(); ++it) {
ChunkInfo info;
info.mTrack = *it;
+ info.mPrevChunkTimestampUs = 0;
+ info.mMaxInterChunkDurUs = 0;
mChunkInfos.push_back(info);
}
@@ -1271,6 +1423,7 @@
if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
startTimeUs = 0;
}
+ mStartTimeRealUs = startTimeUs;
int32_t rotationDegrees;
if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
@@ -1295,10 +1448,15 @@
* session, and it also helps eliminate the "recording" sound for
* camcorder applications.
*
- * Ideally, this platform-specific value should be defined
- * in media_profiles.xml file
+ * If client does not set the start time offset, we fall back to
+ * use the default initial delay value.
*/
- startTimeUs += 700000;
+ int64_t startTimeOffsetUs = mOwner->getStartTimeOffsetMs() * 1000LL;
+ if (startTimeOffsetUs < 0) { // Start time offset was not set
+ startTimeOffsetUs = kInitialDelayTimeUs;
+ }
+ startTimeUs += startTimeOffsetUs;
+ LOGI("Start time offset: %lld us", startTimeOffsetUs);
}
meta->setInt64(kKeyTime, startTimeUs);
@@ -1329,6 +1487,7 @@
mPrevMediaTimeAdjustSample = 0;
mTotalDriftTimeToAdjustUs = 0;
mPrevTotalAccumDriftTimeUs = 0;
+ mMaxChunkDurationUs = 0;
pthread_create(&mThread, &attr, ThreadWrapper, this);
pthread_attr_destroy(&attr);
@@ -1766,6 +1925,7 @@
status_t MPEG4Writer::Track::threadEntry() {
int32_t count = 0;
const int64_t interleaveDurationUs = mOwner->interleaveDuration();
+ const bool hasMultipleTracks = (mOwner->numTracks() > 1);
int64_t chunkTimestampUs = 0;
int32_t nChunks = 0;
int32_t nZeroLengthFrames = 0;
@@ -1903,7 +2063,8 @@
LOGV("%s timestampUs: %lld", mIsAudio? "Audio": "Video", timestampUs);
////////////////////////////////////////////////////////////////////////////////
- if (mSampleSizes.empty()) {
+ if (mNumSamples == 0) {
+ mFirstSampleTimeRealUs = systemTime() / 1000;
mStartTimestampUs = timestampUs;
mOwner->setStartTimestampUs(mStartTimestampUs);
previousPausedDurationUs = mStartTimestampUs;
@@ -1998,7 +2159,7 @@
}
trackProgressStatus(timestampUs);
}
- if (mOwner->numTracks() == 1) {
+ if (!hasMultipleTracks) {
off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
: mOwner->addSample_l(copy);
if (mChunkOffsets.empty()) {
@@ -2017,7 +2178,11 @@
if (chunkTimestampUs == 0) {
chunkTimestampUs = timestampUs;
} else {
- if (timestampUs - chunkTimestampUs > interleaveDurationUs) {
+ int64_t chunkDurationUs = timestampUs - chunkTimestampUs;
+ if (chunkDurationUs > interleaveDurationUs) {
+ if (chunkDurationUs > mMaxChunkDurationUs) {
+ mMaxChunkDurationUs = chunkDurationUs;
+ }
++nChunks;
if (nChunks == 1 || // First chunk
(--(mStscTableEntries.end()))->samplesPerChunk !=
@@ -2040,7 +2205,7 @@
mOwner->trackProgressStatus(mTrackId, -1, err);
// Last chunk
- if (mOwner->numTracks() == 1) {
+ if (!hasMultipleTracks) {
addOneStscTableEntry(1, mNumSamples);
} else if (!mChunkSamples.empty()) {
addOneStscTableEntry(++nChunks, mChunkSamples.size());
@@ -2067,6 +2232,9 @@
mTrackDurationUs += lastDurationUs;
mReachedEOS = true;
+
+ sendTrackSummary(hasMultipleTracks);
+
LOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s",
count, nZeroLengthFrames, mNumSamples, mIsAudio? "audio": "video");
if (mIsAudio) {
@@ -2079,6 +2247,61 @@
return err;
}
+void MPEG4Writer::Track::sendTrackSummary(bool hasMultipleTracks) {
+
+ // Send track summary only if test mode is enabled.
+ if (!isTestModeEnabled()) {
+ return;
+ }
+
+ int trackNum = (mTrackId << 28);
+
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_TYPE,
+ mIsAudio? 0: 1);
+
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_DURATION_MS,
+ mTrackDurationUs / 1000);
+
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_ENCODED_FRAMES,
+ mNumSamples);
+
+ {
+ // The system delay time excluding the requested initial delay that
+ // is used to eliminate the recording sound.
+ int64_t startTimeOffsetUs = mOwner->getStartTimeOffsetMs() * 1000LL;
+ if (startTimeOffsetUs < 0) { // Start time offset was not set
+ startTimeOffsetUs = kInitialDelayTimeUs;
+ }
+ int64_t initialDelayUs =
+ mFirstSampleTimeRealUs - mStartTimeRealUs - startTimeOffsetUs;
+
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_INITIAL_DELAY_MS,
+ (initialDelayUs) / 1000);
+ }
+
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_DATA_KBYTES,
+ mMdatSizeBytes / 1024);
+
+ if (hasMultipleTracks) {
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_MAX_CHUNK_DUR_MS,
+ mMaxChunkDurationUs / 1000);
+
+ int64_t moovStartTimeUs = mOwner->getStartTimestampUs();
+ if (mStartTimestampUs != moovStartTimeUs) {
+ int64_t startTimeOffsetUs = mStartTimestampUs - moovStartTimeUs;
+ mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_TRACK_INFO_START_OFFSET_MS,
+ startTimeOffsetUs / 1000);
+ }
+ }
+}
+
void MPEG4Writer::Track::trackProgressStatus(int64_t timeUs, status_t err) {
LOGV("trackProgressStatus: %lld us", timeUs);
if (mTrackEveryTimeDurationUs > 0 &&
@@ -2169,388 +2392,484 @@
return OK;
}
-void MPEG4Writer::Track::writeTrackHeader(
- int32_t trackID, bool use32BitOffset) {
- const char *mime;
- bool success = mMeta->findCString(kKeyMIMEType, &mime);
- CHECK(success);
+void MPEG4Writer::Track::writeTrackHeader(bool use32BitOffset) {
LOGV("%s track time scale: %d",
mIsAudio? "Audio": "Video", mTimeScale);
time_t now = time(NULL);
- int32_t mvhdTimeScale = mOwner->getTimeScale();
+ mOwner->beginBox("trak");
+ writeTkhdBox(now);
+ mOwner->beginBox("mdia");
+ writeMdhdBox(now);
+ writeHdlrBox();
+ mOwner->beginBox("minf");
+ if (mIsAudio) {
+ writeSmhdBox();
+ } else {
+ writeVmhdBox();
+ }
+ writeDinfBox();
+ writeStblBox(use32BitOffset);
+ mOwner->endBox(); // minf
+ mOwner->endBox(); // mdia
+ mOwner->endBox(); // trak
+}
+
+void MPEG4Writer::Track::writeStblBox(bool use32BitOffset) {
+ mOwner->beginBox("stbl");
+ mOwner->beginBox("stsd");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(1); // entry count
+ if (mIsAudio) {
+ writeAudioFourCCBox();
+ } else {
+ writeVideoFourCCBox();
+ }
+ mOwner->endBox(); // stsd
+ writeSttsBox();
+ if (!mIsAudio) {
+ writeStssBox();
+ }
+ writeStszBox();
+ writeStscBox();
+ writeStcoBox(use32BitOffset);
+ mOwner->endBox(); // stbl
+}
+
+void MPEG4Writer::Track::writeVideoFourCCBox() {
+ const char *mime;
+ bool success = mMeta->findCString(kKeyMIMEType, &mime);
+ CHECK(success);
+ if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
+ mOwner->beginBox("mp4v");
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+ mOwner->beginBox("s263");
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+ mOwner->beginBox("avc1");
+ } else {
+ LOGE("Unknown mime type '%s'.", mime);
+ CHECK(!"should not be here, unknown mime type.");
+ }
+
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt16(0); // reserved
+ mOwner->writeInt16(1); // data ref index
+ mOwner->writeInt16(0); // predefined
+ mOwner->writeInt16(0); // reserved
+ mOwner->writeInt32(0); // predefined
+ mOwner->writeInt32(0); // predefined
+ mOwner->writeInt32(0); // predefined
+
+ int32_t width, height;
+ success = mMeta->findInt32(kKeyWidth, &width);
+ success = success && mMeta->findInt32(kKeyHeight, &height);
+ CHECK(success);
+
+ mOwner->writeInt16(width);
+ mOwner->writeInt16(height);
+ mOwner->writeInt32(0x480000); // horiz resolution
+ mOwner->writeInt32(0x480000); // vert resolution
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt16(1); // frame count
+ mOwner->write(" ", 32);
+ mOwner->writeInt16(0x18); // depth
+ mOwner->writeInt16(-1); // predefined
+
+ CHECK(23 + mCodecSpecificDataSize < 128);
+
+ if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
+ writeMp4vEsdsBox();
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+ writeD263Box();
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+ writeAvccBox();
+ }
+
+ writePaspBox();
+ mOwner->endBox(); // mp4v, s263 or avc1
+}
+
+void MPEG4Writer::Track::writeAudioFourCCBox() {
+ const char *mime;
+ bool success = mMeta->findCString(kKeyMIMEType, &mime);
+ CHECK(success);
+ const char *fourcc = NULL;
+ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) {
+ fourcc = "samr";
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) {
+ fourcc = "sawb";
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) {
+ fourcc = "mp4a";
+ } else {
+ LOGE("Unknown mime type '%s'.", mime);
+ CHECK(!"should not be here, unknown mime type.");
+ }
+
+ mOwner->beginBox(fourcc); // audio format
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt16(0); // reserved
+ mOwner->writeInt16(0x1); // data ref index
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt32(0); // reserved
+ int32_t nChannels;
+ CHECK_EQ(true, mMeta->findInt32(kKeyChannelCount, &nChannels));
+ mOwner->writeInt16(nChannels); // channel count
+ mOwner->writeInt16(16); // sample size
+ mOwner->writeInt16(0); // predefined
+ mOwner->writeInt16(0); // reserved
+
+ int32_t samplerate;
+ success = mMeta->findInt32(kKeySampleRate, &samplerate);
+ CHECK(success);
+ mOwner->writeInt32(samplerate << 16);
+ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) {
+ writeMp4aEsdsBox();
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime) ||
+ !strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) {
+ writeDamrBox();
+ }
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeMp4aEsdsBox() {
+ mOwner->beginBox("esds");
+ CHECK(mCodecSpecificData);
+ CHECK(mCodecSpecificDataSize > 0);
+
+ // Make sure all sizes encode to a single byte.
+ CHECK(mCodecSpecificDataSize + 23 < 128);
+
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt8(0x03); // ES_DescrTag
+ mOwner->writeInt8(23 + mCodecSpecificDataSize);
+ mOwner->writeInt16(0x0000);// ES_ID
+ mOwner->writeInt8(0x00);
+
+ mOwner->writeInt8(0x04); // DecoderConfigDescrTag
+ mOwner->writeInt8(15 + mCodecSpecificDataSize);
+ mOwner->writeInt8(0x40); // objectTypeIndication ISO/IEC 14492-2
+ mOwner->writeInt8(0x15); // streamType AudioStream
+
+ mOwner->writeInt16(0x03); // XXX
+ mOwner->writeInt8(0x00); // buffer size 24-bit
+ mOwner->writeInt32(96000); // max bit rate
+ mOwner->writeInt32(96000); // avg bit rate
+
+ mOwner->writeInt8(0x05); // DecoderSpecificInfoTag
+ mOwner->writeInt8(mCodecSpecificDataSize);
+ mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+
+ static const uint8_t kData2[] = {
+ 0x06, // SLConfigDescriptorTag
+ 0x01,
+ 0x02
+ };
+ mOwner->write(kData2, sizeof(kData2));
+
+ mOwner->endBox(); // esds
+}
+
+void MPEG4Writer::Track::writeMp4vEsdsBox() {
+ CHECK(mCodecSpecificData);
+ CHECK(mCodecSpecificDataSize > 0);
+ mOwner->beginBox("esds");
+
+ mOwner->writeInt32(0); // version=0, flags=0
+
+ mOwner->writeInt8(0x03); // ES_DescrTag
+ mOwner->writeInt8(23 + mCodecSpecificDataSize);
+ mOwner->writeInt16(0x0000); // ES_ID
+ mOwner->writeInt8(0x1f);
+
+ mOwner->writeInt8(0x04); // DecoderConfigDescrTag
+ mOwner->writeInt8(15 + mCodecSpecificDataSize);
+ mOwner->writeInt8(0x20); // objectTypeIndication ISO/IEC 14492-2
+ mOwner->writeInt8(0x11); // streamType VisualStream
+
+ static const uint8_t kData[] = {
+ 0x01, 0x77, 0x00,
+ 0x00, 0x03, 0xe8, 0x00,
+ 0x00, 0x03, 0xe8, 0x00
+ };
+ mOwner->write(kData, sizeof(kData));
+
+ mOwner->writeInt8(0x05); // DecoderSpecificInfoTag
+
+ mOwner->writeInt8(mCodecSpecificDataSize);
+ mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+
+ static const uint8_t kData2[] = {
+ 0x06, // SLConfigDescriptorTag
+ 0x01,
+ 0x02
+ };
+ mOwner->write(kData2, sizeof(kData2));
+
+ mOwner->endBox(); // esds
+}
+
+void MPEG4Writer::Track::writeTkhdBox(time_t now) {
+ mOwner->beginBox("tkhd");
+ // Flags = 7 to indicate that the track is enabled, and
+ // part of the presentation
+ mOwner->writeInt32(0x07); // version=0, flags=7
+ mOwner->writeInt32(now); // creation time
+ mOwner->writeInt32(now); // modification time
+ mOwner->writeInt32(mTrackId + 1); // track id starts with 1
+ mOwner->writeInt32(0); // reserved
int64_t trakDurationUs = getDurationUs();
+ int32_t mvhdTimeScale = mOwner->getTimeScale();
+ int32_t tkhdDuration =
+ (trakDurationUs * mvhdTimeScale + 5E5) / 1E6;
+ mOwner->writeInt32(tkhdDuration); // in mvhd timescale
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt16(0); // layer
+ mOwner->writeInt16(0); // alternate group
+ mOwner->writeInt16(mIsAudio ? 0x100 : 0); // volume
+ mOwner->writeInt16(0); // reserved
+
+ mOwner->writeCompositionMatrix(mRotation); // matrix
+
+ if (mIsAudio) {
+ mOwner->writeInt32(0);
+ mOwner->writeInt32(0);
+ } else {
+ int32_t width, height;
+ bool success = mMeta->findInt32(kKeyWidth, &width);
+ success = success && mMeta->findInt32(kKeyHeight, &height);
+ CHECK(success);
+
+ mOwner->writeInt32(width << 16); // 32-bit fixed-point value
+ mOwner->writeInt32(height << 16); // 32-bit fixed-point value
+ }
+ mOwner->endBox(); // tkhd
+}
+
+void MPEG4Writer::Track::writeVmhdBox() {
+ mOwner->beginBox("vmhd");
+ mOwner->writeInt32(0x01); // version=0, flags=1
+ mOwner->writeInt16(0); // graphics mode
+ mOwner->writeInt16(0); // opcolor
+ mOwner->writeInt16(0);
+ mOwner->writeInt16(0);
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeSmhdBox() {
+ mOwner->beginBox("smhd");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt16(0); // balance
+ mOwner->writeInt16(0); // reserved
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeHdlrBox() {
+ mOwner->beginBox("hdlr");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(0); // component type: should be mhlr
+ mOwner->writeFourcc(mIsAudio ? "soun" : "vide"); // component subtype
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt32(0); // reserved
+ mOwner->writeInt32(0); // reserved
+ // Removing "r" for the name string just makes the string 4 byte aligned
+ mOwner->writeCString(mIsAudio ? "SoundHandle": "VideoHandle"); // name
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeMdhdBox(time_t now) {
+ int64_t trakDurationUs = getDurationUs();
+ mOwner->beginBox("mdhd");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(now); // creation time
+ mOwner->writeInt32(now); // modification time
+ mOwner->writeInt32(mTimeScale); // media timescale
+ int32_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
+ mOwner->writeInt32(mdhdDuration); // use media timescale
+ // Language follows the three letter standard ISO-639-2/T
+ // 'e', 'n', 'g' for "English", for instance.
+ // Each character is packed as the difference between its ASCII value and 0x60.
+ // For "English", these are 00101, 01110, 00111.
+ // XXX: Where is the padding bit located: 0x15C7?
+ mOwner->writeInt16(0); // language code
+ mOwner->writeInt16(0); // predefined
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeDamrBox() {
+ // 3gpp2 Spec AMRSampleEntry fields
+ mOwner->beginBox("damr");
+ mOwner->writeCString(" "); // vendor: 4 bytes
+ mOwner->writeInt8(0); // decoder version
+ mOwner->writeInt16(0x83FF); // mode set: all enabled
+ mOwner->writeInt8(0); // mode change period
+ mOwner->writeInt8(1); // frames per sample
+ mOwner->endBox();
+}
+
+void MPEG4Writer::Track::writeUrlBox() {
+ // The table index here refers to the sample description index
+ // in the sample table entries.
+ mOwner->beginBox("url ");
+ mOwner->writeInt32(1); // version=0, flags=1 (self-contained)
+ mOwner->endBox(); // url
+}
+
+void MPEG4Writer::Track::writeDrefBox() {
+ mOwner->beginBox("dref");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(1); // entry count (either url or urn)
+ writeUrlBox();
+ mOwner->endBox(); // dref
+}
+
+void MPEG4Writer::Track::writeDinfBox() {
+ mOwner->beginBox("dinf");
+ writeDrefBox();
+ mOwner->endBox(); // dinf
+}
+
+void MPEG4Writer::Track::writeAvccBox() {
+ CHECK(mCodecSpecificData);
+ CHECK(mCodecSpecificDataSize >= 5);
+
+ // Patch avcc's lengthSize field to match the number
+ // of bytes we use to indicate the size of a nal unit.
+ uint8_t *ptr = (uint8_t *)mCodecSpecificData;
+ ptr[4] = (ptr[4] & 0xfc) | (mOwner->useNalLengthFour() ? 3 : 1);
+ mOwner->beginBox("avcC");
+ mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+ mOwner->endBox(); // avcC
+}
+
+void MPEG4Writer::Track::writeD263Box() {
+ mOwner->beginBox("d263");
+ mOwner->writeInt32(0); // vendor
+ mOwner->writeInt8(0); // decoder version
+ mOwner->writeInt8(10); // level: 10
+ mOwner->writeInt8(0); // profile: 0
+ mOwner->endBox(); // d263
+}
+
+// This is useful if the pixel is not square
+void MPEG4Writer::Track::writePaspBox() {
+ mOwner->beginBox("pasp");
+ mOwner->writeInt32(1 << 16); // hspacing
+ mOwner->writeInt32(1 << 16); // vspacing
+ mOwner->endBox(); // pasp
+}
+
+void MPEG4Writer::Track::writeSttsBox() {
+ mOwner->beginBox("stts");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(mNumSttsTableEntries);
// Compensate for small start time difference from different media tracks
int64_t trackStartTimeOffsetUs = 0;
+ int64_t moovStartTimeUs = mOwner->getStartTimestampUs();
+ if (mStartTimestampUs != moovStartTimeUs) {
+ CHECK(mStartTimestampUs > moovStartTimeUs);
+ trackStartTimeOffsetUs = mStartTimestampUs - moovStartTimeUs;
+ }
+ int64_t prevTimestampUs = trackStartTimeOffsetUs;
+ for (List<SttsTableEntry>::iterator it = mSttsTableEntries.begin();
+ it != mSttsTableEntries.end(); ++it) {
+ mOwner->writeInt32(it->sampleCount);
- mOwner->beginBox("trak");
+ // Make sure that we calculate the sample duration in exactly the
+ // same way as when we decided how to create the stts entries.
+ int64_t currTimestampUs = prevTimestampUs + it->sampleDurationUs;
+ int32_t dur = ((currTimestampUs * mTimeScale + 500000LL) / 1000000LL -
+ (prevTimestampUs * mTimeScale + 500000LL) / 1000000LL);
+ prevTimestampUs += (it->sampleCount * it->sampleDurationUs);
- mOwner->beginBox("tkhd");
- // Flags = 7 to indicate that the track is enabled, and
- // part of the presentation
- mOwner->writeInt32(0x07); // version=0, flags=7
- mOwner->writeInt32(now); // creation time
- mOwner->writeInt32(now); // modification time
- mOwner->writeInt32(trackID);
- mOwner->writeInt32(0); // reserved
- int32_t tkhdDuration =
- (trakDurationUs * mvhdTimeScale + 5E5) / 1E6;
- mOwner->writeInt32(tkhdDuration); // in mvhd timescale
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt16(0); // layer
- mOwner->writeInt16(0); // alternate group
- mOwner->writeInt16(mIsAudio ? 0x100 : 0); // volume
- mOwner->writeInt16(0); // reserved
+ mOwner->writeInt32(dur);
+ }
+ mOwner->endBox(); // stts
+}
- mOwner->writeCompositionMatrix(mRotation); // matrix
+void MPEG4Writer::Track::writeStssBox() {
+ mOwner->beginBox("stss");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(mNumStssTableEntries); // number of sync frames
+ for (List<int32_t>::iterator it = mStssTableEntries.begin();
+ it != mStssTableEntries.end(); ++it) {
+ mOwner->writeInt32(*it);
+ }
+ mOwner->endBox(); // stss
+}
- if (mIsAudio) {
- mOwner->writeInt32(0);
- mOwner->writeInt32(0);
- } else {
- int32_t width, height;
- bool success = mMeta->findInt32(kKeyWidth, &width);
- success = success && mMeta->findInt32(kKeyHeight, &height);
- CHECK(success);
-
- mOwner->writeInt32(width << 16); // 32-bit fixed-point value
- mOwner->writeInt32(height << 16); // 32-bit fixed-point value
+void MPEG4Writer::Track::writeStszBox() {
+ mOwner->beginBox("stsz");
+ mOwner->writeInt32(0); // version=0, flags=0
+ if (mSamplesHaveSameSize) {
+ List<size_t>::iterator it = mSampleSizes.begin();
+ mOwner->writeInt32(*it); // default sample size
+ } else {
+ mOwner->writeInt32(0);
+ }
+ mOwner->writeInt32(mNumSamples);
+ if (!mSamplesHaveSameSize) {
+ for (List<size_t>::iterator it = mSampleSizes.begin();
+ it != mSampleSizes.end(); ++it) {
+ mOwner->writeInt32(*it);
}
- mOwner->endBox(); // tkhd
+ }
+ mOwner->endBox(); // stsz
+}
- int64_t moovStartTimeUs = mOwner->getStartTimestampUs();
- if (mStartTimestampUs != moovStartTimeUs) {
- CHECK(mStartTimestampUs > moovStartTimeUs);
- trackStartTimeOffsetUs = mStartTimestampUs - moovStartTimeUs;
- }
+void MPEG4Writer::Track::writeStscBox() {
+ mOwner->beginBox("stsc");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(mNumStscTableEntries);
+ for (List<StscTableEntry>::iterator it = mStscTableEntries.begin();
+ it != mStscTableEntries.end(); ++it) {
+ mOwner->writeInt32(it->firstChunk);
+ mOwner->writeInt32(it->samplesPerChunk);
+ mOwner->writeInt32(it->sampleDescriptionId);
+ }
+ mOwner->endBox(); // stsc
+}
- mOwner->beginBox("mdia");
+void MPEG4Writer::Track::writeStcoBox(bool use32BitOffset) {
+ mOwner->beginBox(use32BitOffset? "stco": "co64");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(mNumStcoTableEntries);
+ for (List<off64_t>::iterator it = mChunkOffsets.begin();
+ it != mChunkOffsets.end(); ++it) {
+ if (use32BitOffset) {
+ mOwner->writeInt32(static_cast<int32_t>(*it));
+ } else {
+ mOwner->writeInt64((*it));
+ }
+ }
+ mOwner->endBox(); // stco or co64
+}
- mOwner->beginBox("mdhd");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(now); // creation time
- mOwner->writeInt32(now); // modification time
- mOwner->writeInt32(mTimeScale); // media timescale
- int32_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
- mOwner->writeInt32(mdhdDuration); // use media timescale
- // Language follows the three letter standard ISO-639-2/T
- // 'e', 'n', 'g' for "English", for instance.
- // Each character is packed as the difference between its ASCII value and 0x60.
- // For "English", these are 00101, 01110, 00111.
- // XXX: Where is the padding bit located: 0x15C7?
- mOwner->writeInt16(0); // language code
- mOwner->writeInt16(0); // predefined
- mOwner->endBox();
+void MPEG4Writer::writeUdtaBox() {
+ beginBox("udta");
+ writeGeoDataBox();
+ endBox();
+}
- mOwner->beginBox("hdlr");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(0); // component type: should be mhlr
- mOwner->writeFourcc(mIsAudio ? "soun" : "vide"); // component subtype
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt32(0); // reserved
- // Removing "r" for the name string just makes the string 4 byte aligned
- mOwner->writeCString(mIsAudio ? "SoundHandle": "VideoHandle"); // name
- mOwner->endBox();
-
- mOwner->beginBox("minf");
- if (mIsAudio) {
- mOwner->beginBox("smhd");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt16(0); // balance
- mOwner->writeInt16(0); // reserved
- mOwner->endBox();
- } else {
- mOwner->beginBox("vmhd");
- mOwner->writeInt32(0x01); // version=0, flags=1
- mOwner->writeInt16(0); // graphics mode
- mOwner->writeInt16(0); // opcolor
- mOwner->writeInt16(0);
- mOwner->writeInt16(0);
- mOwner->endBox();
- }
-
- mOwner->beginBox("dinf");
- mOwner->beginBox("dref");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(1); // entry count (either url or urn)
- // The table index here refers to the sample description index
- // in the sample table entries.
- mOwner->beginBox("url ");
- mOwner->writeInt32(1); // version=0, flags=1 (self-contained)
- mOwner->endBox(); // url
- mOwner->endBox(); // dref
- mOwner->endBox(); // dinf
-
- mOwner->beginBox("stbl");
-
- mOwner->beginBox("stsd");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(1); // entry count
- if (mIsAudio) {
- const char *fourcc = NULL;
- if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) {
- fourcc = "samr";
- } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) {
- fourcc = "sawb";
- } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) {
- fourcc = "mp4a";
- } else {
- LOGE("Unknown mime type '%s'.", mime);
- CHECK(!"should not be here, unknown mime type.");
- }
-
- mOwner->beginBox(fourcc); // audio format
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt16(0); // reserved
- mOwner->writeInt16(0x1); // data ref index
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt32(0); // reserved
- int32_t nChannels;
- CHECK_EQ(true, mMeta->findInt32(kKeyChannelCount, &nChannels));
- mOwner->writeInt16(nChannels); // channel count
- mOwner->writeInt16(16); // sample size
- mOwner->writeInt16(0); // predefined
- mOwner->writeInt16(0); // reserved
-
- int32_t samplerate;
- bool success = mMeta->findInt32(kKeySampleRate, &samplerate);
- CHECK(success);
- mOwner->writeInt32(samplerate << 16);
- if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) {
- mOwner->beginBox("esds");
- CHECK(mCodecSpecificData);
- CHECK(mCodecSpecificDataSize > 0);
-
- // Make sure all sizes encode to a single byte.
- CHECK(mCodecSpecificDataSize + 23 < 128);
-
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt8(0x03); // ES_DescrTag
- mOwner->writeInt8(23 + mCodecSpecificDataSize);
- mOwner->writeInt16(0x0000);// ES_ID
- mOwner->writeInt8(0x00);
-
- mOwner->writeInt8(0x04); // DecoderConfigDescrTag
- mOwner->writeInt8(15 + mCodecSpecificDataSize);
- mOwner->writeInt8(0x40); // objectTypeIndication ISO/IEC 14492-2
- mOwner->writeInt8(0x15); // streamType AudioStream
-
- mOwner->writeInt16(0x03); // XXX
- mOwner->writeInt8(0x00); // buffer size 24-bit
- mOwner->writeInt32(96000); // max bit rate
- mOwner->writeInt32(96000); // avg bit rate
-
- mOwner->writeInt8(0x05); // DecoderSpecificInfoTag
- mOwner->writeInt8(mCodecSpecificDataSize);
- mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
-
- static const uint8_t kData2[] = {
- 0x06, // SLConfigDescriptorTag
- 0x01,
- 0x02
- };
- mOwner->write(kData2, sizeof(kData2));
-
- mOwner->endBox(); // esds
- } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime) ||
- !strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) {
- // 3gpp2 Spec AMRSampleEntry fields
- mOwner->beginBox("damr");
- mOwner->writeCString(" "); // vendor: 4 bytes
- mOwner->writeInt8(0); // decoder version
- mOwner->writeInt16(0x83FF); // mode set: all enabled
- mOwner->writeInt8(0); // mode change period
- mOwner->writeInt8(1); // frames per sample
- mOwner->endBox();
- }
- mOwner->endBox();
- } else {
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
- mOwner->beginBox("mp4v");
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
- mOwner->beginBox("s263");
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
- mOwner->beginBox("avc1");
- } else {
- LOGE("Unknown mime type '%s'.", mime);
- CHECK(!"should not be here, unknown mime type.");
- }
-
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt16(0); // reserved
- mOwner->writeInt16(1); // data ref index
- mOwner->writeInt16(0); // predefined
- mOwner->writeInt16(0); // reserved
- mOwner->writeInt32(0); // predefined
- mOwner->writeInt32(0); // predefined
- mOwner->writeInt32(0); // predefined
-
- int32_t width, height;
- bool success = mMeta->findInt32(kKeyWidth, &width);
- success = success && mMeta->findInt32(kKeyHeight, &height);
- CHECK(success);
-
- mOwner->writeInt16(width);
- mOwner->writeInt16(height);
- mOwner->writeInt32(0x480000); // horiz resolution
- mOwner->writeInt32(0x480000); // vert resolution
- mOwner->writeInt32(0); // reserved
- mOwner->writeInt16(1); // frame count
- mOwner->write(" ", 32);
- mOwner->writeInt16(0x18); // depth
- mOwner->writeInt16(-1); // predefined
-
- CHECK(23 + mCodecSpecificDataSize < 128);
-
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
- CHECK(mCodecSpecificData);
- CHECK(mCodecSpecificDataSize > 0);
- mOwner->beginBox("esds");
-
- mOwner->writeInt32(0); // version=0, flags=0
-
- mOwner->writeInt8(0x03); // ES_DescrTag
- mOwner->writeInt8(23 + mCodecSpecificDataSize);
- mOwner->writeInt16(0x0000); // ES_ID
- mOwner->writeInt8(0x1f);
-
- mOwner->writeInt8(0x04); // DecoderConfigDescrTag
- mOwner->writeInt8(15 + mCodecSpecificDataSize);
- mOwner->writeInt8(0x20); // objectTypeIndication ISO/IEC 14492-2
- mOwner->writeInt8(0x11); // streamType VisualStream
-
- static const uint8_t kData[] = {
- 0x01, 0x77, 0x00,
- 0x00, 0x03, 0xe8, 0x00,
- 0x00, 0x03, 0xe8, 0x00
- };
- mOwner->write(kData, sizeof(kData));
-
- mOwner->writeInt8(0x05); // DecoderSpecificInfoTag
-
- mOwner->writeInt8(mCodecSpecificDataSize);
- mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
-
- static const uint8_t kData2[] = {
- 0x06, // SLConfigDescriptorTag
- 0x01,
- 0x02
- };
- mOwner->write(kData2, sizeof(kData2));
-
- mOwner->endBox(); // esds
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
- mOwner->beginBox("d263");
-
- mOwner->writeInt32(0); // vendor
- mOwner->writeInt8(0); // decoder version
- mOwner->writeInt8(10); // level: 10
- mOwner->writeInt8(0); // profile: 0
-
- mOwner->endBox(); // d263
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
- CHECK(mCodecSpecificData);
- CHECK(mCodecSpecificDataSize >= 5);
-
- // Patch avcc's lengthSize field to match the number
- // of bytes we use to indicate the size of a nal unit.
- uint8_t *ptr = (uint8_t *)mCodecSpecificData;
- ptr[4] =
- (ptr[4] & 0xfc)
- | (mOwner->useNalLengthFour() ? 3 : 1);
-
- mOwner->beginBox("avcC");
- mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
- mOwner->endBox(); // avcC
- }
-
- mOwner->beginBox("pasp");
- // This is useful if the pixel is not square
- mOwner->writeInt32(1 << 16); // hspacing
- mOwner->writeInt32(1 << 16); // vspacing
- mOwner->endBox(); // pasp
- mOwner->endBox(); // mp4v, s263 or avc1
- }
- mOwner->endBox(); // stsd
-
- mOwner->beginBox("stts");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(mNumSttsTableEntries);
- int64_t prevTimestampUs = trackStartTimeOffsetUs;
- for (List<SttsTableEntry>::iterator it = mSttsTableEntries.begin();
- it != mSttsTableEntries.end(); ++it) {
- mOwner->writeInt32(it->sampleCount);
-
- // Make sure that we are calculating the sample duration the exactly
- // same way as we made decision on how to create stts entries.
- int64_t currTimestampUs = prevTimestampUs + it->sampleDurationUs;
- int32_t dur = ((currTimestampUs * mTimeScale + 500000LL) / 1000000LL -
- (prevTimestampUs * mTimeScale + 500000LL) / 1000000LL);
- prevTimestampUs += (it->sampleCount * it->sampleDurationUs);
-
- mOwner->writeInt32(dur);
- }
- mOwner->endBox(); // stts
-
- if (!mIsAudio) {
- mOwner->beginBox("stss");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(mNumStssTableEntries); // number of sync frames
- for (List<int32_t>::iterator it = mStssTableEntries.begin();
- it != mStssTableEntries.end(); ++it) {
- mOwner->writeInt32(*it);
- }
- mOwner->endBox(); // stss
- }
-
- mOwner->beginBox("stsz");
- mOwner->writeInt32(0); // version=0, flags=0
- if (mSamplesHaveSameSize) {
- List<size_t>::iterator it = mSampleSizes.begin();
- mOwner->writeInt32(*it); // default sample size
- } else {
- mOwner->writeInt32(0);
- }
- mOwner->writeInt32(mNumSamples);
- if (!mSamplesHaveSameSize) {
- for (List<size_t>::iterator it = mSampleSizes.begin();
- it != mSampleSizes.end(); ++it) {
- mOwner->writeInt32(*it);
- }
- }
- mOwner->endBox(); // stsz
-
- mOwner->beginBox("stsc");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(mNumStscTableEntries);
- for (List<StscTableEntry>::iterator it = mStscTableEntries.begin();
- it != mStscTableEntries.end(); ++it) {
- mOwner->writeInt32(it->firstChunk);
- mOwner->writeInt32(it->samplesPerChunk);
- mOwner->writeInt32(it->sampleDescriptionId);
- }
- mOwner->endBox(); // stsc
- mOwner->beginBox(use32BitOffset? "stco": "co64");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(mNumStcoTableEntries);
- for (List<off64_t>::iterator it = mChunkOffsets.begin();
- it != mChunkOffsets.end(); ++it) {
- if (use32BitOffset) {
- mOwner->writeInt32(static_cast<int32_t>(*it));
- } else {
- mOwner->writeInt64((*it));
- }
- }
- mOwner->endBox(); // stco or co64
-
- mOwner->endBox(); // stbl
- mOwner->endBox(); // minf
- mOwner->endBox(); // mdia
- mOwner->endBox(); // trak
+/*
+ * Geodata is stored according to ISO-6709 standard.
+ */
+void MPEG4Writer::writeGeoDataBox() {
+ beginBox("\xA9xyz");
+ /*
+ * For historical reasons, any user data box whose type starts
+ * with "\xA9" must be followed by its associated language code.
+ * 0x0012: text string length
+ * 0x15c7: language code ('eng' packed per ISO-639-2/T)
+ */
+ writeInt32(0x001215c7);
+ writeLatitude(mLatitudex10000);
+ writeLongitude(mLongitudex10000);
+ writeInt8(0x2F);
+ endBox();
}
} // namespace android
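The writeMdhdBox() comment above asks where the padding bit of the packed language code sits. Each ISO-639-2/T character is stored in 5 bits as (ASCII value - 0x60), the three values are packed into the low 15 bits, and the padding bit is the most significant bit of the 16-bit field. A minimal standalone sketch of that packing (the helper name is illustrative, not part of this change):

    #include <cassert>
    #include <cstdint>

    // Pack a 3-letter ISO-639-2/T code such as "eng" into the 16-bit
    // mdhd language field: pad bit (0) + three 5-bit (ASCII - 0x60) values.
    static uint16_t packIso639Language(const char code[3]) {
        uint16_t packed = 0;
        for (int i = 0; i < 3; ++i) {
            packed = (packed << 5) | ((code[i] - 0x60) & 0x1f);
        }
        return packed;  // MSB stays 0, i.e. the padding bit is the top bit.
    }

    int main() {
        // 'e'=00101, 'n'=01110, 'g'=00111 -> 0b0001010111000111 = 0x15C7.
        assert(packIso639Language("eng") == 0x15C7);
        return 0;
    }

This is also the 0x15c7 written by writeGeoDataBox() above: it is simply 'eng' packed the same way.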
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 8ca6ee8..8cd08bc 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -47,4 +47,6 @@
const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
+const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";
+
} // namespace android
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index dc86885..81f2e47 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -450,6 +450,11 @@
}
size_t avail = mCache->totalSize() - delta;
+
+ if (avail > size) {
+ avail = size;
+ }
+
mCache->copy(delta, data, avail);
return avail;
diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp
index 821ba9b..ce30fc8 100644
--- a/media/libstagefright/NuHTTPDataSource.cpp
+++ b/media/libstagefright/NuHTTPDataSource.cpp
@@ -392,6 +392,13 @@
Mutex::Autolock autoLock(mLock);
+ // If this is DRM container-based streaming, call the DRM plugin's
+ // pread() to get the decrypted data.
+ if (mDecryptHandle != NULL && DecryptApiType::CONTAINER_BASED
+ == mDecryptHandle->decryptApiType) {
+ return mDrmManagerClient->pread(mDecryptHandle, data, size, offset);
+ }
+
if (offset != mOffset) {
String8 host = mHost;
String8 path = mPath;
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 06352f4..0f0ffd4 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -53,7 +53,10 @@
#include <OMX_Audio.h>
#include <OMX_Component.h>
+#if HAVE_SOFTWARE_DECODERS
#include "include/ThreadedSource.h"
+#endif
+
#include "include/avc_utils.h"
namespace android {
@@ -65,11 +68,6 @@
const char *codec;
};
-#define FACTORY_CREATE(name) \
-static sp<MediaSource> Make##name(const sp<MediaSource> &source) { \
- return new name(source); \
-}
-
#define FACTORY_CREATE_ENCODER(name) \
static sp<MediaSource> Make##name(const sp<MediaSource> &source, const sp<MetaData> &meta) { \
return new name(source, meta); \
@@ -77,21 +75,30 @@
#define FACTORY_REF(name) { #name, Make##name },
-FACTORY_CREATE(MP3Decoder)
-FACTORY_CREATE(AMRNBDecoder)
-FACTORY_CREATE(AMRWBDecoder)
-FACTORY_CREATE(AACDecoder)
-FACTORY_CREATE(AVCDecoder)
-FACTORY_CREATE(G711Decoder)
-FACTORY_CREATE(M4vH263Decoder)
-FACTORY_CREATE(VorbisDecoder)
-FACTORY_CREATE(VPXDecoder)
FACTORY_CREATE_ENCODER(AMRNBEncoder)
FACTORY_CREATE_ENCODER(AMRWBEncoder)
FACTORY_CREATE_ENCODER(AACEncoder)
FACTORY_CREATE_ENCODER(AVCEncoder)
FACTORY_CREATE_ENCODER(M4vH263Encoder)
+#if HAVE_SOFTWARE_DECODERS
+
+#define FACTORY_CREATE(name) \
+static sp<MediaSource> Make##name(const sp<MediaSource> &source) { \
+ return new name(source); \
+}
+
+FACTORY_CREATE(AMRNBDecoder)
+FACTORY_CREATE(AMRWBDecoder)
+FACTORY_CREATE(AACDecoder)
+FACTORY_CREATE(AVCDecoder)
+FACTORY_CREATE(G711Decoder)
+FACTORY_CREATE(MP3Decoder)
+FACTORY_CREATE(M4vH263Decoder)
+FACTORY_CREATE(VorbisDecoder)
+FACTORY_CREATE(VPXDecoder)
+#endif
+
static sp<MediaSource> InstantiateSoftwareEncoder(
const char *name, const sp<MediaSource> &source,
const sp<MetaData> &meta) {
@@ -119,18 +126,19 @@
static sp<MediaSource> InstantiateSoftwareCodec(
const char *name, const sp<MediaSource> &source) {
+#if HAVE_SOFTWARE_DECODERS
struct FactoryInfo {
const char *name;
sp<MediaSource> (*CreateFunc)(const sp<MediaSource> &);
};
static const FactoryInfo kFactoryInfo[] = {
- FACTORY_REF(MP3Decoder)
FACTORY_REF(AMRNBDecoder)
FACTORY_REF(AMRWBDecoder)
FACTORY_REF(AACDecoder)
FACTORY_REF(AVCDecoder)
FACTORY_REF(G711Decoder)
+ FACTORY_REF(MP3Decoder)
FACTORY_REF(M4vH263Decoder)
FACTORY_REF(VorbisDecoder)
FACTORY_REF(VPXDecoder)
@@ -145,6 +153,7 @@
return (*kFactoryInfo[i].CreateFunc)(source);
}
}
+#endif
return NULL;
}
@@ -156,36 +165,47 @@
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.google.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
+ { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.google.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
+ { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "OMX.google.g711.alaw.decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
+ { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "OMX.google.g711.mlaw.decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.google.mpeg4.decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.google.h263.decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.avc.decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
+ { MEDIA_MIMETYPE_AUDIO_VORBIS, "OMX.google.vorbis.decoder" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
+ { MEDIA_MIMETYPE_VIDEO_VPX, "OMX.google.vpx.decoder" },
{ MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};
@@ -277,6 +297,10 @@
}
static bool IsSoftwareCodec(const char *componentName) {
+ if (!strncmp("OMX.google.", componentName, 11)) {
+ return true;
+ }
+
if (!strncmp("OMX.", componentName, 4)) {
return false;
}
@@ -284,26 +308,29 @@
return true;
}
-// A sort order in which non-OMX components are first,
-// followed by software codecs, and followed by all the others.
+// A sort order in which OMX software codecs are first, followed
+// by other (non-OMX) software codecs, followed by everything else.
static int CompareSoftwareCodecsFirst(
const String8 *elem1, const String8 *elem2) {
- bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
- bool isNotOMX2 = strncmp(elem2->string(), "OMX.", 4);
-
- if (isNotOMX1) {
- if (isNotOMX2) { return 0; }
- return -1;
- }
- if (isNotOMX2) {
- return 1;
- }
+ bool isOMX1 = !strncmp(elem1->string(), "OMX.", 4);
+ bool isOMX2 = !strncmp(elem2->string(), "OMX.", 4);
bool isSoftwareCodec1 = IsSoftwareCodec(elem1->string());
bool isSoftwareCodec2 = IsSoftwareCodec(elem2->string());
if (isSoftwareCodec1) {
- if (isSoftwareCodec2) { return 0; }
+ if (!isSoftwareCodec2) { return -1; }
+
+ if (isOMX1) {
+ if (isOMX2) { return 0; }
+
+ return -1;
+ } else {
+ if (isOMX2) { return 0; }
+
+ return 1;
+ }
+
return -1;
}
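To make the revised ordering concrete, the following standalone sketch reimplements the comparison rule from the updated comment (OMX software codecs first, then other software codecs, then everything else) using std::stable_sort over plain strings; the helper names are illustrative and not part of OMXCodec.cpp:

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    // Mirrors the idea of IsSoftwareCodec(): "OMX.google.*" is software,
    // any other "OMX.*" is not, everything else is software.
    static bool isSoftwareCodec(const std::string &name) {
        if (name.compare(0, 11, "OMX.google.") == 0) return true;
        if (name.compare(0, 4, "OMX.") == 0) return false;
        return true;
    }

    // Rank: 0 = OMX software, 1 = non-OMX software, 2 = everything else.
    static int rank(const std::string &name) {
        bool isOMX = name.compare(0, 4, "OMX.") == 0;
        if (isSoftwareCodec(name)) return isOMX ? 0 : 1;
        return 2;
    }

    int main() {
        std::vector<std::string> codecs = {
            "OMX.TI.AAC.decode", "AACDecoder", "OMX.google.aac.decoder"
        };
        std::stable_sort(codecs.begin(), codecs.end(),
                         [](const std::string &a, const std::string &b) {
                             return rank(a) < rank(b);
                         });
        // Expected order:
        // OMX.google.aac.decoder, AACDecoder, OMX.TI.AAC.decode
        for (const auto &c : codecs) std::cout << c << "\n";
        return 0;
    }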
@@ -622,6 +649,11 @@
LOGE("Profile and/or level exceed the decoder's capabilities.");
return ERROR_UNSUPPORTED;
}
+ } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+ addCodecSpecificData(data, size);
+
+ CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
+ addCodecSpecificData(data, size);
}
}
@@ -631,16 +663,23 @@
}
if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mMIME)) {
setAMRFormat(false /* isWAMR */, bitRate);
- }
- if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mMIME)) {
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mMIME)) {
setAMRFormat(true /* isWAMR */, bitRate);
- }
- if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mMIME)) {
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mMIME)) {
int32_t numChannels, sampleRate;
CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
setAACFormat(numChannels, sampleRate, bitRate);
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME)
+ || !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) {
+ // These are PCM-like formats with a fixed sample rate but
+ // a variable number of channels.
+
+ int32_t numChannels;
+ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+
+ setG711Format(numChannels);
}
if (!strncasecmp(mMIME, "video/", 6)) {
@@ -1316,6 +1355,8 @@
compressionFormat = OMX_VIDEO_CodingMPEG4;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
compressionFormat = OMX_VIDEO_CodingH263;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
+ compressionFormat = OMX_VIDEO_CodingVPX;
} else {
LOGE("Not a supported video mime type: %s", mime);
CHECK(!"Should not be here. Not a supported video mime type.");
@@ -1443,7 +1484,8 @@
mOutputPortSettingsChangedPending(false),
mLeftOverBuffer(NULL),
mPaused(false),
- mNativeWindow(nativeWindow) {
+ mNativeWindow(!strncmp(componentName, "OMX.google.", 11)
+ ? NULL : nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1830,7 +1872,7 @@
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
- android_native_buffer_t* buf;
+ ANativeWindowBuffer* buf;
err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
if (err != 0) {
LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
@@ -1900,7 +1942,7 @@
OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
// Dequeue the next buffer from the native window.
- android_native_buffer_t* buf;
+ ANativeWindowBuffer* buf;
int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
if (err != 0) {
CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
@@ -2899,6 +2941,23 @@
offset += srcBuffer->range_length();
+ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_VORBIS, mMIME)) {
+ CHECK(!(mQuirks & kSupportsMultipleFramesPerInputBuffer));
+ CHECK_GE(info->mSize, offset + sizeof(int32_t));
+
+ int32_t numPageSamples;
+ if (!srcBuffer->meta_data()->findInt32(
+ kKeyValidSamples, &numPageSamples)) {
+ numPageSamples = -1;
+ }
+
+ memcpy((uint8_t *)info->mData + offset,
+ &numPageSamples,
+ sizeof(numPageSamples));
+
+ offset += sizeof(numPageSamples);
+ }
+
if (releaseBuffer) {
srcBuffer->release();
srcBuffer = NULL;
@@ -3224,6 +3283,11 @@
}
}
+void OMXCodec::setG711Format(int32_t numChannels) {
+ CHECK(!mIsEncoder);
+ setRawAudioFormat(kPortIndexInput, 8000, numChannels);
+}
+
void OMXCodec::setImageOutputFormat(
OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height) {
CODEC_LOGV("setImageOutputFormat(%ld, %ld)", width, height);
@@ -4013,6 +4077,13 @@
numChannels, params.nChannels);
}
+ if (sampleRate != (int32_t)params.nSamplingRate) {
+ LOGW("Codec outputs at different sampling rate than "
+ "what the input stream contains (contains data at "
+ "%d Hz, codec outputs %lu Hz)",
+ sampleRate, params.nSamplingRate);
+ }
+
mOutputFormat->setCString(
kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
@@ -4025,8 +4096,7 @@
(mQuirks & kDecoderLiesAboutNumberOfChannels)
? numChannels : params.nChannels);
- // The codec-reported sampleRate is not reliable...
- mOutputFormat->setInt32(kKeySampleRate, sampleRate);
+ mOutputFormat->setInt32(kKeySampleRate, params.nSamplingRate);
} else if (audio_def->eEncoding == OMX_AUDIO_CodingAMR) {
OMX_AUDIO_PARAM_AMRTYPE amr;
InitOMXParams(&amr);
@@ -4133,6 +4203,14 @@
break;
}
}
+
+ // If the input format contains rotation information, flag the output
+ // format accordingly.
+
+ int32_t rotationDegrees;
+ if (mSource->getFormat()->findInt32(kKeyRotation, &rotationDegrees)) {
+ mOutputFormat->setInt32(kKeyRotation, rotationDegrees);
+ }
}
status_t OMXCodec::pause() {
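The Vorbis input path above appends a 32-bit numPageSamples value (or -1 when the extractor supplies no kKeyValidSamples entry) directly after the compressed data in each input buffer. A minimal sketch of that trailer layout, assuming a plain byte vector in place of the real OMX buffer:

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Append a host-endian int32 trailer after the compressed payload,
    // the way the Vorbis path tacks numPageSamples onto each input
    // buffer (-1 means "unknown").
    static void appendNumPageSamples(std::vector<uint8_t> &buf,
                                     int32_t numPageSamples) {
        const uint8_t *p = reinterpret_cast<const uint8_t *>(&numPageSamples);
        buf.insert(buf.end(), p, p + sizeof(numPageSamples));
    }

    static int32_t readNumPageSamples(const std::vector<uint8_t> &buf) {
        int32_t value;
        memcpy(&value, buf.data() + buf.size() - sizeof(value), sizeof(value));
        return value;
    }

    int main() {
        std::vector<uint8_t> inputBuffer = {0xDE, 0xAD, 0xBE, 0xEF};  // fake payload
        appendNumPageSamples(inputBuffer, -1);  // extractor gave no sample count
        assert(readNumPageSamples(inputBuffer) == -1);
        return 0;
    }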
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 6538a05..1560b8e 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -378,12 +378,19 @@
ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
uint8_t header[27];
- if (mSource->readAt(offset, header, sizeof(header))
+ ssize_t n;
+ if ((n = mSource->readAt(offset, header, sizeof(header)))
< (ssize_t)sizeof(header)) {
- LOGV("failed to read %d bytes at offset 0x%016llx",
- sizeof(header), offset);
+ LOGV("failed to read %d bytes at offset 0x%016llx, got %ld bytes",
+ sizeof(header), offset, n);
- return ERROR_IO;
+ if (n < 0) {
+ return n;
+ } else if (n == 0) {
+ return ERROR_END_OF_STREAM;
+ } else {
+ return ERROR_IO;
+ }
}
if (memcmp(header, "OggS", 4)) {
@@ -498,8 +505,8 @@
packetSize);
if (n < (ssize_t)packetSize) {
- LOGV("failed to read %d bytes at 0x%016llx",
- packetSize, dataOffset);
+ LOGV("failed to read %d bytes at 0x%016llx, got %ld bytes",
+ packetSize, dataOffset, n);
return ERROR_IO;
}
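The readPage() change above stops collapsing every short read into ERROR_IO and instead distinguishes an underlying error, a clean end of stream, and a truncated read. A small sketch of that mapping, assuming a readAt()-style call that returns a negative status, zero at end of stream, or the number of bytes read; the status constants below are placeholders, not the real MediaErrors.h values:

    #include <cassert>
    #include <cstddef>
    #include <sys/types.h>   // ssize_t

    // Placeholder status codes for illustration only.
    static const ssize_t OK_STATUS = 0;
    static const ssize_t ERROR_END_OF_STREAM = -2;
    static const ssize_t ERROR_IO = -3;

    // Map the result of a readAt()-style call to a status, mirroring the
    // new short-read handling in MyVorbisExtractor::readPage().
    static ssize_t mapShortRead(ssize_t n, size_t expected) {
        if (n >= (ssize_t)expected) return OK_STATUS;   // full read
        if (n < 0) return n;                            // propagate the error
        if (n == 0) return ERROR_END_OF_STREAM;         // clean end of stream
        return ERROR_IO;                                // truncated mid-stream
    }

    int main() {
        assert(mapShortRead(27, 27) == OK_STATUS);
        assert(mapShortRead(0, 27) == ERROR_END_OF_STREAM);
        assert(mapShortRead(10, 27) == ERROR_IO);
        assert(mapShortRead(-5, 27) == -5);
        return 0;
    }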
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index ef4d3d0..eb135ab 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -220,7 +220,7 @@
return ERROR_MALFORMED;
}
- mSampleSizeFieldSize = mDefaultSampleSize & 0xf;
+ mSampleSizeFieldSize = mDefaultSampleSize & 0xff;
mDefaultSampleSize = 0;
if (mSampleSizeFieldSize != 4 && mSampleSizeFieldSize != 8
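The one-character mask change above matters because the 'stz2' sample-size field width occupies a full byte of the header word, so masking with 0xf truncates a legal field size of 16 down to 0 and the table is then rejected as malformed. A tiny worked check, with the field layout assumed from the surrounding code:

    #include <cassert>
    #include <cstdint>

    int main() {
        // In an 'stz2' box the 32-bit word after version/flags holds
        // 24 reserved bits followed by an 8-bit field_size (4, 8 or 16).
        const uint32_t word = 0x00000010;      // field_size = 16

        uint32_t oldFieldSize = word & 0xf;    // old mask: truncates 16 to 0
        uint32_t newFieldSize = word & 0xff;   // fixed mask: keeps the full byte

        assert(oldFieldSize == 0);
        assert(newFieldSize == 16);
        return 0;
    }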
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 7621f2c..4c3dc47 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -27,6 +27,7 @@
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/MediaDefs.h>
namespace android {
@@ -429,6 +430,7 @@
// The overall duration is the duration of the longest track.
int64_t maxDurationUs = 0;
+ String8 timedTextLang;
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
@@ -452,10 +454,22 @@
CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ const char *lang;
+ trackMeta->findCString(kKeyMediaLanguage, &lang);
+ timedTextLang.append(String8(lang));
+ timedTextLang.append(String8(":"));
}
}
}
+ // Save the language codes for all timed text tracks.
+ // If multiple text tracks are present, the result will look
+ // like "eng:chi".
+ if (!timedTextLang.isEmpty()) {
+ mMetaData.add(METADATA_KEY_TIMED_TEXT_LANGUAGES, timedTextLang);
+ }
+
// The duration value is a string representing the duration in ms.
sprintf(tmp, "%lld", (maxDurationUs + 500) / 1000);
mMetaData.add(METADATA_KEY_DURATION, String8(tmp));
diff --git a/media/libstagefright/TimedTextPlayer.cpp b/media/libstagefright/TimedTextPlayer.cpp
new file mode 100644
index 0000000..1ac22cb
--- /dev/null
+++ b/media/libstagefright/TimedTextPlayer.cpp
@@ -0,0 +1,252 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TimedTextPlayer"
+#include <utils/Log.h>
+
+#include <binder/IPCThreadState.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/Utils.h>
+#include "include/AwesomePlayer.h"
+#include "include/TimedTextPlayer.h"
+
+namespace android {
+
+struct TimedTextEvent : public TimedEventQueue::Event {
+ TimedTextEvent(
+ TimedTextPlayer *player,
+ void (TimedTextPlayer::*method)())
+ : mPlayer(player),
+ mMethod(method) {
+ }
+
+protected:
+ virtual ~TimedTextEvent() {}
+
+ virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
+ (mPlayer->*mMethod)();
+ }
+
+private:
+ TimedTextPlayer *mPlayer;
+ void (TimedTextPlayer::*mMethod)();
+
+ TimedTextEvent(const TimedTextEvent &);
+ TimedTextEvent &operator=(const TimedTextEvent &);
+};
+
+TimedTextPlayer::TimedTextPlayer(
+ AwesomePlayer *observer,
+ const wp<MediaPlayerBase> &listener,
+ TimedEventQueue *queue)
+ : mSource(NULL),
+ mSeekTimeUs(0),
+ mStarted(false),
+ mTextEventPending(false),
+ mQueue(queue),
+ mListener(listener),
+ mObserver(observer),
+ mTextBuffer(NULL) {
+ mTextEvent = new TimedTextEvent(this, &TimedTextPlayer::onTextEvent);
+}
+
+TimedTextPlayer::~TimedTextPlayer() {
+ if (mStarted) {
+ reset();
+ }
+
+ mTextTrackVector.clear();
+}
+
+status_t TimedTextPlayer::start(uint8_t index) {
+ CHECK(!mStarted);
+
+ if (index >= mTextTrackVector.size()) {
+ LOGE("Incorrect text track index");
+ return BAD_VALUE;
+ }
+
+ mSource = mTextTrackVector.itemAt(index);
+
+ status_t err = mSource->start();
+
+ if (err != OK) {
+ return err;
+ }
+
+ int64_t positionUs;
+ mObserver->getPosition(&positionUs);
+ seekTo(positionUs);
+
+ postTextEvent();
+
+ mStarted = true;
+
+ return OK;
+}
+
+void TimedTextPlayer::pause() {
+ CHECK(mStarted);
+
+ cancelTextEvent();
+}
+
+void TimedTextPlayer::resume() {
+ CHECK(mStarted);
+
+ postTextEvent();
+}
+
+void TimedTextPlayer::reset() {
+ CHECK(mStarted);
+
+ // send an empty text to clear the screen
+ notifyListener(MEDIA_TIMED_TEXT);
+
+ cancelTextEvent();
+
+ mSeeking = false;
+ mStarted = false;
+
+ if (mTextBuffer != NULL) {
+ mTextBuffer->release();
+ mTextBuffer = NULL;
+ }
+
+ if (mSource != NULL) {
+ mSource->stop();
+ mSource.clear();
+ mSource = NULL;
+ }
+}
+
+status_t TimedTextPlayer::seekTo(int64_t time_us) {
+ Mutex::Autolock autoLock(mLock);
+
+ mSeeking = true;
+ mSeekTimeUs = time_us;
+
+ return OK;
+}
+
+status_t TimedTextPlayer::setTimedTextTrackIndex(int32_t index) {
+ if (index >= (int)(mTextTrackVector.size())) {
+ return BAD_VALUE;
+ }
+
+ if (mStarted) {
+ reset();
+ }
+
+ if (index >= 0) {
+ return start(index);
+ }
+ return OK;
+}
+
+void TimedTextPlayer::onTextEvent() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!mTextEventPending) {
+ return;
+ }
+ mTextEventPending = false;
+
+ MediaSource::ReadOptions options;
+ if (mSeeking) {
+ options.setSeekTo(mSeekTimeUs,
+ MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ mSeeking = false;
+
+ if (mTextBuffer != NULL) {
+ mTextBuffer->release();
+ mTextBuffer = NULL;
+ }
+
+ notifyListener(MEDIA_TIMED_TEXT); //empty text to clear the screen
+ }
+
+ if (mTextBuffer != NULL) {
+ uint8_t *tmp = (uint8_t *)(mTextBuffer->data());
+ size_t len = (*tmp) << 8 | (*(tmp + 1));
+
+ notifyListener(MEDIA_TIMED_TEXT,
+ tmp + 2,
+ len);
+
+ mTextBuffer->release();
+ mTextBuffer = NULL;
+
+ }
+
+ if (mSource->read(&mTextBuffer, &options) != OK) {
+ return;
+ }
+
+ int64_t positionUs, timeUs;
+ mObserver->getPosition(&positionUs);
+ mTextBuffer->meta_data()->findInt64(kKeyTime, &timeUs);
+
+ //send the text now
+ if (timeUs <= positionUs + 100000ll) {
+ postTextEvent();
+ } else {
+ postTextEvent(timeUs - positionUs - 100000ll);
+ }
+}
+
+void TimedTextPlayer::postTextEvent(int64_t delayUs) {
+ if (mTextEventPending) {
+ return;
+ }
+
+ mTextEventPending = true;
+ mQueue->postEventWithDelay(mTextEvent, delayUs < 0 ? 10000 : delayUs);
+}
+
+void TimedTextPlayer::cancelTextEvent() {
+ mQueue->cancelEvent(mTextEvent->eventID());
+ mTextEventPending = false;
+}
+
+void TimedTextPlayer::addTextSource(sp<MediaSource> source) {
+ mTextTrackVector.add(source);
+}
+
+void TimedTextPlayer::notifyListener(
+ int msg, const void *data, size_t size) {
+ if (mListener != NULL) {
+ sp<MediaPlayerBase> listener = mListener.promote();
+
+ if (listener != NULL) {
+ if (size > 0) {
+ mData.freeData();
+ mData.write(data, size);
+
+ listener->sendEvent(msg, 0, 0, &mData);
+ } else { // send an empty timed text to clear the screen
+ listener->sendEvent(msg);
+ }
+ }
+ }
+}
+}
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index 76f47f7..bf978d7 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -264,6 +264,8 @@
mGroup(NULL) {
CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate));
CHECK(mMeta->findInt32(kKeyChannelCount, &mNumChannels));
+
+ mMeta->setInt32(kKeyMaxInputSize, kMaxFrameSize);
}
WAVSource::~WAVSource() {
diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp
index 7c72852..83a1eaa 100644
--- a/media/libstagefright/WVMExtractor.cpp
+++ b/media/libstagefright/WVMExtractor.cpp
@@ -45,7 +45,8 @@
static Mutex gWVMutex;
WVMExtractor::WVMExtractor(const sp<DataSource> &source)
- : mDataSource(source) {
+ : mDataSource(source),
+ mUseAdaptiveStreaming(false) {
{
Mutex::Autolock autoLock(gWVMutex);
if (gVendorLibHandle == NULL) {
@@ -100,5 +101,21 @@
return mImpl->getMetaData();
}
+int64_t WVMExtractor::getCachedDurationUs(status_t *finalStatus) {
+ // TODO: Fill this with life.
+
+ *finalStatus = OK;
+
+ return 0;
+}
+
+void WVMExtractor::setAdaptiveStreamingMode(bool adaptive) {
+ mUseAdaptiveStreaming = adaptive;
+}
+
+bool WVMExtractor::getAdaptiveStreamingMode() const {
+ return mUseAdaptiveStreaming;
+}
+
} //namespace android
diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp
index af2f6ac..3e4e4937 100644
--- a/media/libstagefright/chromium_http/support.cpp
+++ b/media/libstagefright/chromium_http/support.cpp
@@ -24,6 +24,7 @@
#include "android/net/android_network_library_impl.h"
#include "base/thread.h"
+#include "net/base/cert_verifier.h"
#include "net/base/host_resolver.h"
#include "net/base/ssl_config_service.h"
#include "net/http/http_auth_handler_factory.h"
@@ -127,6 +128,7 @@
http_transaction_factory_ = new net::HttpCache(
host_resolver_,
+ new net::CertVerifier(),
dnsrr_resolver_,
dns_cert_checker_.get(),
proxy_service_.get(),
@@ -174,44 +176,44 @@
}
void SfDelegate::OnReceivedRedirect(
- URLRequest *request, const GURL &new_url, bool *defer_redirect) {
+ net::URLRequest *request, const GURL &new_url, bool *defer_redirect) {
MY_LOGI("OnReceivedRedirect");
}
void SfDelegate::OnAuthRequired(
- URLRequest *request, net::AuthChallengeInfo *auth_info) {
+ net::URLRequest *request, net::AuthChallengeInfo *auth_info) {
MY_LOGI("OnAuthRequired");
inherited::OnAuthRequired(request, auth_info);
}
void SfDelegate::OnCertificateRequested(
- URLRequest *request, net::SSLCertRequestInfo *cert_request_info) {
+ net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info) {
MY_LOGI("OnCertificateRequested");
inherited::OnCertificateRequested(request, cert_request_info);
}
void SfDelegate::OnSSLCertificateError(
- URLRequest *request, int cert_error, net::X509Certificate *cert) {
+ net::URLRequest *request, int cert_error, net::X509Certificate *cert) {
fprintf(stderr, "OnSSLCertificateError cert_error=%d\n", cert_error);
inherited::OnSSLCertificateError(request, cert_error, cert);
}
-void SfDelegate::OnGetCookies(URLRequest *request, bool blocked_by_policy) {
+void SfDelegate::OnGetCookies(net::URLRequest *request, bool blocked_by_policy) {
MY_LOGI("OnGetCookies");
}
void SfDelegate::OnSetCookie(
- URLRequest *request,
+ net::URLRequest *request,
const std::string &cookie_line,
const net::CookieOptions &options,
bool blocked_by_policy) {
MY_LOGI("OnSetCookie");
}
-void SfDelegate::OnResponseStarted(URLRequest *request) {
+void SfDelegate::OnResponseStarted(net::URLRequest *request) {
if (request->status().status() != URLRequestStatus::SUCCESS) {
MY_LOGI(StringPrintf(
"Request failed with status %d and os_error %d",
@@ -260,7 +262,7 @@
request->GetExpectedContentSize(), contentType.c_str());
}
-void SfDelegate::OnReadCompleted(URLRequest *request, int bytes_read) {
+void SfDelegate::OnReadCompleted(net::URLRequest *request, int bytes_read) {
if (bytes_read == -1) {
MY_LOGI(StringPrintf(
"OnReadCompleted, read failed, status %d",
@@ -297,7 +299,7 @@
readMore(request);
}
-void SfDelegate::readMore(URLRequest *request) {
+void SfDelegate::readMore(net::URLRequest *request) {
while (mNumBytesRead < mNumBytesTotal) {
size_t copy = mNumBytesTotal - mNumBytesRead;
if (copy > mReadBuffer->size()) {
@@ -371,7 +373,7 @@
off64_t offset) {
CHECK(mURLRequest == NULL);
- mURLRequest = new URLRequest(url, this);
+ mURLRequest = new net::URLRequest(url, this);
mAtEOS = false;
mRangeRequested = false;
diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h
index 634ac93..4d03493 100644
--- a/media/libstagefright/chromium_http/support.h
+++ b/media/libstagefright/chromium_http/support.h
@@ -77,7 +77,7 @@
struct ChromiumHTTPDataSource;
-struct SfDelegate : public URLRequest::Delegate {
+struct SfDelegate : public net::URLRequest::Delegate {
SfDelegate();
virtual ~SfDelegate();
@@ -92,35 +92,35 @@
void setOwner(ChromiumHTTPDataSource *mOwner);
virtual void OnReceivedRedirect(
- URLRequest *request, const GURL &new_url, bool *defer_redirect);
+ net::URLRequest *request, const GURL &new_url, bool *defer_redirect);
virtual void OnAuthRequired(
- URLRequest *request, net::AuthChallengeInfo *auth_info);
+ net::URLRequest *request, net::AuthChallengeInfo *auth_info);
virtual void OnCertificateRequested(
- URLRequest *request, net::SSLCertRequestInfo *cert_request_info);
+ net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info);
virtual void OnSSLCertificateError(
- URLRequest *request, int cert_error, net::X509Certificate *cert);
+ net::URLRequest *request, int cert_error, net::X509Certificate *cert);
- virtual void OnGetCookies(URLRequest *request, bool blocked_by_policy);
+ virtual void OnGetCookies(net::URLRequest *request, bool blocked_by_policy);
virtual void OnSetCookie(
- URLRequest *request,
+ net::URLRequest *request,
const std::string &cookie_line,
const net::CookieOptions &options,
bool blocked_by_policy);
- virtual void OnResponseStarted(URLRequest *request);
+ virtual void OnResponseStarted(net::URLRequest *request);
- virtual void OnReadCompleted(URLRequest *request, int bytes_read);
+ virtual void OnReadCompleted(net::URLRequest *request, int bytes_read);
private:
typedef Delegate inherited;
ChromiumHTTPDataSource *mOwner;
- URLRequest *mURLRequest;
+ net::URLRequest *mURLRequest;
scoped_refptr<net::IOBufferWithSize> mReadBuffer;
size_t mNumBytesRead;
@@ -130,7 +130,7 @@
bool mRangeRequested;
bool mAtEOS;
- void readMore(URLRequest *request);
+ void readMore(net::URLRequest *request);
static void OnInitiateConnectionWrapper(
SfDelegate *me,
diff --git a/media/libstagefright/codecs/aacdec/AACDecoder.cpp b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
index 208431c..d2e3eaa 100644
--- a/media/libstagefright/codecs/aacdec/AACDecoder.cpp
+++ b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
@@ -234,6 +234,23 @@
mConfig->aacPlusUpsamplingFactor, mConfig->desiredChannels);
CHECK(mNumDecodedBuffers > 0);
+
+ if (decoderErr != MP4AUDEC_SUCCESS) {
+ // If decoding fails this early, the fields in mConfig may
+ // not be valid and we cannot recover.
+
+ LOGE("Unable to decode aac content, decoder returned error %d",
+ decoderErr);
+
+ buffer->release();
+ buffer = NULL;
+
+ mInputBuffer->release();
+ mInputBuffer = NULL;
+
+ return ERROR_UNSUPPORTED;
+ }
+
if (mNumDecodedBuffers == 1) {
mUpsamplingFactor = mConfig->aacPlusUpsamplingFactor;
// Check on the sampling rate to see whether it is changed.
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index 69e331f..359a2ec 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -143,14 +143,39 @@
unpack_idx.cpp \
window_tables_fxp.cpp \
pvmp4setaudioconfig.cpp \
- AACDecoder.cpp
+ AACDecoder.cpp \
LOCAL_CFLAGS := -DAAC_PLUS -DHQ_SBR -DPARAMETRICSTEREO -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF= -DOSCL_UNUSED_ARG=
-LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include
+LOCAL_C_INCLUDES := \
+ frameworks/base/media/libstagefright/include \
LOCAL_ARM_MODE := arm
LOCAL_MODULE := libstagefright_aacdec
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftAAC.cpp
+
+LOCAL_C_INCLUDES := \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_aacdec
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_aacdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC.cpp b/media/libstagefright/codecs/aacdec/SoftAAC.cpp
new file mode 100644
index 0000000..7ce6128
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/SoftAAC.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAAC"
+#include <utils/Log.h>
+
+#include "SoftAAC.h"
+
+#include "pvmp4audiodecoder_api.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftAAC::SoftAAC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mConfig(new tPVMP4AudioDecoderExternal),
+ mDecoderBuf(NULL),
+ mInputBufferCount(0),
+ mUpsamplingFactor(2),
+ mAnchorTimeUs(0),
+ mNumSamplesOutput(0),
+ mSignalledError(false),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftAAC::~SoftAAC() {
+ free(mDecoderBuf);
+ mDecoderBuf = NULL;
+
+ delete mConfig;
+ mConfig = NULL;
+}
+
+void SoftAAC::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/aac");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftAAC::initDecoder() {
+ memset(mConfig, 0, sizeof(tPVMP4AudioDecoderExternal));
+ mConfig->outputFormat = OUTPUTFORMAT_16PCM_INTERLEAVED;
+ mConfig->aacPlusEnabled = 1;
+
+ // The software decoder doesn't properly support mono output on
+ // AACplus files. Always output stereo.
+ mConfig->desiredChannels = 2;
+
+ UInt32 memRequirements = PVMP4AudioDecoderGetMemRequirements();
+ mDecoderBuf = malloc(memRequirements);
+
+ Int err = PVMP4AudioDecoderInitLibrary(mConfig, mDecoderBuf);
+ if (err != MP4AUDEC_SUCCESS) {
+ LOGE("Failed to initialize MP4 audio decoder");
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+OMX_ERRORTYPE SoftAAC::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioAac:
+ {
+ OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
+ (OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
+
+ if (aacParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ aacParams->nBitRate = 0;
+ aacParams->nAudioBandWidth = 0;
+ aacParams->nAACtools = 0;
+ aacParams->nAACERtools = 0;
+ aacParams->eAACProfile = OMX_AUDIO_AACObjectMain;
+ aacParams->eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+ aacParams->eChannelMode = OMX_AUDIO_ChannelModeStereo;
+
+ if (!isConfigured()) {
+ aacParams->nChannels = 1;
+ aacParams->nSampleRate = 44100;
+ aacParams->nFrameLength = 0;
+ } else {
+ aacParams->nChannels = mConfig->encodedChannels;
+ aacParams->nSampleRate = mConfig->samplingRate;
+ aacParams->nFrameLength = mConfig->frameLength;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ if (!isConfigured()) {
+ pcmParams->nChannels = 1;
+ pcmParams->nSamplingRate = 44100;
+ } else {
+ pcmParams->nChannels = mConfig->desiredChannels;
+ pcmParams->nSamplingRate = mConfig->samplingRate;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAAC::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.aac",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioAac:
+ {
+ const OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
+ (const OMX_AUDIO_PARAM_AACPROFILETYPE *)params;
+
+ if (aacParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftAAC::isConfigured() const {
+ return mInputBufferCount > 0;
+}
+
+void SoftAAC::onQueueFilled(OMX_U32 portIndex) {
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (portIndex == 0 && mInputBufferCount == 0) {
+ ++mInputBufferCount;
+
+ BufferInfo *info = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *header = info->mHeader;
+
+ mConfig->pInputBuffer = header->pBuffer + header->nOffset;
+ mConfig->inputBufferCurrentLength = header->nFilledLen;
+ mConfig->inputBufferMaxLength = 0;
+
+ Int err = PVMP4AudioDecoderConfig(mConfig, mDecoderBuf);
+ if (err != MP4AUDEC_SUCCESS) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
+ return;
+ }
+
+ inQueue.erase(inQueue.begin());
+ info->mOwnedByUs = false;
+ notifyEmptyBufferDone(header);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumSamplesOutput = 0;
+ }
+
+ mConfig->pInputBuffer = inHeader->pBuffer + inHeader->nOffset;
+ mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
+ mConfig->inputBufferMaxLength = 0;
+ mConfig->inputBufferUsedLength = 0;
+ mConfig->remainderBits = 0;
+
+ mConfig->pOutputBuffer =
+ reinterpret_cast<Int16 *>(outHeader->pBuffer + outHeader->nOffset);
+
+ mConfig->pOutputBuffer_plus = &mConfig->pOutputBuffer[2048];
+ mConfig->repositionFlag = false;
+
+ Int32 prevSamplingRate = mConfig->samplingRate;
+ Int decoderErr = PVMP4AudioDecodeFrame(mConfig, mDecoderBuf);
+
+        /*
+         * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+         * or implicitly, according to the MPEG-4 spec. AAC+/eAAC+ is a
+         * dual-rate system, and the sampling rate of the final output is
+         * actually double the core AAC decoder's sampling rate.
+         *
+         * Explicit signalling is done by explicitly defining the SBR audio
+         * object type in the bitstream. Implicit signalling is done by
+         * embedding SBR content in an AAC extension payload specific to SBR,
+         * and hence requires the AAC decoder to perform pre-checks on the
+         * actual audio frames.
+         *
+         * Thus, we cannot say for sure whether a stream is AAC+/eAAC+ until
+         * the first data frame has been decoded.
+         */
+ if (mInputBufferCount <= 2) {
+ LOGV("audio/extended audio object type: %d + %d",
+ mConfig->audioObjectType, mConfig->extendedAudioObjectType);
+ LOGV("aac+ upsampling factor: %d desired channels: %d",
+ mConfig->aacPlusUpsamplingFactor, mConfig->desiredChannels);
+
+ if (mInputBufferCount == 1) {
+ mUpsamplingFactor = mConfig->aacPlusUpsamplingFactor;
+                // Check whether the sampling rate has changed.
+ if (mConfig->samplingRate != prevSamplingRate) {
+ LOGW("Sample rate was %d Hz, but now is %d Hz",
+ prevSamplingRate, mConfig->samplingRate);
+
+ // We'll hold onto the input buffer and will decode
+ // it again once the output port has been reconfigured.
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+ } else { // mInputBufferCount == 2
+ if (mConfig->extendedAudioObjectType == MP4AUDIO_AAC_LC ||
+ mConfig->extendedAudioObjectType == MP4AUDIO_LTP) {
+ if (mUpsamplingFactor == 2) {
+ // The stream turns out to be not aacPlus mode anyway
+ LOGW("Disable AAC+/eAAC+ since extended audio object "
+ "type is %d",
+ mConfig->extendedAudioObjectType);
+ mConfig->aacPlusEnabled = 0;
+ }
+ } else {
+ if (mUpsamplingFactor == 1) {
+                        // aacPlus mode does not buy us anything here; it only
+                        // 1. increases the CPU load, and
+                        // 2. halves the decoding speed.
+ LOGW("Disable AAC+/eAAC+ since upsampling factor is 1");
+ mConfig->aacPlusEnabled = 0;
+ }
+ }
+ }
+ }
+
+ size_t numOutBytes =
+ mConfig->frameLength * sizeof(int16_t) * mConfig->desiredChannels;
+
+ if (decoderErr == MP4AUDEC_SUCCESS) {
+ CHECK_LE(mConfig->inputBufferUsedLength, inHeader->nFilledLen);
+
+ inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
+ inHeader->nOffset += mConfig->inputBufferUsedLength;
+ } else {
+ memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+ }
+
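+        // In SBR (aacPlus) mode every frame yields twice the number of PCM
+        // samples. For mono content the extra samples are produced in the
+        // upper half of the output buffer (pOutputBuffer_plus), so move them
+        // to follow the first half before doubling numOutBytes.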
+ if (mUpsamplingFactor == 2) {
+ if (mConfig->desiredChannels == 1) {
+ memcpy(&mConfig->pOutputBuffer[1024],
+ &mConfig->pOutputBuffer[2048],
+ numOutBytes * 2);
+ }
+ numOutBytes *= 2;
+ }
+
+ outHeader->nFilledLen = numOutBytes;
+ outHeader->nFlags = 0;
+
+ outHeader->nTimeStamp =
+ mAnchorTimeUs
+ + (mNumSamplesOutput * 1000000ll) / mConfig->samplingRate;
+
+ mNumSamplesOutput += mConfig->frameLength * mUpsamplingFactor;
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftAAC::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0) {
+        // Make sure that the next output buffer does not still
+        // depend on fragments from the last one decoded.
+ PVMP4AudioDecoderResetBuffer(mDecoderBuf);
+ }
+}
+
+void SoftAAC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftAAC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC.h b/media/libstagefright/codecs/aacdec/SoftAAC.h
new file mode 100644
index 0000000..963fd27
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/SoftAAC.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_AAC_H_
+
+#define SOFT_AAC_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct tPVMP4AudioDecoderExternal;
+
+namespace android {
+
+struct SoftAAC : public SimpleSoftOMXComponent {
+ SoftAAC(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftAAC();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumBuffers = 4
+ };
+
+ tPVMP4AudioDecoderExternal *mConfig;
+ void *mDecoderBuf;
+
+ size_t mInputBufferCount;
+ size_t mUpsamplingFactor;
+ int64_t mAnchorTimeUs;
+ int64_t mNumSamplesOutput;
+
+ bool mSignalledError;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftAAC);
+};
+
+} // namespace android
+
+#endif // SOFT_AAC_H_
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index e4ff128..0bff52d 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -84,7 +84,7 @@
params.sampleRate = mSampleRate;
params.bitRate = mBitRate;
params.nChannels = mChannels;
- params.adtsUsed = 0; // For MP4 file, don't use adts format$
+ params.adtsUsed = 0; // We add adts header in the file writer if needed.
if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AAC_ENCPARAM, ¶ms)) {
LOGE("Failed to set AAC encoder parameters");
return UNKNOWN_ERROR;
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index a545762..5862abc 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -52,3 +52,33 @@
LOCAL_MODULE := libstagefright_amrnbdec
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftAMR.cpp
+
+LOCAL_C_INCLUDES := \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/include \
+ $(LOCAL_PATH)/../common/include \
+ $(LOCAL_PATH)/../common \
+ frameworks/base/media/libstagefright/codecs/amrwb/src \
+
+LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_amrnbdec libstagefright_amrwbdec
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright_omx libstagefright_foundation libutils \
+ libstagefright_amrnb_common
+
+LOCAL_MODULE := libstagefright_soft_amrdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
new file mode 100644
index 0000000..c0a588f
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
@@ -0,0 +1,434 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAMR"
+#include <utils/Log.h>
+
+#include "SoftAMR.h"
+
+#include "gsmamr_dec.h"
+#include "pvamrwbdecoder.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftAMR::SoftAMR(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mMode(MODE_NARROW),
+ mState(NULL),
+ mDecoderBuf(NULL),
+ mDecoderCookie(NULL),
+ mInputBufferCount(0),
+ mAnchorTimeUs(0),
+ mNumSamplesOutput(0),
+ mSignalledError(false),
+ mOutputPortSettingsChange(NONE) {
+ if (!strcmp(name, "OMX.google.amrwb.decoder")) {
+ mMode = MODE_WIDE;
+ } else {
+ CHECK(!strcmp(name, "OMX.google.amrnb.decoder"));
+ }
+
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftAMR::~SoftAMR() {
+ if (mMode == MODE_NARROW) {
+ GSMDecodeFrameExit(&mState);
+ mState = NULL;
+ } else {
+ free(mDecoderBuf);
+ mDecoderBuf = NULL;
+
+ mState = NULL;
+ mDecoderCookie = NULL;
+ }
+}
+
+void SoftAMR::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ mMode == MODE_NARROW
+ ? const_cast<char *>("audio/amr")
+ : const_cast<char *>("audio/amrwb");
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingAMR;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+
+ def.nBufferSize =
+ (mMode == MODE_NARROW ? kNumSamplesPerFrameNB : kNumSamplesPerFrameWB)
+ * sizeof(int16_t);
+
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftAMR::initDecoder() {
+ if (mMode == MODE_NARROW) {
+ Word16 err = GSMInitDecode(&mState, (Word8 *)"AMRNBDecoder");
+
+ if (err != 0) {
+ return UNKNOWN_ERROR;
+ }
+ } else {
+        int32_t memReq = pvDecoder_AmrWbMemRequirements();
+        mDecoderBuf = malloc(memReq);
+
+        if (mDecoderBuf == NULL) {
+            return NO_MEMORY;
+        }
+
+        pvDecoder_AmrWb_Init(&mState, mDecoderBuf, &mDecoderCookie);
+ }
+
+ return OK;
+}
+
+OMX_ERRORTYPE SoftAMR::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioAmr:
+ {
+ OMX_AUDIO_PARAM_AMRTYPE *amrParams =
+ (OMX_AUDIO_PARAM_AMRTYPE *)params;
+
+ if (amrParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ amrParams->nChannels = 1;
+ amrParams->eAMRDTXMode = OMX_AUDIO_AMRDTXModeOff;
+ amrParams->eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatConformance;
+
+ if (!isConfigured()) {
+ amrParams->nBitRate = 0;
+ amrParams->eAMRBandMode = OMX_AUDIO_AMRBandModeUnused;
+ } else {
+ amrParams->nBitRate = 0;
+ amrParams->eAMRBandMode =
+ mMode == MODE_NARROW
+ ? OMX_AUDIO_AMRBandModeNB0 : OMX_AUDIO_AMRBandModeWB0;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->nChannels = 1;
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+
+ pcmParams->nSamplingRate =
+ (mMode == MODE_NARROW) ? kSampleRateNB : kSampleRateWB;
+
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAMR::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (mMode == MODE_NARROW) {
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.amrnb",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ } else {
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.amrwb",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioAmr:
+ {
+            const OMX_AUDIO_PARAM_AMRTYPE *amrParams =
+                (const OMX_AUDIO_PARAM_AMRTYPE *)params;
+
+            if (amrParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftAMR::isConfigured() const {
+ return mInputBufferCount > 0;
+}
+
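+// Returns the byte size of an AMR-WB frame of the given frame type, including
+// the one-byte frame header. The table holds the number of speech bits per
+// 20 ms frame for frame types 0..8 (6.60 kbps through 23.85 kbps).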
+static size_t getFrameSize(unsigned FT) {
+ static const size_t kFrameSizeWB[9] = {
+ 132, 177, 253, 285, 317, 365, 397, 461, 477
+ };
+
+ size_t frameSize = kFrameSizeWB[FT];
+
+ // Round up bits to bytes and add 1 for the header byte.
+ frameSize = (frameSize + 7) / 8 + 1;
+
+ return frameSize;
+}
+
+void SoftAMR::onQueueFilled(OMX_U32 portIndex) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumSamplesOutput = 0;
+ }
+
+ const uint8_t *inputPtr = inHeader->pBuffer + inHeader->nOffset;
+ int32_t numBytesRead;
+
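+        // The first byte of each frame is the AMR frame header (IETF/MIME
+        // storage format); bits 3..6 hold the frame type, which determines
+        // the codec mode and hence the frame size.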
+ if (mMode == MODE_NARROW) {
+ numBytesRead =
+ AMRDecode(mState,
+ (Frame_Type_3GPP)((inputPtr[0] >> 3) & 0x0f),
+ (UWord8 *)&inputPtr[1],
+ reinterpret_cast<int16_t *>(outHeader->pBuffer),
+ MIME_IETF);
+
+ if (numBytesRead == -1) {
+ LOGE("PV AMR decoder AMRDecode() call failed");
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+
+ return;
+ }
+
+ ++numBytesRead; // Include the frame type header byte.
+
+ if (static_cast<size_t>(numBytesRead) > inHeader->nFilledLen) {
+ // This is bad, should never have happened, but did. Abort now.
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+
+ return;
+ }
+ } else {
+ int16 mode = ((inputPtr[0] >> 3) & 0x0f);
+ size_t frameSize = getFrameSize(mode);
+ CHECK_GE(inHeader->nFilledLen, frameSize);
+
+ int16 frameType;
+ RX_State_wb rx_state;
+ mime_unsorting(
+ const_cast<uint8_t *>(&inputPtr[1]),
+ mInputSampleBuffer,
+ &frameType, &mode, 1, &rx_state);
+
+ int16_t *outPtr = (int16_t *)outHeader->pBuffer;
+
+ int16_t numSamplesOutput;
+ pvDecoder_AmrWb(
+ mode, mInputSampleBuffer,
+ outPtr,
+ &numSamplesOutput,
+ mDecoderBuf, frameType, mDecoderCookie);
+
+ CHECK_EQ((int)numSamplesOutput, (int)kNumSamplesPerFrameWB);
+
+ for (int i = 0; i < kNumSamplesPerFrameWB; ++i) {
+ /* Delete the 2 LSBs (14-bit output) */
+ outPtr[i] &= 0xfffC;
+ }
+
+ numBytesRead = frameSize;
+ }
+
+ inHeader->nOffset += numBytesRead;
+ inHeader->nFilledLen -= numBytesRead;
+
+ outHeader->nFlags = 0;
+ outHeader->nOffset = 0;
+
+ if (mMode == MODE_NARROW) {
+ outHeader->nFilledLen = kNumSamplesPerFrameNB * sizeof(int16_t);
+
+ outHeader->nTimeStamp =
+ mAnchorTimeUs
+ + (mNumSamplesOutput * 1000000ll) / kSampleRateNB;
+
+ mNumSamplesOutput += kNumSamplesPerFrameNB;
+ } else {
+ outHeader->nFilledLen = kNumSamplesPerFrameWB * sizeof(int16_t);
+
+ outHeader->nTimeStamp =
+ mAnchorTimeUs
+ + (mNumSamplesOutput * 1000000ll) / kSampleRateWB;
+
+ mNumSamplesOutput += kNumSamplesPerFrameWB;
+ }
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftAMR::onPortFlushCompleted(OMX_U32 portIndex) {
+}
+
+void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftAMR(name, callbacks, appData, component);
+}
+
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
new file mode 100644
index 0000000..9a596e5
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_AMR_H_
+
+#define SOFT_AMR_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+namespace android {
+
+struct SoftAMR : public SimpleSoftOMXComponent {
+ SoftAMR(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftAMR();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kSampleRateNB = 8000,
+ kSampleRateWB = 16000,
+ kNumSamplesPerFrameNB = 160,
+ kNumSamplesPerFrameWB = 320,
+ };
+
+    enum {
+        MODE_NARROW,
+        MODE_WIDE
+    } mMode;
+
+ void *mState;
+ void *mDecoderBuf;
+ int16_t *mDecoderCookie;
+
+ size_t mInputBufferCount;
+ int64_t mAnchorTimeUs;
+ int64_t mNumSamplesOutput;
+
+ bool mSignalledError;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ int16_t mInputSampleBuffer[477];
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftAMR);
+};
+
+} // namespace android
+
+#endif // SOFT_AMR_H_
+
diff --git a/media/libstagefright/codecs/amrwb/AMRWBDecoder.cpp b/media/libstagefright/codecs/amrwb/AMRWBDecoder.cpp
index 2a21472..5b111ef 100644
--- a/media/libstagefright/codecs/amrwb/AMRWBDecoder.cpp
+++ b/media/libstagefright/codecs/amrwb/AMRWBDecoder.cpp
@@ -177,7 +177,7 @@
CHECK(mInputBuffer->range_length() >= frameSize);
int16 frameType;
- RX_State rx_state;
+ RX_State_wb rx_state;
mime_unsorting(
const_cast<uint8_t *>(&inputPtr[1]),
mInputSampleBuffer,
diff --git a/media/libstagefright/codecs/amrwb/src/mime_io.cpp b/media/libstagefright/codecs/amrwb/src/mime_io.cpp
index 9ff8816..e1966c6 100644
--- a/media/libstagefright/codecs/amrwb/src/mime_io.cpp
+++ b/media/libstagefright/codecs/amrwb/src/mime_io.cpp
@@ -531,7 +531,7 @@
int16 * frame_type,
int16 * mode,
uint8 quality,
- RX_State *st)
+ RX_State_wb *st)
{
int16 i;
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h
index 433fc92..c40bc10 100644
--- a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h
+++ b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h
@@ -101,7 +101,7 @@
{
int16 prev_ft;
int16 prev_mode;
-} RX_State;
+} RX_State_wb;
/*----------------------------------------------------------------------------
; ENUMERATED TYPEDEF'S
@@ -141,7 +141,7 @@
int16 *frame_type,
int16 *mode,
uint8 q,
- RX_State *st);
+ RX_State_wb *st);
/*----------------------------------------------------------------------------
diff --git a/media/libstagefright/codecs/avc/dec/Android.mk b/media/libstagefright/codecs/avc/dec/Android.mk
index 1b00347..4d4533b 100644
--- a/media/libstagefright/codecs/avc/dec/Android.mk
+++ b/media/libstagefright/codecs/avc/dec/Android.mk
@@ -3,25 +3,54 @@
LOCAL_SRC_FILES := \
AVCDecoder.cpp \
- src/avcdec_api.cpp \
- src/avc_bitstream.cpp \
- src/header.cpp \
- src/itrans.cpp \
- src/pred_inter.cpp \
- src/pred_intra.cpp \
- src/residual.cpp \
- src/slice.cpp \
- src/vlc.cpp
+ src/avcdec_api.cpp \
+ src/avc_bitstream.cpp \
+ src/header.cpp \
+ src/itrans.cpp \
+ src/pred_inter.cpp \
+ src/pred_intra.cpp \
+ src/residual.cpp \
+ src/slice.cpp \
+ src/vlc.cpp
LOCAL_MODULE := libstagefright_avcdec
LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/src \
- $(LOCAL_PATH)/include \
- $(LOCAL_PATH)/../common/include \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/include \
+ $(LOCAL_PATH)/../common/include \
$(TOP)/frameworks/base/media/libstagefright/include \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ frameworks/base/include/media/stagefright/openmax \
LOCAL_CFLAGS := -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftAVC.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/include \
+ $(LOCAL_PATH)/../common/include \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_avcdec
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright_avc_common \
+ libstagefright libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_avcdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.cpp b/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
new file mode 100644
index 0000000..9f141ac
--- /dev/null
+++ b/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
@@ -0,0 +1,690 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAVC"
+#include <utils/Log.h>
+
+#include "SoftAVC.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "avcdec_api.h"
+#include "avcdec_int.h"
+
+namespace android {
+
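+// Input buffers are expected to contain Annex B NAL units, each prefixed with
+// this four-byte start code.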
+static const char kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
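+// Thin adapters for the PV decoder's CBAVC_Malloc/CBAVC_Free callbacks;
+// pointers are carried across the C API as int32_t values.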
+static int32_t Malloc(void *userData, int32_t size, int32_t attrs) {
+ return reinterpret_cast<int32_t>(malloc(size));
+}
+
+static void Free(void *userData, int32_t ptr) {
+ free(reinterpret_cast<void *>(ptr));
+}
+
+SoftAVC::SoftAVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mHandle(new tagAVCHandle),
+ mInputBufferCount(0),
+ mWidth(160),
+ mHeight(120),
+ mCropLeft(0),
+ mCropTop(0),
+ mCropRight(mWidth - 1),
+ mCropBottom(mHeight - 1),
+ mSPSSeen(false),
+ mPPSSeen(false),
+ mCurrentTimeUs(-1),
+ mEOSStatus(INPUT_DATA_AVAILABLE),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftAVC::~SoftAVC() {
+ PVAVCCleanUpDecoder(mHandle);
+
+ delete mHandle;
+ mHandle = NULL;
+}
+
+void SoftAVC::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumInputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumOutputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ def.format.video.pNativeWindow = NULL;
+
+ def.nBufferSize =
+ (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
+
+ addPort(def);
+}
+
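+// Hook up the PV AVC decoder callbacks so that DPB allocation and frame
+// binding use this component's output port buffers directly (see
+// activateSPS() and bindFrame() below).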
+status_t SoftAVC::initDecoder() {
+ memset(mHandle, 0, sizeof(tagAVCHandle));
+ mHandle->AVCObject = NULL;
+ mHandle->userData = this;
+ mHandle->CBAVC_DPBAlloc = ActivateSPSWrapper;
+ mHandle->CBAVC_FrameBind = BindFrameWrapper;
+ mHandle->CBAVC_FrameUnbind = UnbindFrame;
+ mHandle->CBAVC_Malloc = Malloc;
+ mHandle->CBAVC_Free = Free;
+
+ return OK;
+}
+
+OMX_ERRORTYPE SoftAVC::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ if (formatParams->nPortIndex == 0) {
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ } else {
+ CHECK_EQ(formatParams->nPortIndex, 1u);
+
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ formatParams->xFramerate = 0;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "video_decoder.avc",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::getConfig(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexConfigCommonOutputCrop:
+ {
+ OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
+
+ if (rectParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ rectParams->nLeft = mCropLeft;
+ rectParams->nTop = mCropTop;
+ rectParams->nWidth = mCropRight - mCropLeft + 1;
+ rectParams->nHeight = mCropBottom - mCropTop + 1;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
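+// Locates the first complete NAL unit in the input buffer. The buffer must
+// begin with a start code; the fragment extends up to the next start code or,
+// failing that, to the end of the buffer.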
+static void findNALFragment(
+ const OMX_BUFFERHEADERTYPE *inHeader,
+ const uint8_t **fragPtr, size_t *fragSize) {
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+
+ size_t size = inHeader->nFilledLen;
+
+ CHECK(size >= 4);
+ CHECK(!memcmp(kStartCode, data, 4));
+
+ size_t offset = 4;
+ while (offset + 3 < size && memcmp(kStartCode, &data[offset], 4)) {
+ ++offset;
+ }
+
+ *fragPtr = &data[4];
+ if (offset + 3 >= size) {
+ *fragSize = size - 4;
+ } else {
+ *fragSize = offset - 4;
+ }
+}
+
+void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
+ return;
+ }
+
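+    // Only decode while every output buffer is owned by us, since the decoder
+    // writes decoded pictures directly into the port buffers via bindFrame().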
+ while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
+ && outQueue.size() == kNumOutputBuffers) {
+ if (mEOSStatus == INPUT_EOS_SEEN) {
+ OMX_BUFFERHEADERTYPE *outHeader;
+ if (drainOutputBuffer(&outHeader)) {
+ List<BufferInfo *>::iterator it = outQueue.begin();
+ while ((*it)->mHeader != outHeader) {
+ ++it;
+ }
+
+ BufferInfo *outInfo = *it;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(it);
+ outInfo = NULL;
+
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ return;
+ }
+
+ BufferInfo *outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ outHeader->nTimeStamp = 0;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+
+ mEOSStatus = OUTPUT_FRAMES_FLUSHED;
+ return;
+ }
+
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ mEOSStatus = INPUT_EOS_SEEN;
+ continue;
+ }
+
+ mCurrentTimeUs = inHeader->nTimeStamp;
+
+ const uint8_t *fragPtr;
+ size_t fragSize;
+ findNALFragment(inHeader, &fragPtr, &fragSize);
+
+ bool releaseFragment;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ status_t err = decodeFragment(
+ fragPtr, fragSize,
+ &releaseFragment, &outHeader);
+
+ if (releaseFragment) {
+ CHECK_GE(inHeader->nFilledLen, fragSize + 4);
+
+ inHeader->nOffset += fragSize + 4;
+ inHeader->nFilledLen -= fragSize + 4;
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+
+ if (outHeader != NULL) {
+ List<BufferInfo *>::iterator it = outQueue.begin();
+ while ((*it)->mHeader != outHeader) {
+ ++it;
+ }
+
+ BufferInfo *outInfo = *it;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(it);
+ outInfo = NULL;
+
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ return;
+ }
+
+ if (err == INFO_FORMAT_CHANGED) {
+ return;
+ }
+
+ if (err != OK) {
+ notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
+ return;
+ }
+ }
+}
+
+status_t SoftAVC::decodeFragment(
+ const uint8_t *fragPtr, size_t fragSize,
+ bool *releaseFragment,
+ OMX_BUFFERHEADERTYPE **outHeader) {
+ *releaseFragment = true;
+ *outHeader = NULL;
+
+ int nalType;
+ int nalRefIdc;
+ AVCDec_Status res =
+ PVAVCDecGetNALType(
+ const_cast<uint8_t *>(fragPtr), fragSize,
+ &nalType, &nalRefIdc);
+
+ if (res != AVCDEC_SUCCESS) {
+ LOGV("cannot determine nal type");
+ return ERROR_MALFORMED;
+ }
+
+ if (nalType != AVC_NALTYPE_SPS && nalType != AVC_NALTYPE_PPS
+ && (!mSPSSeen || !mPPSSeen)) {
+ // We haven't seen SPS or PPS yet.
+ return OK;
+ }
+
+ switch (nalType) {
+ case AVC_NALTYPE_SPS:
+ {
+ mSPSSeen = true;
+
+ res = PVAVCDecSeqParamSet(
+ mHandle, const_cast<uint8_t *>(fragPtr),
+ fragSize);
+
+ if (res != AVCDEC_SUCCESS) {
+ return ERROR_MALFORMED;
+ }
+
+ AVCDecObject *pDecVid = (AVCDecObject *)mHandle->AVCObject;
+
+ int32_t width =
+ (pDecVid->seqParams[0]->pic_width_in_mbs_minus1 + 1) * 16;
+
+ int32_t height =
+ (pDecVid->seqParams[0]->pic_height_in_map_units_minus1 + 1) * 16;
+
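+            // Convert the SPS frame_crop_* offsets (given in 2-pixel units
+            // horizontally and, depending on frame_mbs_only_flag, 2- or
+            // 4-pixel units vertically for 4:2:0 content) into pixel
+            // coordinates.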
+ int32_t crop_left, crop_right, crop_top, crop_bottom;
+ if (pDecVid->seqParams[0]->frame_cropping_flag)
+ {
+ crop_left = 2 * pDecVid->seqParams[0]->frame_crop_left_offset;
+ crop_right =
+ width - (2 * pDecVid->seqParams[0]->frame_crop_right_offset + 1);
+
+ if (pDecVid->seqParams[0]->frame_mbs_only_flag)
+ {
+ crop_top = 2 * pDecVid->seqParams[0]->frame_crop_top_offset;
+ crop_bottom =
+ height -
+ (2 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
+ }
+ else
+ {
+ crop_top = 4 * pDecVid->seqParams[0]->frame_crop_top_offset;
+ crop_bottom =
+ height -
+ (4 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
+ }
+ } else {
+ crop_bottom = height - 1;
+ crop_right = width - 1;
+ crop_top = crop_left = 0;
+ }
+
+ status_t err = OK;
+
+ if (mWidth != width || mHeight != height) {
+ mWidth = width;
+ mHeight = height;
+
+ updatePortDefinitions();
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+
+ err = INFO_FORMAT_CHANGED;
+ }
+
+ if (mCropLeft != crop_left
+ || mCropTop != crop_top
+ || mCropRight != crop_right
+ || mCropBottom != crop_bottom) {
+ mCropLeft = crop_left;
+ mCropTop = crop_top;
+ mCropRight = crop_right;
+ mCropBottom = crop_bottom;
+
+ notify(OMX_EventPortSettingsChanged,
+ 1,
+ OMX_IndexConfigCommonOutputCrop,
+ NULL);
+ }
+
+ return err;
+ }
+
+ case AVC_NALTYPE_PPS:
+ {
+ mPPSSeen = true;
+
+ res = PVAVCDecPicParamSet(
+ mHandle, const_cast<uint8_t *>(fragPtr),
+ fragSize);
+
+ if (res != AVCDEC_SUCCESS) {
+ LOGV("PVAVCDecPicParamSet returned error %d", res);
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+ }
+
+ case AVC_NALTYPE_SLICE:
+ case AVC_NALTYPE_IDR:
+ {
+ res = PVAVCDecodeSlice(
+ mHandle, const_cast<uint8_t *>(fragPtr),
+ fragSize);
+
+ if (res == AVCDEC_PICTURE_OUTPUT_READY) {
+ *releaseFragment = false;
+
+ if (!drainOutputBuffer(outHeader)) {
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+ }
+
+ if (res == AVCDEC_PICTURE_READY || res == AVCDEC_SUCCESS) {
+ return OK;
+ } else {
+ LOGV("PVAVCDecodeSlice returned error %d", res);
+ return ERROR_MALFORMED;
+ }
+ }
+
+ case AVC_NALTYPE_SEI:
+ {
+ res = PVAVCDecSEI(
+ mHandle, const_cast<uint8_t *>(fragPtr),
+ fragSize);
+
+ if (res != AVCDEC_SUCCESS) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+ }
+
+ case AVC_NALTYPE_AUD:
+ case AVC_NALTYPE_FILL:
+ case AVC_NALTYPE_EOSEQ:
+ {
+ return OK;
+ }
+
+ default:
+ {
+ LOGE("Should not be here, unknown nalType %d", nalType);
+
+ return ERROR_MALFORMED;
+ }
+ }
+
+ return OK;
+}
+
+bool SoftAVC::drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader) {
+ int32_t index;
+ int32_t Release;
+ AVCFrameIO Output;
+ Output.YCbCr[0] = Output.YCbCr[1] = Output.YCbCr[2] = NULL;
+ AVCDec_Status status =
+ PVAVCDecGetOutput(mHandle, &index, &Release, &Output);
+
+ if (status != AVCDEC_SUCCESS) {
+ return false;
+ }
+
+ PortInfo *port = editPortInfo(1);
+ CHECK_GE(index, 0);
+ CHECK_LT((size_t)index, port->mBuffers.size());
+ CHECK(port->mBuffers.editItemAt(index).mOwnedByUs);
+
+ *outHeader = port->mBuffers.editItemAt(index).mHeader;
+ (*outHeader)->nOffset = 0;
+ (*outHeader)->nFilledLen = port->mDef.nBufferSize;
+ (*outHeader)->nFlags = 0;
+
+ return true;
+}
+
+void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0) {
+ PVAVCDecReset(mHandle);
+
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+ }
+}
+
+void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+void SoftAVC::updatePortDefinitions() {
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def = &editPortInfo(1)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def->nBufferSize =
+ (def->format.video.nFrameWidth
+ * def->format.video.nFrameHeight * 3) / 2;
+}
+
+// static
+int32_t SoftAVC::ActivateSPSWrapper(
+ void *userData, unsigned int sizeInMbs, unsigned int numBuffers) {
+ return static_cast<SoftAVC *>(userData)->activateSPS(sizeInMbs, numBuffers);
+}
+
+// static
+int32_t SoftAVC::BindFrameWrapper(
+ void *userData, int32_t index, uint8_t **yuv) {
+ return static_cast<SoftAVC *>(userData)->bindFrame(index, yuv);
+}
+
+// static
+void SoftAVC::UnbindFrame(void *userData, int32_t index) {
+}
+
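+// Called by the decoder once the SPS is parsed; verifies that enough output
+// buffers were allocated and that each can hold a YUV 4:2:0 picture of the
+// signalled size (384 bytes per 16x16 macroblock).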
+int32_t SoftAVC::activateSPS(
+ unsigned int sizeInMbs, unsigned int numBuffers) {
+ PortInfo *port = editPortInfo(1);
+ CHECK_GE(port->mBuffers.size(), numBuffers);
+ CHECK_GE(port->mDef.nBufferSize, (sizeInMbs << 7) * 3);
+
+ return 1;
+}
+
+int32_t SoftAVC::bindFrame(int32_t index, uint8_t **yuv) {
+ PortInfo *port = editPortInfo(1);
+
+ CHECK_GE(index, 0);
+ CHECK_LT((size_t)index, port->mBuffers.size());
+
+ BufferInfo *outBuffer =
+ &port->mBuffers.editItemAt(index);
+
+ CHECK(outBuffer->mOwnedByUs);
+
+ outBuffer->mHeader->nTimeStamp = mCurrentTimeUs;
+ *yuv = outBuffer->mHeader->pBuffer;
+
+ return 1;
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftAVC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.h b/media/libstagefright/codecs/avc/dec/SoftAVC.h
new file mode 100644
index 0000000..1594b4d
--- /dev/null
+++ b/media/libstagefright/codecs/avc/dec/SoftAVC.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_AVC_H_
+
+#define SOFT_AVC_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct tagAVCHandle;
+
+namespace android {
+
+struct SoftAVC : public SimpleSoftOMXComponent {
+ SoftAVC(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftAVC();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumInputBuffers = 4,
+ kNumOutputBuffers = 18,
+ };
+
+ enum EOSStatus {
+ INPUT_DATA_AVAILABLE,
+ INPUT_EOS_SEEN,
+ OUTPUT_FRAMES_FLUSHED,
+ };
+
+ tagAVCHandle *mHandle;
+
+ size_t mInputBufferCount;
+
+ int32_t mWidth, mHeight;
+ int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+
+ bool mSPSSeen, mPPSSeen;
+
+ int64_t mCurrentTimeUs;
+
+ EOSStatus mEOSStatus;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+
+ status_t decodeFragment(
+ const uint8_t *fragPtr, size_t fragSize,
+ bool *releaseFrames,
+ OMX_BUFFERHEADERTYPE **outHeader);
+
+ void updatePortDefinitions();
+ bool drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader);
+
+ static int32_t ActivateSPSWrapper(
+ void *userData, unsigned int sizeInMbs, unsigned int numBuffers);
+
+ static int32_t BindFrameWrapper(
+ void *userData, int32_t index, uint8_t **yuv);
+
+ static void UnbindFrame(void *userData, int32_t index);
+
+ int32_t activateSPS(
+ unsigned int sizeInMbs, unsigned int numBuffers);
+
+ int32_t bindFrame(int32_t index, uint8_t **yuv);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
+};
+
+} // namespace android
+
+#endif // SOFT_AVC_H_
+
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index cfb9fe4..6e98559 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -10,3 +10,22 @@
LOCAL_MODULE := libstagefright_g711dec
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftG711.cpp
+
+LOCAL_C_INCLUDES := \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_g711dec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
new file mode 100644
index 0000000..15e2c26
--- /dev/null
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftG711"
+#include <utils/Log.h>
+
+#include "SoftG711.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftG711::SoftG711(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mIsMLaw(true),
+ mNumChannels(1),
+ mSignalledError(false) {
+ if (!strcmp(name, "OMX.google.g711.alaw.decoder")) {
+ mIsMLaw = false;
+ } else {
+ CHECK(!strcmp(name, "OMX.google.g711.mlaw.decoder"));
+ }
+
+ initPorts();
+}
+
+SoftG711::~SoftG711() {
+}
+
+void SoftG711::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(
+ mIsMLaw
+ ? MEDIA_MIMETYPE_AUDIO_G711_MLAW
+ : MEDIA_MIMETYPE_AUDIO_G711_ALAW);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingG711;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+OMX_ERRORTYPE SoftG711::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ pcmParams->nChannels = mNumChannels;
+ pcmParams->nSamplingRate = 8000;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftG711::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (pcmParams->nChannels < 1 || pcmParams->nChannels > 2) {
+ return OMX_ErrorUndefined;
+ }
+
+ mNumChannels = pcmParams->nChannels;
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (mIsMLaw) {
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.g711mlaw",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ } else {
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.g711alaw",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+void SoftG711::onQueueFilled(OMX_U32 portIndex) {
+ if (mSignalledError) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+        if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
+            LOGE("input buffer too large (%lu).", inHeader->nFilledLen);
+
+            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+            mSignalledError = true;
+
+            return;
+        }
+
+ const uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
+
+ if (mIsMLaw) {
+ DecodeMLaw(
+ reinterpret_cast<int16_t *>(outHeader->pBuffer),
+ inputptr, inHeader->nFilledLen);
+ } else {
+ DecodeALaw(
+ reinterpret_cast<int16_t *>(outHeader->pBuffer),
+ inputptr, inHeader->nFilledLen);
+ }
+
+ outHeader->nTimeStamp = inHeader->nTimeStamp;
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = inHeader->nFilledLen * sizeof(int16_t);
+ outHeader->nFlags = 0;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+}
+
+// static
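+// Expands 8-bit A-law samples to 16-bit linear PCM (ITU-T G.711): the even
+// bits are toggled with 0x55, the 3-bit exponent and 4-bit mantissa are
+// expanded back to a linear magnitude, and the sign is taken from the top bit.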
+void SoftG711::DecodeALaw(
+ int16_t *out, const uint8_t *in, size_t inSize) {
+ while (inSize-- > 0) {
+ int32_t x = *in++;
+
+ int32_t ix = x ^ 0x55;
+ ix &= 0x7f;
+
+ int32_t iexp = ix >> 4;
+ int32_t mant = ix & 0x0f;
+
+ if (iexp > 0) {
+ mant += 16;
+ }
+
+ mant = (mant << 4) + 8;
+
+ if (iexp > 1) {
+ mant = mant << (iexp - 1);
+ }
+
+ *out++ = (x > 127) ? mant : -mant;
+ }
+}
+
+// static
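+// Expands 8-bit mu-law samples to 16-bit linear PCM (ITU-T G.711): the byte is
+// complemented, the 3-bit exponent and 4-bit mantissa are extracted, and the
+// standard bias of 33 (scaled by 4 here) is removed; the sign is taken from
+// the top bit.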
+void SoftG711::DecodeMLaw(
+ int16_t *out, const uint8_t *in, size_t inSize) {
+ while (inSize-- > 0) {
+ int32_t x = *in++;
+
+ int32_t mantissa = ~x;
+ int32_t exponent = (mantissa >> 4) & 7;
+ int32_t segment = exponent + 1;
+ mantissa &= 0x0f;
+
+ int32_t step = 4 << segment;
+
+ int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
+
+ *out++ = (x < 0x80) ? -abs : abs;
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftG711(name, callbacks, appData, component);
+}
+
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
new file mode 100644
index 0000000..bff0c68
--- /dev/null
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_G711_H_
+
+#define SOFT_G711_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+namespace android {
+
+struct SoftG711 : public SimpleSoftOMXComponent {
+ SoftG711(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftG711();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerFrame = 16384,
+ };
+
+ bool mIsMLaw;
+ OMX_U32 mNumChannels;
+ bool mSignalledError;
+
+ void initPorts();
+
+ static void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
+ static void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftG711);
+};
+
+} // namespace android
+
+#endif // SOFT_G711_H_
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index 2d9bcc6..f1bec08 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -48,3 +48,29 @@
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftMPEG4.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/include \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_m4vh263dec
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_mpeg4dec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
new file mode 100644
index 0000000..13e1662
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftMPEG4"
+#include <utils/Log.h>
+
+#include "SoftMPEG4.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "mp4dec_api.h"
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftMPEG4::SoftMPEG4(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mMode(MODE_MPEG4),
+ mHandle(new tagvideoDecControls),
+ mInputBufferCount(0),
+ mWidth(352),
+ mHeight(288),
+ mCropLeft(0),
+ mCropTop(0),
+ mCropRight(mWidth - 1),
+ mCropBottom(mHeight - 1),
+ mSignalledError(false),
+ mInitialized(false),
+ mFramesConfigured(false),
+ mNumSamplesOutput(0),
+ mOutputPortSettingsChange(NONE) {
+ if (!strcmp(name, "OMX.google.h263.decoder")) {
+ mMode = MODE_H263;
+ } else {
+ CHECK(!strcmp(name, "OMX.google.mpeg4.decoder"));
+ }
+
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftMPEG4::~SoftMPEG4() {
+ if (mInitialized) {
+ PVCleanUpVideoDecoder(mHandle);
+ }
+
+ delete mHandle;
+ mHandle = NULL;
+}
+
+void SoftMPEG4::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumInputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.video.cMIMEType =
+ (mMode == MODE_MPEG4)
+ ? const_cast<char *>(MEDIA_MIMETYPE_VIDEO_MPEG4)
+ : const_cast<char *>(MEDIA_MIMETYPE_VIDEO_H263);
+
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+
+ def.format.video.eCompressionFormat =
+ mMode == MODE_MPEG4 ? OMX_VIDEO_CodingMPEG4 : OMX_VIDEO_CodingH263;
+
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumOutputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ def.format.video.pNativeWindow = NULL;
+
+ def.nBufferSize =
+ (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
+
+ addPort(def);
+}
+
+status_t SoftMPEG4::initDecoder() {
+ memset(mHandle, 0, sizeof(tagvideoDecControls));
+ return OK;
+}
+
+OMX_ERRORTYPE SoftMPEG4::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ if (formatParams->nPortIndex == 0) {
+ formatParams->eCompressionFormat =
+ (mMode == MODE_MPEG4)
+ ? OMX_VIDEO_CodingMPEG4 : OMX_VIDEO_CodingH263;
+
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ } else {
+ CHECK_EQ(formatParams->nPortIndex, 1u);
+
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ formatParams->xFramerate = 0;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftMPEG4::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (mMode == MODE_MPEG4) {
+ if (strncmp((const char *)roleParams->cRole,
+ "video_decoder.mpeg4",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ } else {
+ if (strncmp((const char *)roleParams->cRole,
+ "video_decoder.h263",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftMPEG4::getConfig(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexConfigCommonOutputCrop:
+ {
+ OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
+
+ if (rectParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ rectParams->nLeft = mCropLeft;
+ rectParams->nTop = mCropTop;
+ rectParams->nWidth = mCropRight - mCropLeft + 1;
+ rectParams->nHeight = mCropBottom - mCropTop + 1;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
+void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!inQueue.empty() && outQueue.size() == kNumOutputBuffers) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ PortInfo *port = editPortInfo(1);
+
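+        // The two output buffers are used in strict alternation; pick the one
+        // whose index matches the parity of the number of frames emitted so far.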
+ OMX_BUFFERHEADERTYPE *outHeader =
+ port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ ++mInputBufferCount;
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ List<BufferInfo *>::iterator it = outQueue.begin();
+ while ((*it)->mHeader != outHeader) {
+ ++it;
+ }
+
+ BufferInfo *outInfo = *it;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(it);
+ outInfo = NULL;
+
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ return;
+ }
+
+ uint8_t *bitstream = inHeader->pBuffer + inHeader->nOffset;
+
+ if (!mInitialized) {
+ uint8_t *vol_data[1];
+ int32_t vol_size = 0;
+
+ vol_data[0] = NULL;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ vol_data[0] = bitstream;
+ vol_size = inHeader->nFilledLen;
+ }
+
+ MP4DecodingMode mode =
+ (mMode == MODE_MPEG4) ? MPEG4_MODE : H263_MODE;
+
+ Bool success = PVInitVideoDecoder(
+ mHandle, vol_data, &vol_size, 1, mWidth, mHeight, mode);
+
+ if (!success) {
+ LOGW("PVInitVideoDecoder failed. Unsupported content?");
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle);
+ if (mode != actualMode) {
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ PVSetPostProcType((VideoDecControls *) mHandle, 0);
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+
+ mInitialized = true;
+
+ if (mode == MPEG4_MODE && portSettingsChanged()) {
+ return;
+ }
+
+ continue;
+ }
+
+ if (!mFramesConfigured) {
+ PortInfo *port = editPortInfo(1);
+ OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(1).mHeader;
+
+ PVSetReferenceYUV(mHandle, outHeader->pBuffer);
+
+ mFramesConfigured = true;
+ }
+
+ uint32_t timestamp = 0xFFFFFFFF;
+ int32_t bufferSize = inHeader->nFilledLen;
+
+ uint32_t useExtTimestamp = 0;
+ if (PVDecodeVideoFrame(
+                mHandle, &bitstream, &timestamp, &bufferSize,
+ &useExtTimestamp,
+ outHeader->pBuffer) != PV_TRUE) {
+ LOGE("failed to decode video frame.");
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ if (portSettingsChanged()) {
+ return;
+ }
+
+ outHeader->nTimeStamp = inHeader->nTimeStamp;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ ++mInputBufferCount;
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+ outHeader->nFlags = 0;
+
+ List<BufferInfo *>::iterator it = outQueue.begin();
+ while ((*it)->mHeader != outHeader) {
+ ++it;
+ }
+
+ BufferInfo *outInfo = *it;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(it);
+ outInfo = NULL;
+
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mNumSamplesOutput;
+ }
+}
+
+bool SoftMPEG4::portSettingsChanged() {
+ int32_t disp_width, disp_height;
+ PVGetVideoDimensions(mHandle, &disp_width, &disp_height);
+
+ int32_t buf_width, buf_height;
+ PVGetBufferDimensions(mHandle, &buf_width, &buf_height);
+
+ CHECK_LE(disp_width, buf_width);
+ CHECK_LE(disp_height, buf_height);
+
+ LOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d",
+ disp_width, disp_height, buf_width, buf_height);
+
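+    // A change in the visible (display) size only requires a crop update,
+    // while a change in the buffer size forces the full output-port
+    // reconfiguration handled below.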
+ if (mCropRight != disp_width - 1
+ || mCropBottom != disp_height - 1) {
+ mCropLeft = 0;
+ mCropTop = 0;
+ mCropRight = disp_width - 1;
+ mCropBottom = disp_height - 1;
+
+ notify(OMX_EventPortSettingsChanged,
+ 1,
+ OMX_IndexConfigCommonOutputCrop,
+ NULL);
+ }
+
+ if (buf_width != mWidth || buf_height != mHeight) {
+ mWidth = buf_width;
+ mHeight = buf_height;
+
+ updatePortDefinitions();
+
+ if (mMode == MODE_H263) {
+ PVCleanUpVideoDecoder(mHandle);
+
+ uint8_t *vol_data[1];
+ int32_t vol_size = 0;
+
+ vol_data[0] = NULL;
+ if (!PVInitVideoDecoder(
+ mHandle, vol_data, &vol_size, 1, mWidth, mHeight,
+ H263_MODE)) {
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return true;
+ }
+ }
+
+ mFramesConfigured = false;
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return true;
+ }
+
+ return false;
+}
+
+void SoftMPEG4::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0 && mInitialized) {
+ CHECK_EQ((int)PVResetVideoDecoder(mHandle), (int)PV_TRUE);
+ }
+}
+
+void SoftMPEG4::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+void SoftMPEG4::updatePortDefinitions() {
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def = &editPortInfo(1)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
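+    // Size the output buffers for dimensions rounded up to 16-pixel
+    // macroblock boundaries, since the decoder writes whole macroblocks.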
+ def->nBufferSize =
+ (((def->format.video.nFrameWidth + 15) & -16)
+ * ((def->format.video.nFrameHeight + 15) & -16) * 3) / 2;
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftMPEG4(name, callbacks, appData, component);
+}
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
new file mode 100644
index 0000000..dff08a7
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_MPEG4_H_
+
+#define SOFT_MPEG4_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct tagvideoDecControls;
+
+namespace android {
+
+struct SoftMPEG4 : public SimpleSoftOMXComponent {
+ SoftMPEG4(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftMPEG4();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumInputBuffers = 4,
+ kNumOutputBuffers = 2,
+ };
+
+ enum {
+ MODE_MPEG4,
+ MODE_H263,
+
+ } mMode;
+
+ tagvideoDecControls *mHandle;
+
+ size_t mInputBufferCount;
+
+ int32_t mWidth, mHeight;
+ int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+
+ bool mSignalledError;
+ bool mInitialized;
+ bool mFramesConfigured;
+
+ int32_t mNumSamplesOutput;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+
+ void updatePortDefinitions();
+ bool portSettingsChanged();
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4);
+};
+
+} // namespace android
+
+#endif // SOFT_MPEG4_H_
+
+
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index 753500e..229988e 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -57,3 +57,26 @@
include $(BUILD_STATIC_LIBRARY)
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftMP3.cpp
+
+LOCAL_C_INCLUDES := \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/include
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_mp3dec
+
+LOCAL_MODULE := libstagefright_soft_mp3dec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
new file mode 100644
index 0000000..f6770b0
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -0,0 +1,325 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftMP3"
+#include <utils/Log.h>
+
+#include "SoftMP3.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+#include "include/pvmp3decoder_api.h"
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftMP3::SoftMP3(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mConfig(new tPVMP3DecoderExternal),
+ mDecoderBuf(NULL),
+ mAnchorTimeUs(0),
+ mNumFramesOutput(0),
+ mNumChannels(2),
+ mSamplingRate(44100),
+ mSignalledError(false),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ initDecoder();
+}
+
+SoftMP3::~SoftMP3() {
+ if (mDecoderBuf != NULL) {
+ free(mDecoderBuf);
+ mDecoderBuf = NULL;
+ }
+
+ delete mConfig;
+ mConfig = NULL;
+}
+
+void SoftMP3::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_MPEG);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingMP3;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kOutputBufferSize;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+void SoftMP3::initDecoder() {
+ mConfig->equalizerType = flat;
+ mConfig->crcEnabled = false;
+
+ uint32_t memRequirements = pvmp3_decoderMemRequirements();
+ mDecoderBuf = malloc(memRequirements);
+
+ pvmp3_InitDecoder(mConfig, mDecoderBuf);
+}
+
+OMX_ERRORTYPE SoftMP3::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ pcmParams->nChannels = mNumChannels;
+ pcmParams->nSamplingRate = mSamplingRate;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftMP3::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.mp3",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
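+        // nOffset == 0 marks the first pass over this input buffer; use its
+        // timestamp as the anchor for the PCM timestamps derived below.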
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
+
+ mConfig->pInputBuffer =
+ inHeader->pBuffer + inHeader->nOffset;
+
+ mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
+ mConfig->inputBufferMaxLength = 0;
+ mConfig->inputBufferUsedLength = 0;
+
+ mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
+
+ mConfig->pOutputBuffer =
+ reinterpret_cast<int16_t *>(outHeader->pBuffer);
+
+ ERROR_CODE decoderErr;
+ if ((decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf))
+ != NO_DECODING_ERROR) {
+ LOGV("mp3 decoder returned error %d", decoderErr);
+
+ if (decoderErr != NO_ENOUGH_MAIN_DATA_ERROR ||
+ mConfig->outputFrameSize == 0) {
+
+ if (mConfig->outputFrameSize == 0) {
+ LOGE("Output frame size is 0");
+ }
+
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ // This is recoverable, just ignore the current frame and
+ // play silence instead.
+ memset(outHeader->pBuffer,
+ 0,
+ mConfig->outputFrameSize * sizeof(int16_t));
+
+ mConfig->inputBufferUsedLength = inHeader->nFilledLen;
+ } else if (mConfig->samplingRate != mSamplingRate
+ || mConfig->num_channels != mNumChannels) {
+ mSamplingRate = mConfig->samplingRate;
+ mNumChannels = mConfig->num_channels;
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t);
+
+ outHeader->nTimeStamp =
+ mAnchorTimeUs
+ + (mNumFramesOutput * 1000000ll) / mConfig->samplingRate;
+
+ outHeader->nFlags = 0;
+
+ CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength);
+
+ inHeader->nOffset += mConfig->inputBufferUsedLength;
+ inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
+
+ mNumFramesOutput += mConfig->outputFrameSize / mNumChannels;
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+}
+
+void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0) {
+ // Make sure that the next buffer output does not still
+ // depend on fragments from the last one decoded.
+ pvmp3_InitDecoder(mConfig, mDecoderBuf);
+ }
+}
+
+void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftMP3(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
new file mode 100644
index 0000000..70d0682
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_MP3_H_
+
+#define SOFT_MP3_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct tPVMP3DecoderExternal;
+
+namespace android {
+
+struct SoftMP3 : public SimpleSoftOMXComponent {
+ SoftMP3(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftMP3();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kOutputBufferSize = 4608 * 2
+ };
+
+ tPVMP3DecoderExternal *mConfig;
+ void *mDecoderBuf;
+ int64_t mAnchorTimeUs;
+ int64_t mNumFramesOutput;
+
+ int32_t mNumChannels;
+ int32_t mSamplingRate;
+
+ bool mConfigured;
+
+ bool mSignalledError;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ void initDecoder();
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftMP3);
+};
+
+} // namespace android
+
+#endif // SOFT_MP3_H_
+
+
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index b769f0d..832b885 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -2,15 +2,42 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- VPXDecoder.cpp
+ VPXDecoder.cpp \
LOCAL_MODULE := libstagefright_vpxdec
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/base/media/libstagefright/include \
- $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ frameworks/base/include/media/stagefright/openmax \
$(TOP)/external/libvpx \
$(TOP)/external/libvpx/vpx_codec \
$(TOP)/external/libvpx/vpx_ports
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftVPX.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TOP)/external/libvpx \
+ $(TOP)/external/libvpx/vpx_codec \
+ $(TOP)/external/libvpx/vpx_ports \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_vpxdec \
+ libvpx
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_vpxdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
new file mode 100644
index 0000000..e9ce719
--- /dev/null
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -0,0 +1,366 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftVPX"
+#include <utils/Log.h>
+
+#include "SoftVPX.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+#include "vpx/vpx_decoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8dx.h"
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftVPX::SoftVPX(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mCtx(NULL),
+ mWidth(320),
+ mHeight(240),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftVPX::~SoftVPX() {
+ vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
+ delete (vpx_codec_ctx_t *)mCtx;
+ mCtx = NULL;
+}
+
+void SoftVPX::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VPX);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX;
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ def.format.video.pNativeWindow = NULL;
+
+ def.nBufferSize =
+ (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
+
+ addPort(def);
+}
+
+status_t SoftVPX::initDecoder() {
+ mCtx = new vpx_codec_ctx_t;
+ vpx_codec_err_t vpx_err;
+ if ((vpx_err = vpx_codec_dec_init(
+ (vpx_codec_ctx_t *)mCtx, &vpx_codec_vp8_dx_algo, NULL, 0))) {
+ LOGE("on2 decoder failed to initialize. (%d)", vpx_err);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+OMX_ERRORTYPE SoftVPX::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ if (formatParams->nPortIndex == 0) {
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ } else {
+ CHECK_EQ(formatParams->nPortIndex, 1u);
+
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ formatParams->xFramerate = 0;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftVPX::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "video_decoder.vpx",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+void SoftVPX::onQueueFilled(OMX_U32 portIndex) {
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (vpx_codec_decode(
+ (vpx_codec_ctx_t *)mCtx,
+ inHeader->pBuffer + inHeader->nOffset,
+ inHeader->nFilledLen,
+ NULL,
+ 0)) {
+ LOGE("on2 decoder failed to decode frame.");
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ vpx_codec_iter_t iter = NULL;
+ vpx_image_t *img = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
+
+ if (img != NULL) {
+ CHECK_EQ(img->fmt, IMG_FMT_I420);
+
+ int32_t width = img->d_w;
+ int32_t height = img->d_h;
+
+ if (width != mWidth || height != mHeight) {
+ mWidth = width;
+ mHeight = height;
+
+ updatePortDefinitions();
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = (width * height * 3) / 2;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = inHeader->nTimeStamp;
+
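+            // Copy the decoded frame plane by plane, dropping any stride
+            // padding libvpx added, so the output buffer holds tightly
+            // packed planar YUV420.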
+ const uint8_t *srcLine = (const uint8_t *)img->planes[PLANE_Y];
+ uint8_t *dst = outHeader->pBuffer;
+ for (size_t i = 0; i < img->d_h; ++i) {
+ memcpy(dst, srcLine, img->d_w);
+
+ srcLine += img->stride[PLANE_Y];
+ dst += img->d_w;
+ }
+
+ srcLine = (const uint8_t *)img->planes[PLANE_U];
+ for (size_t i = 0; i < img->d_h / 2; ++i) {
+ memcpy(dst, srcLine, img->d_w / 2);
+
+ srcLine += img->stride[PLANE_U];
+ dst += img->d_w / 2;
+ }
+
+ srcLine = (const uint8_t *)img->planes[PLANE_V];
+ for (size_t i = 0; i < img->d_h / 2; ++i) {
+ memcpy(dst, srcLine, img->d_w / 2);
+
+ srcLine += img->stride[PLANE_V];
+ dst += img->d_w / 2;
+ }
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+}
+
+void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) {
+}
+
+void SoftVPX::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+void SoftVPX::updatePortDefinitions() {
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def = &editPortInfo(1)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def->nBufferSize =
+ (def->format.video.nFrameWidth
+ * def->format.video.nFrameHeight * 3) / 2;
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftVPX(name, callbacks, appData, component);
+}
+
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
new file mode 100644
index 0000000..3e814a2
--- /dev/null
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VPX_H_
+
+#define SOFT_VPX_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+namespace android {
+
+struct SoftVPX : public SimpleSoftOMXComponent {
+ SoftVPX(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftVPX();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumBuffers = 4
+ };
+
+ void *mCtx;
+
+ int32_t mWidth;
+ int32_t mHeight;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+
+ void updatePortDefinitions();
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
+};
+
+} // namespace android
+
+#endif // SOFT_VPX_H_
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 5c768c8..9251229 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -6,8 +6,33 @@
LOCAL_C_INCLUDES := \
frameworks/base/media/libstagefright/include \
- external/tremolo
+ external/tremolo \
LOCAL_MODULE := libstagefright_vorbisdec
include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftVorbis.cpp
+
+LOCAL_C_INCLUDES := \
+ external/tremolo \
+ frameworks/base/media/libstagefright/include \
+ frameworks/base/include/media/stagefright/openmax \
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_vorbisdec
+
+LOCAL_SHARED_LIBRARIES := \
+ libvorbisidec libstagefright libstagefright_omx \
+ libstagefright_foundation libutils
+
+LOCAL_MODULE := libstagefright_soft_vorbisdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
new file mode 100644
index 0000000..4091111
--- /dev/null
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftVorbis"
+#include <utils/Log.h>
+
+#include "SoftVorbis.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+extern "C" {
+ #include <Tremolo/codec_internal.h>
+
+ int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
+ int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
+ int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
+}
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftVorbis::SoftVorbis(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mInputBufferCount(0),
+ mState(NULL),
+ mVi(NULL),
+ mAnchorTimeUs(0),
+ mNumFramesOutput(0),
+ mNumFramesLeftOnPage(-1),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftVorbis::~SoftVorbis() {
+ if (mState != NULL) {
+ vorbis_dsp_clear(mState);
+ delete mState;
+ mState = NULL;
+ }
+
+ if (mVi != NULL) {
+ vorbis_info_clear(mVi);
+ delete mVi;
+ mVi = NULL;
+ }
+}
+
+void SoftVorbis::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_VORBIS);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+    def.format.audio.eEncoding = OMX_AUDIO_CodingVORBIS;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftVorbis::initDecoder() {
+ return OK;
+}
+
+OMX_ERRORTYPE SoftVorbis::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioVorbis:
+ {
+ OMX_AUDIO_PARAM_VORBISTYPE *vorbisParams =
+ (OMX_AUDIO_PARAM_VORBISTYPE *)params;
+
+ if (vorbisParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ vorbisParams->nBitRate = 0;
+ vorbisParams->nMinBitRate = 0;
+ vorbisParams->nMaxBitRate = 0;
+ vorbisParams->nAudioBandWidth = 0;
+ vorbisParams->nQuality = 3;
+ vorbisParams->bManaged = OMX_FALSE;
+ vorbisParams->bDownmix = OMX_FALSE;
+
+ if (!isConfigured()) {
+ vorbisParams->nChannels = 1;
+ vorbisParams->nSampleRate = 44100;
+ } else {
+ vorbisParams->nChannels = mVi->channels;
+ vorbisParams->nSampleRate = mVi->rate;
+ vorbisParams->nBitRate = mVi->bitrate_nominal;
+ vorbisParams->nMinBitRate = mVi->bitrate_lower;
+ vorbisParams->nMaxBitRate = mVi->bitrate_upper;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ if (!isConfigured()) {
+ pcmParams->nChannels = 1;
+ pcmParams->nSamplingRate = 44100;
+ } else {
+ pcmParams->nChannels = mVi->channels;
+ pcmParams->nSamplingRate = mVi->rate;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftVorbis::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.vorbis",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioVorbis:
+ {
+ const OMX_AUDIO_PARAM_VORBISTYPE *vorbisParams =
+ (const OMX_AUDIO_PARAM_VORBISTYPE *)params;
+
+ if (vorbisParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftVorbis::isConfigured() const {
+ return mInputBufferCount >= 2;
+}
+
+static void makeBitReader(
+ const void *data, size_t size,
+ ogg_buffer *buf, ogg_reference *ref, oggpack_buffer *bits) {
+ buf->data = (uint8_t *)data;
+ buf->size = size;
+ buf->refcount = 1;
+ buf->ptr.owner = NULL;
+
+ ref->buffer = buf;
+ ref->begin = 0;
+ ref->length = size;
+ ref->next = NULL;
+
+ oggpack_readinit(bits, ref);
+}
+
+void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
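+    // The first two input buffers carry codec config: the Vorbis info header
+    // and the setup (codebook) header. Skip the 7-byte packet prefix and
+    // unpack them before any audio is decoded.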
+ if (portIndex == 0 && mInputBufferCount < 2) {
+ BufferInfo *info = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *header = info->mHeader;
+
+ const uint8_t *data = header->pBuffer + header->nOffset;
+ size_t size = header->nFilledLen;
+
+ ogg_buffer buf;
+ ogg_reference ref;
+ oggpack_buffer bits;
+
+ makeBitReader(
+ (const uint8_t *)data + 7, size - 7,
+ &buf, &ref, &bits);
+
+ if (mInputBufferCount == 0) {
+ CHECK(mVi == NULL);
+ mVi = new vorbis_info;
+ vorbis_info_init(mVi);
+
+ CHECK_EQ(0, _vorbis_unpack_info(mVi, &bits));
+ } else {
+ CHECK_EQ(0, _vorbis_unpack_books(mVi, &bits));
+
+ CHECK(mState == NULL);
+ mState = new vorbis_dsp_state;
+ CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+
+ inQueue.erase(inQueue.begin());
+ info->mOwnedByUs = false;
+ notifyEmptyBufferDone(header);
+
+ ++mInputBufferCount;
+
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
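+        // The last four bytes of each input buffer carry the number of valid
+        // samples left on the current Ogg page; a negative value leaves the
+        // running count unchanged.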
+ int32_t numPageSamples;
+ CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples));
+ memcpy(&numPageSamples,
+ inHeader->pBuffer
+ + inHeader->nOffset + inHeader->nFilledLen - 4,
+ sizeof(numPageSamples));
+
+ if (numPageSamples >= 0) {
+ mNumFramesLeftOnPage = numPageSamples;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
+
+        inHeader->nFilledLen -= sizeof(numPageSamples);
+
+ ogg_buffer buf;
+ buf.data = inHeader->pBuffer + inHeader->nOffset;
+ buf.size = inHeader->nFilledLen;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = buf.size;
+ ref.next = NULL;
+
+ ogg_packet pack;
+ pack.packet = &ref;
+ pack.bytes = ref.length;
+ pack.b_o_s = 0;
+ pack.e_o_s = 0;
+ pack.granulepos = 0;
+ pack.packetno = 0;
+
+ int numFrames = 0;
+
+ int err = vorbis_dsp_synthesis(mState, &pack, 1);
+ if (err != 0) {
+ LOGW("vorbis_dsp_synthesis returned %d", err);
+ } else {
+ numFrames = vorbis_dsp_pcmout(
+ mState, (int16_t *)outHeader->pBuffer,
+ kMaxNumSamplesPerBuffer);
+
+ if (numFrames < 0) {
+ LOGE("vorbis_dsp_pcmout returned %d", numFrames);
+ numFrames = 0;
+ }
+ }
+
+ if (mNumFramesLeftOnPage >= 0) {
+ if (numFrames > mNumFramesLeftOnPage) {
+ LOGV("discarding %d frames at end of page",
+ numFrames - mNumFramesLeftOnPage);
+ numFrames = mNumFramesLeftOnPage;
+ }
+ mNumFramesLeftOnPage -= numFrames;
+ }
+
+ outHeader->nFilledLen = numFrames * sizeof(int16_t) * mVi->channels;
+ outHeader->nOffset = 0;
+ outHeader->nFlags = 0;
+
+ outHeader->nTimeStamp =
+ mAnchorTimeUs
+ + (mNumFramesOutput * 1000000ll) / mVi->rate;
+
+ mNumFramesOutput += numFrames;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0 && mState != NULL) {
+ // Make sure that the next buffer output does not still
+ // depend on fragments from the last one decoded.
+
+ mNumFramesOutput = 0;
+ vorbis_dsp_restart(mState);
+ }
+}
+
+void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftVorbis(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
new file mode 100644
index 0000000..e252f55
--- /dev/null
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VORBIS_H_
+
+#define SOFT_VORBIS_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct vorbis_dsp_state;
+struct vorbis_info;
+
+namespace android {
+
+struct SoftVorbis : public SimpleSoftOMXComponent {
+ SoftVorbis(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftVorbis();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerBuffer = 8192 * 2
+ };
+
+ size_t mInputBufferCount;
+
+ vorbis_dsp_state *mState;
+ vorbis_info *mVi;
+
+ int64_t mAnchorTimeUs;
+ int64_t mNumFramesOutput;
+ int32_t mNumFramesLeftOnPage;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVorbis);
+};
+
+} // namespace android
+
+#endif // SOFT_VORBIS_H_
+
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 62ba40f..702a7b4 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -9,6 +9,10 @@
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/hardware/msm7k
+ifneq ($(filter crespo crespo4g,$(TARGET_DEVICE)),)
+LOCAL_CFLAGS += -DTHIS_IS_CRESPO=1
+endif
+
LOCAL_MODULE:= libstagefright_color_conversion
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 3b92e5d..4b72a53 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -187,8 +187,7 @@
status_t ColorConverter::convertYUV420Planar(
const BitmapParams &src, const BitmapParams &dst) {
- if (!((dst.mWidth & 1) == 0
- && (src.mCropLeft & 1) == 0
+ if (!((src.mCropLeft & 1) == 0
&& src.cropWidth() == dst.cropWidth()
&& src.cropHeight() == dst.cropHeight())) {
return ERROR_UNSUPPORTED;
@@ -196,8 +195,8 @@
uint8_t *kAdjustedClip = initClip();
- uint32_t *dst_ptr = (uint32_t *)dst.mBits
- + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+ uint16_t *dst_ptr = (uint16_t *)dst.mBits
+ + dst.mCropTop * dst.mWidth + dst.mCropLeft;
const uint8_t *src_y =
(const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
@@ -260,7 +259,11 @@
| ((kAdjustedClip[g2] >> 2) << 5)
| (kAdjustedClip[b2] >> 3);
- dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
+ if (x + 1 < src.cropWidth()) {
+ *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
+ } else {
+ dst_ptr[x] = rgb1;
+ }
}
src_y += src.mWidth;
@@ -270,7 +273,7 @@
src_v += src.mWidth / 2;
}
- dst_ptr += dst.mWidth / 2;
+ dst_ptr += dst.mWidth;
}
return OK;
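
The converter above now addresses the RGB565 destination one pixel (uint16_t) at a time instead of one pixel pair (uint32_t), so an odd crop width no longer forces the destination width to be even. A minimal sketch of the resulting store pattern, assuming the little-endian layout that the combined 32-bit store relies on (the helper name is illustrative, not part of the change):

    #include <stdint.h>
    #include <stddef.h>

    // Write one RGB565 pixel pair, falling back to a single 16-bit store for
    // the last column of an odd-width crop.
    static void storeRgb565Pair(uint16_t *dst_ptr, size_t x, size_t cropWidth,
                                uint16_t rgb1, uint16_t rgb2) {
        if (x + 1 < cropWidth) {
            // Both pixels are inside the crop: one combined 32-bit store
            // (rgb1 lands in dst_ptr[x] on a little-endian CPU).
            *(uint32_t *)(&dst_ptr[x]) = ((uint32_t)rgb2 << 16) | rgb1;
        } else {
            // Odd crop width: only the first pixel of the pair is valid.
            dst_ptr[x] = rgb1;
        }
    }
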
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 31afc43..1828ac8 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -56,9 +56,23 @@
}
int halFormat;
+ size_t bufWidth, bufHeight;
+
switch (mColorFormat) {
+#ifndef THIS_IS_CRESPO
+ case OMX_COLOR_FormatYUV420Planar:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YV12;
+ bufWidth = (mWidth + 1) & ~1;
+ bufHeight = (mHeight + 1) & ~1;
+ break;
+ }
+#endif
+
default:
halFormat = HAL_PIXEL_FORMAT_RGB_565;
+ bufWidth = mWidth;
+ bufHeight = mHeight;
mConverter = new ColorConverter(
mColorFormat, OMX_COLOR_Format16bitRGB565);
@@ -75,15 +89,17 @@
native_window_set_usage(
mNativeWindow.get(),
GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
- | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
-
- CHECK_EQ(0, native_window_set_buffer_count(mNativeWindow.get(), 2));
+ | GRALLOC_USAGE_HW_TEXTURE
+#ifndef THIS_IS_CRESPO
+ | GRALLOC_USAGE_EXTERNAL_DISP
+#endif
+ ));
// Width must be multiple of 32???
CHECK_EQ(0, native_window_set_buffers_geometry(
mNativeWindow.get(),
- mCropRight - mCropLeft + 1,
- mCropBottom - mCropTop + 1,
+ bufWidth,
+ bufHeight,
halFormat));
uint32_t transform;
@@ -99,6 +115,14 @@
CHECK_EQ(0, native_window_set_buffers_transform(
mNativeWindow.get(), transform));
}
+
+ android_native_rect_t crop;
+ crop.left = mCropLeft;
+ crop.top = mCropTop;
+ crop.right = mCropRight + 1;
+ crop.bottom = mCropBottom + 1;
+
+ CHECK_EQ(0, native_window_set_crop(mNativeWindow.get(), &crop));
}
SoftwareRenderer::~SoftwareRenderer() {
@@ -106,9 +130,14 @@
mConverter = NULL;
}
+static int ALIGN(int x, int y) {
+ // y must be a power of 2.
+ return (x + y - 1) & ~(y - 1);
+}
+
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
- android_native_buffer_t *buf;
+ ANativeWindowBuffer *buf;
int err;
if ((err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf)) != 0) {
LOGW("Surface::dequeueBuffer returned error %d", err);
@@ -129,14 +158,40 @@
mConverter->convert(
data,
mWidth, mHeight,
- mCropLeft, mCropTop, mCropRight, mCropBottom,
+ 0, 0, mWidth - 1, mHeight - 1,
dst,
buf->stride, buf->height,
- 0, 0,
- mCropRight - mCropLeft,
- mCropBottom - mCropTop);
+ 0, 0, mWidth - 1, mHeight - 1);
} else {
- TRESPASS();
+ CHECK_EQ(mColorFormat, OMX_COLOR_FormatYUV420Planar);
+
+ const uint8_t *src_y = (const uint8_t *)data;
+ const uint8_t *src_u = (const uint8_t *)data + mWidth * mHeight;
+ const uint8_t *src_v = src_u + (mWidth / 2 * mHeight / 2);
+
+ uint8_t *dst_y = (uint8_t *)dst;
+ size_t dst_y_size = buf->stride * buf->height;
+ size_t dst_c_stride = ALIGN(buf->stride / 2, 16);
+ size_t dst_c_size = dst_c_stride * buf->height / 2;
+ uint8_t *dst_v = dst_y + dst_y_size;
+ uint8_t *dst_u = dst_v + dst_c_size;
+
+ for (int y = 0; y < mHeight; ++y) {
+ memcpy(dst_y, src_y, mWidth);
+
+ src_y += mWidth;
+ dst_y += buf->stride;
+ }
+
+ for (int y = 0; y < (mHeight + 1) / 2; ++y) {
+ memcpy(dst_u, src_u, (mWidth + 1) / 2);
+ memcpy(dst_v, src_v, (mWidth + 1) / 2);
+
+ src_u += mWidth / 2;
+ src_v += mWidth / 2;
+ dst_u += dst_c_stride;
+ dst_v += dst_c_stride;
+ }
}
CHECK_EQ(0, mapper.unlock(buf->handle));
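
The new non-crespo path above copies OMX_COLOR_FormatYUV420Planar frames straight into a HAL_PIXEL_FORMAT_YV12 buffer. A hedged sketch of the plane layout it assumes: the Cr (V) plane precedes the Cb (U) plane, and the chroma stride is half the luma stride rounded up to 16 bytes (struct and function names are illustrative):

    #include <stdint.h>
    #include <stddef.h>

    struct Yv12Planes {
        uint8_t *y;
        uint8_t *v;            // Cr comes first in YV12
        uint8_t *u;            // then Cb
        size_t chromaStride;
    };

    static Yv12Planes layoutYv12(uint8_t *base, size_t lumaStride, size_t height) {
        Yv12Planes p;
        p.y = base;
        p.chromaStride = (lumaStride / 2 + 15) & ~(size_t)15;  // ALIGN(stride / 2, 16)
        size_t ySize = lumaStride * height;
        size_t cSize = p.chromaStride * height / 2;
        p.v = p.y + ySize;
        p.u = p.v + cSize;
        return p;
    }
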
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 2c17d92..3c9a121 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -44,6 +44,9 @@
class DrmManagerClinet;
class DecryptHandle;
+class TimedTextPlayer;
+struct WVMExtractor;
+
struct AwesomeRenderer : public RefBase {
AwesomeRenderer() {}
@@ -99,36 +102,41 @@
void postAudioEOS(int64_t delayUs = 0ll);
void postAudioSeekComplete();
+ status_t setTimedTextTrackIndex(int32_t index);
+
private:
friend struct AwesomeEvent;
friend struct PreviewPlayer;
enum {
- PLAYING = 1,
- LOOPING = 2,
- FIRST_FRAME = 4,
- PREPARING = 8,
- PREPARED = 16,
- AT_EOS = 32,
- PREPARE_CANCELLED = 64,
- CACHE_UNDERRUN = 128,
- AUDIO_AT_EOS = 256,
- VIDEO_AT_EOS = 512,
- AUTO_LOOPING = 1024,
+ PLAYING = 0x01,
+ LOOPING = 0x02,
+ FIRST_FRAME = 0x04,
+ PREPARING = 0x08,
+ PREPARED = 0x10,
+ AT_EOS = 0x20,
+ PREPARE_CANCELLED = 0x40,
+ CACHE_UNDERRUN = 0x80,
+ AUDIO_AT_EOS = 0x0100,
+ VIDEO_AT_EOS = 0x0200,
+ AUTO_LOOPING = 0x0400,
// We are basically done preparing but are currently buffering
// sufficient data to begin playback and finish the preparation phase
// for good.
- PREPARING_CONNECTED = 2048,
+ PREPARING_CONNECTED = 0x0800,
// We're triggering a single video event to display the first frame
// after the seekpoint.
- SEEK_PREVIEW = 4096,
+ SEEK_PREVIEW = 0x1000,
- AUDIO_RUNNING = 8192,
- AUDIOPLAYER_STARTED = 16384,
+ AUDIO_RUNNING = 0x2000,
+ AUDIOPLAYER_STARTED = 0x4000,
- INCOGNITO = 32768,
+ INCOGNITO = 0x8000,
+
+ TEXT_RUNNING = 0x10000,
+ TEXTPLAYER_STARTED = 0x20000,
};
mutable Mutex mLock;
@@ -222,6 +230,9 @@
sp<DecryptHandle> mDecryptHandle;
int64_t mLastVideoTimeUs;
+ TimedTextPlayer *mTextPlayer;
+
+ sp<WVMExtractor> mWVMExtractor;
status_t setDataSource_l(
const char *uri,
@@ -244,6 +255,8 @@
void setVideoSource(sp<MediaSource> source);
status_t initVideoDecoder(uint32_t flags = 0);
+ void addTextSource(sp<MediaSource> source);
+
void onStreamDone();
void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0);
@@ -271,10 +284,13 @@
void ensureCacheIsFetching_l();
status_t startAudioPlayer_l();
+ void postAudioSeekComplete_l();
void shutdownVideoDecoder_l();
void setNativeWindow_l(const sp<ANativeWindow> &native);
+ bool isStreamingHTTP() const;
+
AwesomePlayer(const AwesomePlayer &);
AwesomePlayer &operator=(const AwesomePlayer &);
};
diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/include/SimpleSoftOMXComponent.h
new file mode 100644
index 0000000..2a29a7d
--- /dev/null
+++ b/media/libstagefright/include/SimpleSoftOMXComponent.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_SOFT_OMX_COMPONENT_H_
+
+#define SIMPLE_SOFT_OMX_COMPONENT_H_
+
+#include "SoftOMXComponent.h"
+
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ALooper;
+
+struct SimpleSoftOMXComponent : public SoftOMXComponent {
+ SimpleSoftOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ virtual ~SimpleSoftOMXComponent();
+
+ void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+ struct BufferInfo {
+ OMX_BUFFERHEADERTYPE *mHeader;
+ bool mOwnedByUs;
+ };
+
+ struct PortInfo {
+ OMX_PARAM_PORTDEFINITIONTYPE mDef;
+ Vector<BufferInfo> mBuffers;
+ List<BufferInfo *> mQueue;
+
+ enum {
+ NONE,
+ DISABLING,
+ ENABLING,
+ } mTransition;
+ };
+
+ void addPort(const OMX_PARAM_PORTDEFINITIONTYPE &def);
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ List<BufferInfo *> &getPortQueue(OMX_U32 portIndex);
+
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+
+ PortInfo *editPortInfo(OMX_U32 portIndex);
+
+private:
+ enum {
+ kWhatSendCommand,
+ kWhatEmptyThisBuffer,
+ kWhatFillThisBuffer,
+ };
+
+ Mutex mLock;
+
+ sp<ALooper> mLooper;
+ sp<AHandlerReflector<SimpleSoftOMXComponent> > mHandler;
+
+ OMX_STATETYPE mState;
+ OMX_STATETYPE mTargetState;
+
+ Vector<PortInfo> mPorts;
+
+ bool isSetParameterAllowed(
+ OMX_INDEXTYPE index, const OMX_PTR params) const;
+
+ virtual OMX_ERRORTYPE sendCommand(
+ OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data);
+
+ virtual OMX_ERRORTYPE getParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE setParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual OMX_ERRORTYPE useBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr);
+
+ virtual OMX_ERRORTYPE allocateBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size);
+
+ virtual OMX_ERRORTYPE freeBuffer(
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE emptyThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE fillThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE getState(OMX_STATETYPE *state);
+
+ void onSendCommand(OMX_COMMANDTYPE cmd, OMX_U32 param);
+ void onChangeState(OMX_STATETYPE state);
+ void onPortEnable(OMX_U32 portIndex, bool enable);
+ void onPortFlush(OMX_U32 portIndex, bool sendFlushComplete);
+
+ void checkTransitions();
+
+ DISALLOW_EVIL_CONSTRUCTORS(SimpleSoftOMXComponent);
+};
+
+} // namespace android
+
+#endif // SIMPLE_SOFT_OMX_COMPONENT_H_
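
The class above defines the contract that the new soft codecs (such as SoftVorbis earlier in this change) follow: register ports with addPort() in the constructor and do all work in onQueueFilled(), draining the lists returned by getPortQueue(). A hedged pass-through skeleton under those assumptions; SoftPassthrough is illustrative only and not part of this change:

    #include <string.h>   // memcpy

    namespace android {

    struct SoftPassthrough : public SimpleSoftOMXComponent {
        SoftPassthrough(const char *name,
                        const OMX_CALLBACKTYPE *callbacks,
                        OMX_PTR appData,
                        OMX_COMPONENTTYPE **component)
            : SimpleSoftOMXComponent(name, callbacks, appData, component) {
            // A real component would fill two OMX_PARAM_PORTDEFINITIONTYPE
            // structs here and register them:
            //   addPort(inputDef);   // port 0
            //   addPort(outputDef);  // port 1
        }

    protected:
        virtual void onQueueFilled(OMX_U32 /* portIndex */) {
            List<BufferInfo *> &inQueue = getPortQueue(0);
            List<BufferInfo *> &outQueue = getPortQueue(1);

            while (!inQueue.empty() && !outQueue.empty()) {
                BufferInfo *inInfo = *inQueue.begin();
                BufferInfo *outInfo = *outQueue.begin();
                OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
                OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

                // "Decode" by copying the payload through unchanged.
                // (A real codec would also respect outHeader->nAllocLen.)
                memcpy(outHeader->pBuffer + outHeader->nOffset,
                       inHeader->pBuffer + inHeader->nOffset,
                       inHeader->nFilledLen);
                outHeader->nFilledLen = inHeader->nFilledLen;
                outHeader->nTimeStamp = inHeader->nTimeStamp;
                outHeader->nFlags = inHeader->nFlags;

                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
            }
        }

    private:
        DISALLOW_EVIL_CONSTRUCTORS(SoftPassthrough);
    };

    }  // namespace android
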
diff --git a/media/libstagefright/include/SoftOMXComponent.h b/media/libstagefright/include/SoftOMXComponent.h
new file mode 100644
index 0000000..053bc22
--- /dev/null
+++ b/media/libstagefright/include/SoftOMXComponent.h
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_OMX_COMPONENT_H_
+
+#define SOFT_OMX_COMPONENT_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/RefBase.h>
+
+#include <OMX_Component.h>
+
+namespace android {
+
+struct SoftOMXComponent : public RefBase {
+ SoftOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ virtual OMX_ERRORTYPE initCheck() const;
+
+ void setLibHandle(void *libHandle);
+ void *libHandle() const;
+
+protected:
+ virtual ~SoftOMXComponent();
+
+ const char *name() const;
+
+ void notify(
+ OMX_EVENTTYPE event,
+ OMX_U32 data1, OMX_U32 data2, OMX_PTR data);
+
+ void notifyEmptyBufferDone(OMX_BUFFERHEADERTYPE *header);
+ void notifyFillBufferDone(OMX_BUFFERHEADERTYPE *header);
+
+ virtual OMX_ERRORTYPE sendCommand(
+ OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data);
+
+ virtual OMX_ERRORTYPE getParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE setParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual OMX_ERRORTYPE getConfig(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual OMX_ERRORTYPE getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index);
+
+ virtual OMX_ERRORTYPE useBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr);
+
+ virtual OMX_ERRORTYPE allocateBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size);
+
+ virtual OMX_ERRORTYPE freeBuffer(
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE emptyThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE fillThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ virtual OMX_ERRORTYPE getState(OMX_STATETYPE *state);
+
+private:
+ AString mName;
+ const OMX_CALLBACKTYPE *mCallbacks;
+ OMX_COMPONENTTYPE *mComponent;
+
+ void *mLibHandle;
+
+ static OMX_ERRORTYPE SendCommandWrapper(
+ OMX_HANDLETYPE component,
+ OMX_COMMANDTYPE cmd,
+ OMX_U32 param,
+ OMX_PTR data);
+
+ static OMX_ERRORTYPE GetParameterWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params);
+
+ static OMX_ERRORTYPE SetParameterWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params);
+
+ static OMX_ERRORTYPE GetConfigWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params);
+
+ static OMX_ERRORTYPE SetConfigWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params);
+
+ static OMX_ERRORTYPE GetExtensionIndexWrapper(
+ OMX_HANDLETYPE component,
+ OMX_STRING name,
+ OMX_INDEXTYPE *index);
+
+ static OMX_ERRORTYPE UseBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr);
+
+ static OMX_ERRORTYPE AllocateBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size);
+
+ static OMX_ERRORTYPE FreeBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ static OMX_ERRORTYPE EmptyThisBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ static OMX_ERRORTYPE FillThisBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE *buffer);
+
+ static OMX_ERRORTYPE GetStateWrapper(
+ OMX_HANDLETYPE component,
+ OMX_STATETYPE *state);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftOMXComponent);
+};
+
+} // namespace android
+
+#endif // SOFT_OMX_COMPONENT_H_
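
SoftOMXComponent above bridges C++ objects into the OMX IL C interface: the constructor fills the OMX_COMPONENTTYPE function table with the static *Wrapper entries, and each wrapper recovers the object from pComponentPrivate before forwarding to the matching virtual. A hedged sketch of a host driving a component through that table (the function name is illustrative; note that the state change itself completes asynchronously via OMX_EventCmdComplete):

    #include <OMX_Component.h>

    static OMX_ERRORTYPE moveToIdle(OMX_COMPONENTTYPE *comp) {
        // Goes through SendCommandWrapper, which casts comp->pComponentPrivate
        // back to SoftOMXComponent* and calls its virtual sendCommand().
        OMX_ERRORTYPE err = comp->SendCommand(
                comp, OMX_CommandStateSet, OMX_StateIdle, NULL);
        if (err != OMX_ErrorNone) {
            return err;
        }

        // Same pattern: GetStateWrapper -> getState().  The component is not
        // guaranteed to be idle yet; completion arrives as OMX_EventCmdComplete.
        OMX_STATETYPE state;
        return comp->GetState(comp, &state);
    }
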
diff --git a/media/libstagefright/include/TimedTextPlayer.h b/media/libstagefright/include/TimedTextPlayer.h
new file mode 100644
index 0000000..ac41b4f
--- /dev/null
+++ b/media/libstagefright/include/TimedTextPlayer.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TIMEDTEXT_PLAYER_H_
+
+#define TIMEDTEXT_PLAYER_H_
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include "include/TimedEventQueue.h"
+
+namespace android {
+
+class MediaSource;
+class AwesomePlayer;
+class MediaBuffer;
+
+class TimedTextPlayer {
+public:
+ TimedTextPlayer(AwesomePlayer *observer,
+ const wp<MediaPlayerBase> &listener,
+ TimedEventQueue *queue);
+
+ virtual ~TimedTextPlayer();
+
+ // index: the index of the text track which will
+ // be turned on
+ status_t start(uint8_t index);
+
+ void pause();
+
+ void resume();
+
+ status_t seekTo(int64_t time_us);
+
+ void addTextSource(sp<MediaSource> source);
+
+ status_t setTimedTextTrackIndex(int32_t index);
+
+private:
+ Mutex mLock;
+
+ sp<MediaSource> mSource;
+
+ bool mSeeking;
+ int64_t mSeekTimeUs;
+
+ bool mStarted;
+
+ sp<TimedEventQueue::Event> mTextEvent;
+ bool mTextEventPending;
+
+ TimedEventQueue *mQueue;
+
+ wp<MediaPlayerBase> mListener;
+ AwesomePlayer *mObserver;
+
+ MediaBuffer *mTextBuffer;
+ Parcel mData;
+
+ Vector<sp<MediaSource> > mTextTrackVector;
+
+ void reset();
+
+ void onTextEvent();
+ void postTextEvent(int64_t delayUs = -1);
+ void cancelTextEvent();
+
+ void notifyListener(
+ int msg, const void *data = NULL, size_t size = 0);
+
+ DISALLOW_EVIL_CONSTRUCTORS(TimedTextPlayer);
+};
+
+} // namespace android
+
+#endif // TIMEDTEXT_PLAYER_H_
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
index 0da45a8..62e5aa5 100644
--- a/media/libstagefright/include/WVMExtractor.h
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -19,6 +19,7 @@
#define WVM_EXTRACTOR_H_
#include <media/stagefright/MediaExtractor.h>
+#include <utils/Errors.h>
namespace android {
@@ -33,12 +34,31 @@
virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
virtual sp<MetaData> getMetaData();
+ // Return the amount of data cached from the current
+ // playback position (in us).
+ // While more data is still being fetched, *finalStatus == OK;
+ // once fetching is completed (no more data available), *finalStatus != OK.
+ // If fetching completed normally (i.e. reached EOS instead of an IO error),
+ // *finalStatus == ERROR_END_OF_STREAM.
+ int64_t getCachedDurationUs(status_t *finalStatus);
+
+ // Set whether the WV component should use adaptive streaming mode.
+ // If adaptive == true, adaptive streaming mode will be used.
+ // The default is non-adaptive streaming mode.
+ // Adaptive streaming mode should only be enabled when the widevine://
+ // protocol is used.
+ void setAdaptiveStreamingMode(bool adaptive);
+
+ // Retrieve the adaptive streaming mode used by the WV component.
+ bool getAdaptiveStreamingMode() const;
+
protected:
virtual ~WVMExtractor();
private:
sp<DataSource> mDataSource;
sp<MediaExtractor> mImpl;
+ bool mUseAdaptiveStreaming;
WVMExtractor(const WVMExtractor &);
WVMExtractor &operator=(const WVMExtractor &);
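
A hedged usage sketch of the two additions above (helper names and the strncasecmp-based scheme check are illustrative): getCachedDurationUs() is polled together with its finalStatus out-parameter, and adaptive streaming is only switched on for widevine:// URIs, as the comments require.

    #include <strings.h>   // strncasecmp

    static void configureWvmExtractor(
            const sp<WVMExtractor> &extractor, const char *uri) {
        // Default is non-adaptive; enable adaptive mode only for widevine://.
        extractor->setAdaptiveStreamingMode(
                !strncasecmp(uri, "widevine://", 11));
    }

    static bool wvmFetchingFinished(
            const sp<WVMExtractor> &extractor, int64_t *cachedDurationUs) {
        status_t finalStatus;
        *cachedDurationUs = extractor->getCachedDurationUs(&finalStatus);

        // OK                   -> still fetching, more data may arrive.
        // ERROR_END_OF_STREAM  -> fetching finished normally.
        // anything else        -> fetching stopped on an error.
        return finalStatus != OK;
    }
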
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 64266b8..e1b9991 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -591,7 +591,8 @@
// AudioSpecificInfo (with size prefix) follows
};
- CHECK(asiSize < 128);
+ // Make sure all sizes can be coded in a single byte.
+ CHECK(asiSize + 22 - 2 < 128);
size_t esdsSize = sizeof(kStaticESDS) + asiSize + 1;
uint8_t *esds = new uint8_t[esdsSize];
memcpy(esds, kStaticESDS, sizeof(kStaticESDS));
@@ -599,6 +600,11 @@
*ptr++ = asiSize;
memcpy(ptr, asi, asiSize);
+ // Increment by codecPrivateSize, less the 2 bytes that are already
+ // accounted for in the base lengths of 22/17.
+ esds[1] += asiSize - 2;
+ esds[6] += asiSize - 2;
+
meta->setData(kKeyESDS, 0, esds, esdsSize);
delete[] esds;
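
For reference on the bookkeeping above, a hedged restatement under the usual short-form ESDS layout (tag byte followed by a single length byte): esds[1] would be the ES_Descriptor length and esds[6] the DecoderConfigDescriptor length, both of which must stay below 0x80 to remain encodable in one byte, which is exactly what the relaxed CHECK guarantees. The helper is illustrative only:

    #include <assert.h>
    #include <stdint.h>
    #include <stddef.h>

    // Patch the single-byte descriptor lengths after appending an
    // AudioSpecificInfo of asiSize bytes; the base lengths of 22/17 already
    // cover 2 of those bytes, hence "asiSize - 2".
    static void patchEsdsLengths(uint8_t *esds, size_t asiSize) {
        assert(asiSize + 22 - 2 < 128);   // keep both lengths single-byte
        esds[1] += asiSize - 2;           // ES_Descriptor length
        esds[6] += asiSize - 2;           // DecoderConfigDescriptor length
    }
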
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index 6e069c8..08ad6f3 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -1,41 +1,28 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
-ifneq ($(BUILD_WITHOUT_PV),true)
-# Set up the OpenCore variables.
-include external/opencore/Config.mk
-LOCAL_C_INCLUDES := $(PV_INCLUDES)
-LOCAL_CFLAGS := $(PV_CFLAGS_MINUS_VISIBILITY)
-endif
-
LOCAL_C_INCLUDES += $(JNI_H_INCLUDE)
LOCAL_SRC_FILES:= \
- OMX.cpp \
+ OMX.cpp \
OMXComponentBase.cpp \
+ OMXMaster.cpp \
OMXNodeInstance.cpp \
- OMXMaster.cpp
+ SimpleSoftOMXComponent.cpp \
+ SoftOMXComponent.cpp \
+ SoftOMXPlugin.cpp \
-ifneq ($(BUILD_WITHOUT_PV),true)
-LOCAL_SRC_FILES += \
- OMXPVCodecsPlugin.cpp
-else
-LOCAL_CFLAGS += -DNO_OPENCORE
-endif
+LOCAL_C_INCLUDES += \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax
-LOCAL_C_INCLUDES += $(TOP)/frameworks/base/include/media/stagefright/openmax
-
-LOCAL_SHARED_LIBRARIES := \
- libbinder \
- libmedia \
- libutils \
- libui \
- libcutils \
-
-ifneq ($(BUILD_WITHOUT_PV),true)
-LOCAL_SHARED_LIBRARIES += \
- libopencore_common
-endif
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libmedia \
+ libutils \
+ libui \
+ libcutils \
+ libstagefright_foundation \
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -lpthread -ldl
@@ -49,5 +36,6 @@
include $(BUILD_SHARED_LIBRARY)
-include $(call all-makefiles-under,$(LOCAL_PATH))
+################################################################################
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index 56b169a..545e6d4 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -20,23 +20,18 @@
#include "OMXMaster.h"
+#include "SoftOMXPlugin.h"
+
#include <dlfcn.h>
#include <media/stagefright/MediaDebug.h>
-#ifndef NO_OPENCORE
-#include "OMXPVCodecsPlugin.h"
-#endif
-
namespace android {
OMXMaster::OMXMaster()
: mVendorLibHandle(NULL) {
addVendorPlugin();
-
-#ifndef NO_OPENCORE
- addPlugin(new OMXPVCodecsPlugin);
-#endif
+ addPlugin(new SoftOMXPlugin);
}
OMXMaster::~OMXMaster() {
@@ -49,7 +44,11 @@
}
void OMXMaster::addVendorPlugin() {
- mVendorLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
+ addPlugin("libstagefrighthw.so");
+}
+
+void OMXMaster::addPlugin(const char *libname) {
+ mVendorLibHandle = dlopen(libname, RTLD_NOW);
if (mVendorLibHandle == NULL) {
return;
diff --git a/media/libstagefright/omx/OMXMaster.h b/media/libstagefright/omx/OMXMaster.h
index 7ba8d18..feee1f9 100644
--- a/media/libstagefright/omx/OMXMaster.h
+++ b/media/libstagefright/omx/OMXMaster.h
@@ -58,6 +58,7 @@
void *mVendorLibHandle;
void addVendorPlugin();
+ void addPlugin(const char *libname);
void addPlugin(OMXPluginBase *plugin);
void clearPlugins();
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index cdce772..8462988 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -234,6 +234,7 @@
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
+
return StatusFromOMXError(err);
}
diff --git a/media/libstagefright/omx/OMXPVCodecsPlugin.cpp b/media/libstagefright/omx/OMXPVCodecsPlugin.cpp
deleted file mode 100644
index d1f5be3..0000000
--- a/media/libstagefright/omx/OMXPVCodecsPlugin.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "OMXPVCodecsPlugin.h"
-
-#include "pv_omxcore.h"
-
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-OMXPVCodecsPlugin::OMXPVCodecsPlugin() {
- OMX_MasterInit();
-}
-
-OMXPVCodecsPlugin::~OMXPVCodecsPlugin() {
- OMX_MasterDeinit();
-}
-
-OMX_ERRORTYPE OMXPVCodecsPlugin::makeComponentInstance(
- const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component) {
- return OMX_MasterGetHandle(
- reinterpret_cast<OMX_HANDLETYPE *>(component),
- const_cast<char *>(name),
- appData,
- const_cast<OMX_CALLBACKTYPE *>(callbacks));
-}
-
-OMX_ERRORTYPE OMXPVCodecsPlugin::destroyComponentInstance(
- OMX_COMPONENTTYPE *component) {
- return OMX_MasterFreeHandle(component);
-}
-
-OMX_ERRORTYPE OMXPVCodecsPlugin::enumerateComponents(
- OMX_STRING name,
- size_t size,
- OMX_U32 index) {
- return OMX_MasterComponentNameEnum(name, size, index);
-}
-
-OMX_ERRORTYPE OMXPVCodecsPlugin::getRolesOfComponent(
- const char *name,
- Vector<String8> *roles) {
- roles->clear();
-
- OMX_U32 numRoles;
- OMX_ERRORTYPE err =
- OMX_MasterGetRolesOfComponent(
- const_cast<char *>(name),
- &numRoles,
- NULL);
-
- if (err != OMX_ErrorNone) {
- return err;
- }
-
- if (numRoles > 0) {
- OMX_U8 **array = new OMX_U8 *[numRoles];
- for (OMX_U32 i = 0; i < numRoles; ++i) {
- array[i] = new OMX_U8[OMX_MAX_STRINGNAME_SIZE];
- }
-
- OMX_U32 numRoles2;
- err = OMX_MasterGetRolesOfComponent(
- const_cast<char *>(name), &numRoles2, array);
-
- CHECK_EQ(err, OMX_ErrorNone);
- CHECK_EQ(numRoles, numRoles2);
-
- for (OMX_U32 i = 0; i < numRoles; ++i) {
- String8 s((const char *)array[i]);
- roles->push(s);
-
- delete[] array[i];
- array[i] = NULL;
- }
-
- delete[] array;
- array = NULL;
- }
-
- return OMX_ErrorNone;
-}
-
-} // namespace android
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
new file mode 100644
index 0000000..179b2a0
--- /dev/null
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -0,0 +1,640 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleSoftOMXComponent"
+#include <utils/Log.h>
+
+#include "include/SimpleSoftOMXComponent.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+SimpleSoftOMXComponent::SimpleSoftOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftOMXComponent(name, callbacks, appData, component),
+ mLooper(new ALooper),
+ mHandler(new AHandlerReflector<SimpleSoftOMXComponent>(this)),
+ mState(OMX_StateLoaded),
+ mTargetState(OMX_StateLoaded) {
+ mLooper->setName(name);
+ mLooper->registerHandler(mHandler);
+
+ mLooper->start(
+ false, // runOnCallingThread
+ false, // canCallJava
+ PRIORITY_AUDIO);
+}
+
+SimpleSoftOMXComponent::~SimpleSoftOMXComponent() {
+ mLooper->unregisterHandler(mHandler->id());
+ mLooper->stop();
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::sendCommand(
+ OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {
+ CHECK(data == NULL);
+
+ sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler->id());
+ msg->setInt32("cmd", cmd);
+ msg->setInt32("param", param);
+ msg->post();
+
+ return OMX_ErrorNone;
+}
+
+bool SimpleSoftOMXComponent::isSetParameterAllowed(
+ OMX_INDEXTYPE index, const OMX_PTR params) const {
+ if (mState == OMX_StateLoaded) {
+ return true;
+ }
+
+ OMX_U32 portIndex;
+
+ switch (index) {
+ case OMX_IndexParamPortDefinition:
+ {
+ portIndex = ((OMX_PARAM_PORTDEFINITIONTYPE *)params)->nPortIndex;
+ break;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ portIndex = ((OMX_AUDIO_PARAM_PCMMODETYPE *)params)->nPortIndex;
+ break;
+ }
+
+ case OMX_IndexParamAudioAac:
+ {
+ portIndex = ((OMX_AUDIO_PARAM_AACPROFILETYPE *)params)->nPortIndex;
+ break;
+ }
+
+ default:
+ return false;
+ }
+
+ CHECK(portIndex < mPorts.size());
+
+ return !mPorts.itemAt(portIndex).mDef.bEnabled;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::getParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ Mutex::Autolock autoLock(mLock);
+ return internalGetParameter(index, params);
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::setParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK(isSetParameterAllowed(index, params));
+
+ return internalSetParameter(index, params);
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_PARAM_PORTDEFINITIONTYPE *defParams =
+ (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+
+ if (defParams->nPortIndex >= mPorts.size()
+ || defParams->nSize
+ != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
+ return OMX_ErrorUndefined;
+ }
+
+ const PortInfo *port =
+ &mPorts.itemAt(defParams->nPortIndex);
+
+ memcpy(defParams, &port->mDef, sizeof(port->mDef));
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_PARAM_PORTDEFINITIONTYPE *defParams =
+ (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+
+ if (defParams->nPortIndex >= mPorts.size()
+ || defParams->nSize
+ != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
+ return OMX_ErrorUndefined;
+ }
+
+ PortInfo *port =
+ &mPorts.editItemAt(defParams->nPortIndex);
+
+ if (defParams->nBufferSize != port->mDef.nBufferSize) {
+ CHECK_GE(defParams->nBufferSize, port->mDef.nBufferSize);
+ port->mDef.nBufferSize = defParams->nBufferSize;
+ }
+
+ if (defParams->nBufferCountActual
+ != port->mDef.nBufferCountActual) {
+ CHECK_GE(defParams->nBufferCountActual,
+ port->mDef.nBufferCountMin);
+
+ port->mDef.nBufferCountActual = defParams->nBufferCountActual;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::useBuffer(
+ OMX_BUFFERHEADERTYPE **header,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr) {
+ Mutex::Autolock autoLock(mLock);
+ CHECK_LT(portIndex, mPorts.size());
+
+ *header = new OMX_BUFFERHEADERTYPE;
+ (*header)->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ (*header)->nVersion.s.nVersionMajor = 1;
+ (*header)->nVersion.s.nVersionMinor = 0;
+ (*header)->nVersion.s.nRevision = 0;
+ (*header)->nVersion.s.nStep = 0;
+ (*header)->pBuffer = ptr;
+ (*header)->nAllocLen = size;
+ (*header)->nFilledLen = 0;
+ (*header)->nOffset = 0;
+ (*header)->pAppPrivate = appPrivate;
+ (*header)->pPlatformPrivate = NULL;
+ (*header)->pInputPortPrivate = NULL;
+ (*header)->pOutputPortPrivate = NULL;
+ (*header)->hMarkTargetComponent = NULL;
+ (*header)->pMarkData = NULL;
+ (*header)->nTickCount = 0;
+ (*header)->nTimeStamp = 0;
+ (*header)->nFlags = 0;
+ (*header)->nOutputPortIndex = portIndex;
+ (*header)->nInputPortIndex = portIndex;
+
+ PortInfo *port = &mPorts.editItemAt(portIndex);
+
+ CHECK(mState == OMX_StateLoaded || port->mDef.bEnabled == OMX_FALSE);
+
+ CHECK_LT(port->mBuffers.size(), port->mDef.nBufferCountActual);
+
+ port->mBuffers.push();
+
+ BufferInfo *buffer =
+ &port->mBuffers.editItemAt(port->mBuffers.size() - 1);
+
+ buffer->mHeader = *header;
+ buffer->mOwnedByUs = false;
+
+ if (port->mBuffers.size() == port->mDef.nBufferCountActual) {
+ port->mDef.bPopulated = OMX_TRUE;
+ checkTransitions();
+ }
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::allocateBuffer(
+ OMX_BUFFERHEADERTYPE **header,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size) {
+ OMX_U8 *ptr = new OMX_U8[size];
+
+ OMX_ERRORTYPE err =
+ useBuffer(header, portIndex, appPrivate, size, ptr);
+
+ if (err != OMX_ErrorNone) {
+ delete[] ptr;
+ ptr = NULL;
+
+ return err;
+ }
+
+ CHECK((*header)->pPlatformPrivate == NULL);
+ (*header)->pPlatformPrivate = ptr;
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::freeBuffer(
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *header) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK_LT(portIndex, mPorts.size());
+
+ PortInfo *port = &mPorts.editItemAt(portIndex);
+
+#if 0 // XXX
+ CHECK((mState == OMX_StateIdle && mTargetState == OMX_StateLoaded)
+ || port->mDef.bEnabled == OMX_FALSE);
+#endif
+
+ bool found = false;
+ for (size_t i = 0; i < port->mBuffers.size(); ++i) {
+ BufferInfo *buffer = &port->mBuffers.editItemAt(i);
+
+ if (buffer->mHeader == header) {
+ CHECK(!buffer->mOwnedByUs);
+
+ if (header->pPlatformPrivate != NULL) {
+ // This buffer's data was allocated by us.
+ CHECK(header->pPlatformPrivate == header->pBuffer);
+
+ delete[] header->pBuffer;
+ header->pBuffer = NULL;
+ }
+
+ delete header;
+ header = NULL;
+
+ port->mBuffers.removeAt(i);
+ port->mDef.bPopulated = OMX_FALSE;
+
+ checkTransitions();
+
+ found = true;
+ break;
+ }
+ }
+
+ CHECK(found);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer) {
+ sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id());
+ msg->setPointer("header", buffer);
+ msg->post();
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::fillThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer) {
+ sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler->id());
+ msg->setPointer("header", buffer);
+ msg->post();
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::getState(OMX_STATETYPE *state) {
+ Mutex::Autolock autoLock(mLock);
+
+ *state = mState;
+
+ return OMX_ErrorNone;
+}
+
+void SimpleSoftOMXComponent::onMessageReceived(const sp<AMessage> &msg) {
+ Mutex::Autolock autoLock(mLock);
+
+ switch (msg->what()) {
+ case kWhatSendCommand:
+ {
+ int32_t cmd, param;
+ CHECK(msg->findInt32("cmd", &cmd));
+ CHECK(msg->findInt32("param", &param));
+
+ onSendCommand((OMX_COMMANDTYPE)cmd, (OMX_U32)param);
+ break;
+ }
+
+ case kWhatEmptyThisBuffer:
+ case kWhatFillThisBuffer:
+ {
+ OMX_BUFFERHEADERTYPE *header;
+ CHECK(msg->findPointer("header", (void **)&header));
+
+ CHECK(mState == OMX_StateExecuting && mTargetState == mState);
+
+ bool found = false;
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ PortInfo *port = &mPorts.editItemAt(i);
+
+ for (size_t j = 0; j < port->mBuffers.size(); ++j) {
+ BufferInfo *buffer = &port->mBuffers.editItemAt(j);
+
+ if (buffer->mHeader == header) {
+ CHECK(!buffer->mOwnedByUs);
+
+ buffer->mOwnedByUs = true;
+
+ CHECK((msg->what() == kWhatEmptyThisBuffer
+ && port->mDef.eDir == OMX_DirInput)
+ || (port->mDef.eDir == OMX_DirOutput));
+
+ port->mQueue.push_back(buffer);
+ onQueueFilled(i);
+
+ found = true;
+ break;
+ }
+ }
+ }
+
+ CHECK(found);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void SimpleSoftOMXComponent::onSendCommand(
+ OMX_COMMANDTYPE cmd, OMX_U32 param) {
+ switch (cmd) {
+ case OMX_CommandStateSet:
+ {
+ onChangeState((OMX_STATETYPE)param);
+ break;
+ }
+
+ case OMX_CommandPortEnable:
+ case OMX_CommandPortDisable:
+ {
+ onPortEnable(param, cmd == OMX_CommandPortEnable);
+ break;
+ }
+
+ case OMX_CommandFlush:
+ {
+ onPortFlush(param, true /* sendFlushComplete */);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void SimpleSoftOMXComponent::onChangeState(OMX_STATETYPE state) {
+ // We shouldn't be in a state transition already.
+ CHECK_EQ((int)mState, (int)mTargetState);
+
+ switch (mState) {
+ case OMX_StateLoaded:
+ CHECK_EQ((int)state, (int)OMX_StateIdle);
+ break;
+ case OMX_StateIdle:
+ CHECK(state == OMX_StateLoaded || state == OMX_StateExecuting);
+ break;
+ case OMX_StateExecuting:
+ {
+ CHECK_EQ((int)state, (int)OMX_StateIdle);
+
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ onPortFlush(i, false /* sendFlushComplete */);
+ }
+
+ mState = OMX_StateIdle;
+ notify(OMX_EventCmdComplete, OMX_CommandStateSet, state, NULL);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+
+ mTargetState = state;
+
+ checkTransitions();
+}
+
+void SimpleSoftOMXComponent::onPortEnable(OMX_U32 portIndex, bool enable) {
+ CHECK_LT(portIndex, mPorts.size());
+
+ PortInfo *port = &mPorts.editItemAt(portIndex);
+ CHECK_EQ((int)port->mTransition, (int)PortInfo::NONE);
+ CHECK(port->mDef.bEnabled == !enable);
+
+ if (!enable) {
+ port->mDef.bEnabled = OMX_FALSE;
+ port->mTransition = PortInfo::DISABLING;
+
+ for (size_t i = 0; i < port->mBuffers.size(); ++i) {
+ BufferInfo *buffer = &port->mBuffers.editItemAt(i);
+
+ if (buffer->mOwnedByUs) {
+ buffer->mOwnedByUs = false;
+
+ if (port->mDef.eDir == OMX_DirInput) {
+ notifyEmptyBufferDone(buffer->mHeader);
+ } else {
+ CHECK_EQ(port->mDef.eDir, OMX_DirOutput);
+ notifyFillBufferDone(buffer->mHeader);
+ }
+ }
+ }
+
+ port->mQueue.clear();
+ } else {
+ port->mTransition = PortInfo::ENABLING;
+ }
+
+ checkTransitions();
+}
+
+void SimpleSoftOMXComponent::onPortFlush(
+ OMX_U32 portIndex, bool sendFlushComplete) {
+ if (portIndex == OMX_ALL) {
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ onPortFlush(i, sendFlushComplete);
+ }
+
+ if (sendFlushComplete) {
+ notify(OMX_EventCmdComplete, OMX_CommandFlush, OMX_ALL, NULL);
+ }
+
+ return;
+ }
+
+ CHECK_LT(portIndex, mPorts.size());
+
+ PortInfo *port = &mPorts.editItemAt(portIndex);
+ CHECK_EQ((int)port->mTransition, (int)PortInfo::NONE);
+
+ for (size_t i = 0; i < port->mBuffers.size(); ++i) {
+ BufferInfo *buffer = &port->mBuffers.editItemAt(i);
+
+ if (!buffer->mOwnedByUs) {
+ continue;
+ }
+
+ buffer->mHeader->nFilledLen = 0;
+ buffer->mHeader->nOffset = 0;
+ buffer->mHeader->nFlags = 0;
+
+ buffer->mOwnedByUs = false;
+
+ if (port->mDef.eDir == OMX_DirInput) {
+ notifyEmptyBufferDone(buffer->mHeader);
+ } else {
+ CHECK_EQ(port->mDef.eDir, OMX_DirOutput);
+
+ notifyFillBufferDone(buffer->mHeader);
+ }
+ }
+
+ port->mQueue.clear();
+
+ if (sendFlushComplete) {
+ notify(OMX_EventCmdComplete, OMX_CommandFlush, portIndex, NULL);
+
+ onPortFlushCompleted(portIndex);
+ }
+}
+
+void SimpleSoftOMXComponent::checkTransitions() {
+ if (mState != mTargetState) {
+ bool transitionComplete = true;
+
+ if (mState == OMX_StateLoaded) {
+ CHECK_EQ((int)mTargetState, (int)OMX_StateIdle);
+
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ const PortInfo &port = mPorts.itemAt(i);
+ if (port.mDef.bEnabled == OMX_FALSE) {
+ continue;
+ }
+
+ if (port.mDef.bPopulated == OMX_FALSE) {
+ transitionComplete = false;
+ break;
+ }
+ }
+ } else if (mTargetState == OMX_StateLoaded) {
+ CHECK_EQ((int)mState, (int)OMX_StateIdle);
+
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ const PortInfo &port = mPorts.itemAt(i);
+ if (port.mDef.bEnabled == OMX_FALSE) {
+ continue;
+ }
+
+ size_t n = port.mBuffers.size();
+
+ if (n > 0) {
+ CHECK_LE(n, port.mDef.nBufferCountActual);
+
+ if (n == port.mDef.nBufferCountActual) {
+ CHECK_EQ((int)port.mDef.bPopulated, (int)OMX_TRUE);
+ } else {
+ CHECK_EQ((int)port.mDef.bPopulated, (int)OMX_FALSE);
+ }
+
+ transitionComplete = false;
+ break;
+ }
+ }
+ }
+
+ if (transitionComplete) {
+ mState = mTargetState;
+
+ notify(OMX_EventCmdComplete, OMX_CommandStateSet, mState, NULL);
+ }
+ }
+
+ for (size_t i = 0; i < mPorts.size(); ++i) {
+ PortInfo *port = &mPorts.editItemAt(i);
+
+ if (port->mTransition == PortInfo::DISABLING) {
+ if (port->mBuffers.empty()) {
+ LOGV("Port %d now disabled.", i);
+
+ port->mTransition = PortInfo::NONE;
+ notify(OMX_EventCmdComplete, OMX_CommandPortDisable, i, NULL);
+
+ onPortEnableCompleted(i, false /* enabled */);
+ }
+ } else if (port->mTransition == PortInfo::ENABLING) {
+ if (port->mDef.bPopulated == OMX_TRUE) {
+ LOGV("Port %d now enabled.", i);
+
+ port->mTransition = PortInfo::NONE;
+ port->mDef.bEnabled = OMX_TRUE;
+ notify(OMX_EventCmdComplete, OMX_CommandPortEnable, i, NULL);
+
+ onPortEnableCompleted(i, true /* enabled */);
+ }
+ }
+ }
+}
+
+void SimpleSoftOMXComponent::addPort(const OMX_PARAM_PORTDEFINITIONTYPE &def) {
+ CHECK_EQ(def.nPortIndex, mPorts.size());
+
+ mPorts.push();
+ PortInfo *info = &mPorts.editItemAt(mPorts.size() - 1);
+ info->mDef = def;
+ info->mTransition = PortInfo::NONE;
+}
+
+void SimpleSoftOMXComponent::onQueueFilled(OMX_U32 portIndex) {
+}
+
+void SimpleSoftOMXComponent::onPortFlushCompleted(OMX_U32 portIndex) {
+}
+
+void SimpleSoftOMXComponent::onPortEnableCompleted(
+ OMX_U32 portIndex, bool enabled) {
+}
+
+List<SimpleSoftOMXComponent::BufferInfo *> &
+SimpleSoftOMXComponent::getPortQueue(OMX_U32 portIndex) {
+ CHECK_LT(portIndex, mPorts.size());
+ return mPorts.editItemAt(portIndex).mQueue;
+}
+
+SimpleSoftOMXComponent::PortInfo *SimpleSoftOMXComponent::editPortInfo(
+ OMX_U32 portIndex) {
+ CHECK_LT(portIndex, mPorts.size());
+ return &mPorts.editItemAt(portIndex);
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
new file mode 100644
index 0000000..b1c34dc
--- /dev/null
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftOMXComponent"
+#include <utils/Log.h>
+
+#include "include/SoftOMXComponent.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+SoftOMXComponent::SoftOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : mName(name),
+ mCallbacks(callbacks),
+ mComponent(new OMX_COMPONENTTYPE),
+ mLibHandle(NULL) {
+ mComponent->nSize = sizeof(*mComponent);
+ mComponent->nVersion.s.nVersionMajor = 1;
+ mComponent->nVersion.s.nVersionMinor = 0;
+ mComponent->nVersion.s.nRevision = 0;
+ mComponent->nVersion.s.nStep = 0;
+ mComponent->pComponentPrivate = this;
+ mComponent->pApplicationPrivate = appData;
+
+ mComponent->GetComponentVersion = NULL;
+ mComponent->SendCommand = SendCommandWrapper;
+ mComponent->GetParameter = GetParameterWrapper;
+ mComponent->SetParameter = SetParameterWrapper;
+ mComponent->GetConfig = GetConfigWrapper;
+ mComponent->SetConfig = SetConfigWrapper;
+ mComponent->GetExtensionIndex = GetExtensionIndexWrapper;
+ mComponent->GetState = GetStateWrapper;
+ mComponent->ComponentTunnelRequest = NULL;
+ mComponent->UseBuffer = UseBufferWrapper;
+ mComponent->AllocateBuffer = AllocateBufferWrapper;
+ mComponent->FreeBuffer = FreeBufferWrapper;
+ mComponent->EmptyThisBuffer = EmptyThisBufferWrapper;
+ mComponent->FillThisBuffer = FillThisBufferWrapper;
+ mComponent->SetCallbacks = NULL;
+ mComponent->ComponentDeInit = NULL;
+ mComponent->UseEGLImage = NULL;
+ mComponent->ComponentRoleEnum = NULL;
+
+ *component = mComponent;
+}
+
+SoftOMXComponent::~SoftOMXComponent() {
+ delete mComponent;
+ mComponent = NULL;
+}
+
+void SoftOMXComponent::setLibHandle(void *libHandle) {
+ CHECK(libHandle != NULL);
+ mLibHandle = libHandle;
+}
+
+void *SoftOMXComponent::libHandle() const {
+ return mLibHandle;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::initCheck() const {
+ return OMX_ErrorNone;
+}
+
+const char *SoftOMXComponent::name() const {
+ return mName.c_str();
+}
+
+void SoftOMXComponent::notify(
+ OMX_EVENTTYPE event,
+ OMX_U32 data1, OMX_U32 data2, OMX_PTR data) {
+ (*mCallbacks->EventHandler)(
+ mComponent,
+ mComponent->pApplicationPrivate,
+ event,
+ data1,
+ data2,
+ data);
+}
+
+void SoftOMXComponent::notifyEmptyBufferDone(OMX_BUFFERHEADERTYPE *header) {
+ (*mCallbacks->EmptyBufferDone)(
+ mComponent, mComponent->pApplicationPrivate, header);
+}
+
+void SoftOMXComponent::notifyFillBufferDone(OMX_BUFFERHEADERTYPE *header) {
+ (*mCallbacks->FillBufferDone)(
+ mComponent, mComponent->pApplicationPrivate, header);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::SendCommandWrapper(
+ OMX_HANDLETYPE component,
+ OMX_COMMANDTYPE cmd,
+ OMX_U32 param,
+ OMX_PTR data) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->sendCommand(cmd, param, data);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::GetParameterWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->getParameter(index, params);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::SetParameterWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->setParameter(index, params);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::GetConfigWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->getConfig(index, params);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::SetConfigWrapper(
+ OMX_HANDLETYPE component,
+ OMX_INDEXTYPE index,
+ OMX_PTR params) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->setConfig(index, params);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::GetExtensionIndexWrapper(
+ OMX_HANDLETYPE component,
+ OMX_STRING name,
+ OMX_INDEXTYPE *index) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->getExtensionIndex(name, index);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::UseBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->useBuffer(buffer, portIndex, appPrivate, size, ptr);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::AllocateBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->allocateBuffer(buffer, portIndex, appPrivate, size);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::FreeBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *buffer) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->freeBuffer(portIndex, buffer);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::EmptyThisBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE *buffer) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->emptyThisBuffer(buffer);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::FillThisBufferWrapper(
+ OMX_HANDLETYPE component,
+ OMX_BUFFERHEADERTYPE *buffer) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->fillThisBuffer(buffer);
+}
+
+// static
+OMX_ERRORTYPE SoftOMXComponent::GetStateWrapper(
+ OMX_HANDLETYPE component,
+ OMX_STATETYPE *state) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ return me->getState(state);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+OMX_ERRORTYPE SoftOMXComponent::sendCommand(
+ OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::getParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::setParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::getConfig(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::useBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size,
+ OMX_U8 *ptr) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::allocateBuffer(
+ OMX_BUFFERHEADERTYPE **buffer,
+ OMX_U32 portIndex,
+ OMX_PTR appPrivate,
+ OMX_U32 size) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::freeBuffer(
+ OMX_U32 portIndex,
+ OMX_BUFFERHEADERTYPE *buffer) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::emptyThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::fillThisBuffer(
+ OMX_BUFFERHEADERTYPE *buffer) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SoftOMXComponent::getState(OMX_STATETYPE *state) {
+ return OMX_ErrorUndefined;
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
new file mode 100644
index 0000000..6bd6624
--- /dev/null
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftOMXPlugin"
+#include <utils/Log.h>
+
+#include "SoftOMXPlugin.h"
+#include "include/SoftOMXComponent.h"
+
+#include <media/stagefright/foundation/AString.h>
+
+#include <dlfcn.h>
+
+namespace android {
+
+static const struct {
+ const char *mName;
+ const char *mLibNameSuffix;
+ const char *mRole;
+
+} kComponents[] = {
+ { "OMX.google.aac.decoder", "aacdec", "audio_decoder.aac" },
+ { "OMX.google.amrnb.decoder", "amrdec", "audio_decoder.amrnb" },
+ { "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
+ { "OMX.google.avc.decoder", "avcdec", "video_decoder.avc" },
+ { "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
+ { "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
+ { "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
+ { "OMX.google.mpeg4.decoder", "mpeg4dec", "video_decoder.mpeg4" },
+ { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
+ { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
+ { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" },
+};
+
+static const size_t kNumComponents =
+ sizeof(kComponents) / sizeof(kComponents[0]);
+
+SoftOMXPlugin::SoftOMXPlugin() {
+}
+
+OMX_ERRORTYPE SoftOMXPlugin::makeComponentInstance(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ LOGV("makeComponentInstance '%s'", name);
+
+ for (size_t i = 0; i < kNumComponents; ++i) {
+ if (strcmp(name, kComponents[i].mName)) {
+ continue;
+ }
+
+ AString libName = "libstagefright_soft_";
+ libName.append(kComponents[i].mLibNameSuffix);
+ libName.append(".so");
+
+ void *libHandle = dlopen(libName.c_str(), RTLD_NOW);
+
+ if (libHandle == NULL) {
+ LOGE("unable to dlopen %s", libName.c_str());
+
+ return OMX_ErrorComponentNotFound;
+ }
+
+ typedef SoftOMXComponent *(*CreateSoftOMXComponentFunc)(
+ const char *, const OMX_CALLBACKTYPE *,
+ OMX_PTR, OMX_COMPONENTTYPE **);
+
+ CreateSoftOMXComponentFunc createSoftOMXComponent =
+ (CreateSoftOMXComponentFunc)dlsym(
+ libHandle,
+ "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
+ "PvPP17OMX_COMPONENTTYPE");
+
+ if (createSoftOMXComponent == NULL) {
+ dlclose(libHandle);
+ libHandle = NULL;
+
+ return OMX_ErrorComponentNotFound;
+ }
+
+ sp<SoftOMXComponent> codec =
+ (*createSoftOMXComponent)(name, callbacks, appData, component);
+
+ if (codec == NULL) {
+ dlclose(libHandle);
+ libHandle = NULL;
+
+ return OMX_ErrorInsufficientResources;
+ }
+
+ OMX_ERRORTYPE err = codec->initCheck();
+ if (err != OMX_ErrorNone) {
+ dlclose(libHandle);
+ libHandle = NULL;
+
+ return err;
+ }
+
+ codec->incStrong(this);
+ codec->setLibHandle(libHandle);
+
+ return OMX_ErrorNone;
+ }
+
+ return OMX_ErrorInvalidComponentName;
+}
+
+OMX_ERRORTYPE SoftOMXPlugin::destroyComponentInstance(
+ OMX_COMPONENTTYPE *component) {
+ SoftOMXComponent *me =
+ (SoftOMXComponent *)
+ ((OMX_COMPONENTTYPE *)component)->pComponentPrivate;
+
+ void *libHandle = me->libHandle();
+
+ me->decStrong(this);
+ me = NULL;
+
+ dlclose(libHandle);
+ libHandle = NULL;
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftOMXPlugin::enumerateComponents(
+ OMX_STRING name,
+ size_t size,
+ OMX_U32 index) {
+ if (index >= kNumComponents) {
+ return OMX_ErrorNoMore;
+ }
+
+ strcpy(name, kComponents[index].mName);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftOMXPlugin::getRolesOfComponent(
+ const char *name,
+ Vector<String8> *roles) {
+ for (size_t i = 0; i < kNumComponents; ++i) {
+ if (strcmp(name, kComponents[i].mName)) {
+ continue;
+ }
+
+ roles->clear();
+ roles->push(String8(kComponents[i].mRole));
+
+ return OMX_ErrorNone;
+ }
+
+ return OMX_ErrorInvalidComponentName;
+}
+
+} // namespace android
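
The mangled name dlsym'd in makeComponentInstance() above demangles to a global factory, createSoftOMXComponent(const char *, const OMX_CALLBACKTYPE *, OMX_PTR, OMX_COMPONENTTYPE **), so each libstagefright_soft_<suffix>.so is expected to define one. A hedged sketch, using the Vorbis decoder declared earlier in this change as the example:

    #include "SoftVorbis.h"

    // Defined with C++ linkage on purpose: the plugin looks up the mangled
    // symbol, not an extern "C" name.
    android::SoftOMXComponent *createSoftOMXComponent(
            const char *name, const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData, OMX_COMPONENTTYPE **component) {
        return new android::SoftVorbis(name, callbacks, appData, component);
    }
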
diff --git a/media/libstagefright/omx/OMXPVCodecsPlugin.h b/media/libstagefright/omx/SoftOMXPlugin.h
similarity index 76%
rename from media/libstagefright/omx/OMXPVCodecsPlugin.h
rename to media/libstagefright/omx/SoftOMXPlugin.h
index c133232..f93c323 100644
--- a/media/libstagefright/omx/OMXPVCodecsPlugin.h
+++ b/media/libstagefright/omx/SoftOMXPlugin.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,17 +14,17 @@
* limitations under the License.
*/
-#ifndef OMX_PV_CODECS_PLUGIN_H_
+#ifndef SOFT_OMX_PLUGIN_H_
-#define OMX_PV_CODECS_PLUGIN_H_
+#define SOFT_OMX_PLUGIN_H_
+#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/OMXPluginBase.h>
namespace android {
-struct OMXPVCodecsPlugin : public OMXPluginBase {
- OMXPVCodecsPlugin();
- virtual ~OMXPVCodecsPlugin();
+struct SoftOMXPlugin : public OMXPluginBase {
+ SoftOMXPlugin();
virtual OMX_ERRORTYPE makeComponentInstance(
const char *name,
@@ -45,10 +45,9 @@
Vector<String8> *roles);
private:
- OMXPVCodecsPlugin(const OMXPVCodecsPlugin &);
- OMXPVCodecsPlugin &operator=(const OMXPVCodecsPlugin &);
+ DISALLOW_EVIL_CONSTRUCTORS(SoftOMXPlugin);
};
} // namespace android
-#endif // OMX_PV_CODECS_PLUGIN_H_
+#endif // SOFT_OMX_PLUGIN_H_
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 54c0d77..a404f1f 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -29,6 +29,7 @@
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
@@ -454,6 +455,7 @@
{ "video_decoder.avc", "video/avc" },
{ "video_decoder.mpeg4", "video/mp4v-es" },
{ "video_decoder.h263", "video/3gpp" },
+ { "video_decoder.vpx", "video/x-vnd.on2.vp8" },
// we appear to use this as a synonym to amrnb.
{ "audio_decoder.amr", "audio/3gpp" },
@@ -461,7 +463,10 @@
{ "audio_decoder.amrnb", "audio/3gpp" },
{ "audio_decoder.amrwb", "audio/amr-wb" },
{ "audio_decoder.aac", "audio/mp4a-latm" },
- { "audio_decoder.mp3", "audio/mpeg" }
+ { "audio_decoder.mp3", "audio/mpeg" },
+ { "audio_decoder.vorbis", "audio/vorbis" },
+ { "audio_decoder.g711alaw", MEDIA_MIMETYPE_AUDIO_G711_ALAW },
+ { "audio_decoder.g711mlaw", MEDIA_MIMETYPE_AUDIO_G711_MLAW },
};
for (size_t i = 0; i < sizeof(kRoleToMime) / sizeof(kRoleToMime[0]); ++i) {
@@ -492,7 +497,15 @@
{ "audio/mp4a-latm",
"file:///sdcard/media_api/video/H264_AAC.3gp" },
{ "audio/mpeg",
- "file:///sdcard/media_api/music/MP3CBR.mp3" }
+ "file:///sdcard/media_api/music/MP3CBR.mp3" },
+ { "audio/vorbis",
+ "file:///sdcard/media_api/metaDataTestMedias/OGG/"
+ "When You Say Nothing At All.ogg" },
+ { "video/x-vnd.on2.vp8",
+ "file:///sdcard/media_api/webm/big-buck-bunny_trailer.webm" },
+ { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "file:///sdcard/M1F1-Alaw-AFsp.wav" },
+ { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ "file:///sdcard/M1F1-mulaw-AFsp.wav" },
};
for (size_t i = 0; i < sizeof(kMimeToURL) / sizeof(kMimeToURL[0]); ++i) {
@@ -746,6 +759,10 @@
const IOMX::ComponentInfo &info = *it;
const char *componentName = info.mName.string();
+ if (strncmp(componentName, "OMX.google.", 11)) {
+ continue;
+ }
+
for (List<String8>::const_iterator role_it = info.mRoles.begin();
role_it != info.mRoles.end(); ++role_it) {
const char *componentRole = (*role_it).string();
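The new strncmp() filter above restricts the harness to the software codecs: strncmp() returns 0 on a prefix match, so any component whose name does not begin with "OMX.google." is skipped. A small stand-alone illustration of the same idiom (the helper name is hypothetical):

    // Illustrative only: prefix filter equivalent to the harness check above.
    #include <string.h>

    static bool isSoftwareComponent(const char *componentName) {
        // strncmp() == 0 means the first 11 characters equal "OMX.google."
        return strncmp(componentName, "OMX.google.", 11) == 0;
    }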
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
index fff0b5f..fef8066 100644
--- a/media/mtp/MtpStorage.cpp
+++ b/media/mtp/MtpStorage.cpp
@@ -33,12 +33,13 @@
namespace android {
MtpStorage::MtpStorage(MtpStorageID id, const char* filePath,
- const char* description, uint64_t reserveSpace)
+ const char* description, uint64_t reserveSpace, bool removable)
: mStorageID(id),
mFilePath(filePath),
mDescription(description),
mMaxCapacity(0),
- mReserveSpace(reserveSpace)
+ mReserveSpace(reserveSpace),
+ mRemovable(removable)
{
LOGV("MtpStorage id: %d path: %s\n", id, filePath);
}
@@ -47,7 +48,7 @@
}
int MtpStorage::getType() const {
- return MTP_STORAGE_FIXED_RAM;
+ return (mRemovable ? MTP_STORAGE_REMOVABLE_RAM : MTP_STORAGE_FIXED_RAM);
}
int MtpStorage::getFileSystemType() const {
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
index d6ad25f..3e4f40d 100644
--- a/media/mtp/MtpStorage.h
+++ b/media/mtp/MtpStorage.h
@@ -33,10 +33,12 @@
uint64_t mMaxCapacity;
// amount of free space to leave unallocated
uint64_t mReserveSpace;
+ bool mRemovable;
public:
MtpStorage(MtpStorageID id, const char* filePath,
- const char* description, uint64_t reserveSpace);
+ const char* description, uint64_t reserveSpace,
+ bool removable);
virtual ~MtpStorage();
inline MtpStorageID getStorageID() const { return mStorageID; }
@@ -47,6 +49,7 @@
uint64_t getFreeSpace();
const char* getDescription() const;
inline const char* getPath() const { return (const char *)mFilePath; }
+ inline bool isRemovable() const { return mRemovable; }
};
}; // namespace android
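A minimal sketch of what the extra constructor argument means for callers; the storage IDs, paths, and descriptions below are placeholders rather than values taken from this change:

    // Sketch only: internal storage keeps reporting MTP_STORAGE_FIXED_RAM,
    // while a storage constructed with removable=true now reports
    // MTP_STORAGE_REMOVABLE_RAM.
    MtpStorage internal(0x00010001, "/mnt/sdcard", "Internal storage",
                        /* reserveSpace */ 0, /* removable */ false);
    MtpStorage sdcard(0x00020001, "/mnt/external_sd", "SD card",
                      /* reserveSpace */ 0, /* removable */ true);

    int internalType = internal.getType();   // MTP_STORAGE_FIXED_RAM
    int sdcardType   = sdcard.getType();     // MTP_STORAGE_REMOVABLE_RAM
    bool removable   = sdcard.isRemovable(); // true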
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 899907c..8e86eda 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -43,7 +43,7 @@
#include <private/media/AudioTrackShared.h>
#include <private/media/AudioEffectShared.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_hal.h>
#include "AudioMixer.h"
@@ -1237,6 +1237,7 @@
LOGV("createTrack_l() setting main buffer %p", chain->inBuffer());
track->setMainBuffer(chain->inBuffer());
chain->setStrategy(AudioSystem::getStrategyForStream((audio_stream_type_t)track->type()));
+ chain->incTrackCnt();
}
}
lStatus = NO_ERROR;
@@ -1320,7 +1321,7 @@
sp<EffectChain> chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
LOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), track->sessionId());
- chain->startTrack();
+ chain->incActiveTrackCnt();
}
}
@@ -1338,8 +1339,17 @@
{
track->mState = TrackBase::TERMINATED;
if (mActiveTracks.indexOf(track) < 0) {
- mTracks.remove(track);
- deleteTrackName_l(track->name());
+ removeTrack_l(track);
+ }
+}
+
+void AudioFlinger::PlaybackThread::removeTrack_l(const sp<Track>& track)
+{
+ mTracks.remove(track);
+ deleteTrackName_l(track->name());
+ sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+ if (chain != 0) {
+ chain->decTrackCnt();
}
}
@@ -1845,12 +1855,11 @@
chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
LOGV("stopping track on chain %p for session Id: %d", chain.get(), track->sessionId());
- chain->stopTrack();
+ chain->decActiveTrackCnt();
}
}
if (track->isTerminated()) {
- mTracks.remove(track);
- deleteTrackName_l(track->mName);
+ removeTrack_l(track);
}
}
}
@@ -1936,7 +1945,7 @@
if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) {
// when changing the audio output device, call addBatteryData to notify
// the change
- if (mDevice != value) {
+ if ((int)mDevice != value) {
uint32_t params = 0;
// check whether speaker is on
if (value & AUDIO_DEVICE_OUT_SPEAKER) {
@@ -2328,11 +2337,10 @@
if (!effectChains.isEmpty()) {
LOGV("stopping track on chain %p for session Id: %d", effectChains[0].get(),
trackToRemove->sessionId());
- effectChains[0]->stopTrack();
+ effectChains[0]->decActiveTrackCnt();
}
if (trackToRemove->isTerminated()) {
- mTracks.remove(trackToRemove);
- deleteTrackName_l(trackToRemove->mName);
+ removeTrack_l(trackToRemove);
}
}
@@ -5130,6 +5138,7 @@
if (session == track->sessionId()) {
LOGV("addEffectChain_l() track->setMainBuffer track %p buffer %p", track.get(), buffer);
track->setMainBuffer(buffer);
+ chain->incTrackCnt();
}
}
@@ -5139,7 +5148,7 @@
if (track == 0) continue;
if (session == track->sessionId()) {
LOGV("addEffectChain_l() activating track %p on session %d", track.get(), session);
- chain->startTrack();
+ chain->incActiveTrackCnt();
}
}
}
@@ -5175,11 +5184,23 @@
for (size_t i = 0; i < mEffectChains.size(); i++) {
if (chain == mEffectChains[i]) {
mEffectChains.removeAt(i);
+ // detach all active tracks from the chain
+ for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) {
+ sp<Track> track = mActiveTracks[i].promote();
+ if (track == 0) continue;
+ if (session == track->sessionId()) {
+ LOGV("removeEffectChain_l(): stopping track on chain %p for session Id: %d",
+ chain.get(), session);
+ chain->decActiveTrackCnt();
+ }
+ }
+
// detach all tracks with same session ID from this chain
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
if (session == track->sessionId()) {
track->setMainBuffer(mMixBuffer);
+ chain->decTrackCnt();
}
}
break;
@@ -5461,7 +5482,7 @@
// If an insert effect is idle and input buffer is different from output buffer,
// accumulate input onto output
sp<EffectChain> chain = mChain.promote();
- if (chain != 0 && chain->activeTracks() != 0) {
+ if (chain != 0 && chain->activeTrackCnt() != 0) {
size_t frameCnt = mConfig.inputCfg.buffer.frameCount * 2; //always stereo here
int16_t *in = mConfig.inputCfg.buffer.s16;
int16_t *out = mConfig.outputCfg.buffer.s16;
@@ -6137,9 +6158,9 @@
AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& wThread,
int sessionId)
- : mThread(wThread), mSessionId(sessionId), mActiveTrackCnt(0), mOwnInBuffer(false),
- mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
- mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX)
+ : mThread(wThread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0),
+ mOwnInBuffer(false), mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
+ mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX)
{
mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
}
@@ -6196,8 +6217,15 @@
(mSessionId == AUDIO_SESSION_OUTPUT_STAGE);
bool tracksOnSession = false;
if (!isGlobalSession) {
- tracksOnSession =
- playbackThread->hasAudioSession(mSessionId) & PlaybackThread::TRACK_SESSION;
+ tracksOnSession = (trackCnt() != 0);
+ }
+
+ // if no track is active, input buffer must be cleared here as the mixer process
+ // will not do it
+ if (tracksOnSession &&
+ activeTrackCnt() == 0) {
+ size_t numSamples = playbackThread->frameCount() * playbackThread->channelCount();
+ memset(mInBuffer, 0, numSamples * sizeof(int16_t));
}
size_t size = mEffects.size();
@@ -6210,13 +6238,6 @@
for (size_t i = 0; i < size; i++) {
mEffects[i]->updateState();
}
- // if no track is active, input buffer must be cleared here as the mixer process
- // will not do it
- if (tracksOnSession &&
- activeTracks() == 0) {
- size_t numSamples = playbackThread->frameCount() * playbackThread->channelCount();
- memset(mInBuffer, 0, numSamples * sizeof(int16_t));
- }
}
// addEffect_l() must be called with PlaybackThread::mLock held
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 22e5116..39314ad 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -38,7 +38,7 @@
#include <binder/BinderService.h>
#include <binder/MemoryDealer.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_hal.h>
#include "AudioBufferProvider.h"
@@ -684,6 +684,7 @@
status_t addTrack_l(const sp<Track>& track);
void destroyTrack_l(const sp<Track>& track);
+ void removeTrack_l(const sp<Track>& track);
void readOutputParameters();
@@ -1134,9 +1135,13 @@
return mOutBuffer;
}
- void startTrack() {mActiveTrackCnt++;}
- void stopTrack() {mActiveTrackCnt--;}
- int activeTracks() { return mActiveTrackCnt;}
+ void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
+ void decTrackCnt() { android_atomic_dec(&mTrackCnt); }
+ int32_t trackCnt() { return mTrackCnt;}
+
+ void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt); }
+ void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); }
+ int32_t activeTrackCnt() { return mActiveTrackCnt;}
uint32_t strategy() { return mStrategy; }
void setStrategy(uint32_t strategy)
@@ -1155,7 +1160,8 @@
int mSessionId; // audio session ID
int16_t *mInBuffer; // chain input buffer
int16_t *mOutBuffer; // chain output buffer
- int mActiveTrackCnt; // number of active tracks connected
+ volatile int32_t mActiveTrackCnt; // number of active tracks connected
+ volatile int32_t mTrackCnt; // number of tracks connected
bool mOwnInBuffer; // true if the chain owns its input buffer
int mVolumeCtrlIdx; // index of insert effect having control over volume
uint32_t mLeftVolume; // previous volume on left channel
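The counters above move from plain int updates to android_atomic_inc()/android_atomic_dec() on volatile int32_t members, so track attach/detach can be counted from binder threads and the mixer thread without extra locking. A reduced sketch of the pattern, independent of AudioFlinger:

    // Sketch only: lock-free counter in the style of mTrackCnt/mActiveTrackCnt.
    #include <cutils/atomic.h>

    struct TrackCounter {
        volatile int32_t mCnt;

        TrackCounter() : mCnt(0) {}
        void inc() { android_atomic_inc(&mCnt); }  // atomic ++ across threads
        void dec() { android_atomic_dec(&mCnt); }  // atomic -- across threads
        int32_t count() { return mCnt; }           // plain read of the volatile value
    };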
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index eebc1b3..ef8d957 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -35,7 +35,7 @@
#include <hardware_legacy/power.h>
#include <hardware/hardware.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_policy.h>
#include <hardware/audio_policy_hal.h>
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index 01e592b..d9b5ada 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -22,7 +22,7 @@
#include <utils/Vector.h>
#include <binder/BinderService.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <hardware/audio_policy.h>
#include <hardware/audio_policy_hal.h>
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 14f1e8b..e35435e 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -1,38 +1,5 @@
LOCAL_PATH:= $(call my-dir)
-# Set USE_CAMERA_STUB if you don't want to use the hardware camera.
-
-# force these builds to use camera stub only
-ifneq ($(filter sooner generic sim,$(TARGET_DEVICE)),)
- USE_CAMERA_STUB:=true
-endif
-
-ifeq ($(USE_CAMERA_STUB),)
- USE_CAMERA_STUB:=false
-endif
-
-ifeq ($(USE_CAMERA_STUB),true)
-#
-# libcamerastub
-#
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- CameraHardwareStub.cpp \
- FakeCamera.cpp
-
-LOCAL_MODULE:= libcamerastub
-
-ifeq ($(TARGET_SIMULATOR),true)
-LOCAL_CFLAGS += -DSINGLE_PROCESS
-endif
-
-LOCAL_SHARED_LIBRARIES:= libui
-
-include $(BUILD_STATIC_LIBRARY)
-endif # USE_CAMERA_STUB
-
#
# libcameraservice
#
@@ -49,18 +16,9 @@
libcutils \
libmedia \
libcamera_client \
- libgui
+ libgui \
+ libhardware
LOCAL_MODULE:= libcameraservice
-ifeq ($(TARGET_SIMULATOR),true)
-LOCAL_CFLAGS += -DSINGLE_PROCESS
-endif
-
-ifeq ($(USE_CAMERA_STUB), true)
-LOCAL_STATIC_LIBRARIES += libcamerastub
-else
-LOCAL_SHARED_LIBRARIES += libcamera
-endif
-
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h
new file mode 100644
index 0000000..f9fa30e
--- /dev/null
+++ b/services/camera/libcameraservice/CameraHardwareInterface.h
@@ -0,0 +1,619 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
+#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
+
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/RefBase.h>
+#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBuffer.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <system/window.h>
+#include <hardware/camera.h>
+
+namespace android {
+
+typedef void (*notify_callback)(int32_t msgType,
+ int32_t ext1,
+ int32_t ext2,
+ void* user);
+
+typedef void (*data_callback)(int32_t msgType,
+ const sp<IMemory> &dataPtr,
+ void* user);
+
+typedef void (*data_callback_timestamp)(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory> &dataPtr,
+ void *user);
+
+/**
+ * CameraHardwareInterface.h defines the interface to the
+ * camera hardware abstraction layer, used for setting and getting
+ * parameters, live previewing, and taking pictures.
+ *
+ * It is a reference-counted interface with RefBase as its base class.
+ * CameraService calls openCameraHardware() to retrieve a strong pointer to an
+ * instance of this interface; openCameraHardware() may be called multiple times. The
+ * following steps describe a typical sequence:
+ *
+ * -# After CameraService calls openCameraHardware(), getParameters() and
+ * setParameters() are used to initialize the camera instance.
+ * CameraService calls getPreviewHeap() to establish access to the
+ * preview heap so it can be registered with SurfaceFlinger for
+ * efficient display updating while in preview mode.
+ * -# startPreview() is called. The camera instance then periodically
+ * sends the message CAMERA_MSG_PREVIEW_FRAME (if enabled) each time
+ * a new preview frame is available. If data callback code needs to use
+ * this memory after returning, it must copy the data.
+ *
+ * Prior to taking a picture, CameraService calls autoFocus(). When auto
+ * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
+ * which informs the application whether focusing was successful. The camera instance
+ * only sends this message once and it is up to the application to call autoFocus()
+ * again if refocusing is desired.
+ *
+ * CameraService calls takePicture() to request the camera instance take a
+ * picture. At this point, if a shutter, postview, raw, and/or compressed callback
+ * is desired, the corresponding message must be enabled. As with CAMERA_MSG_PREVIEW_FRAME,
+ * any memory provided in a data callback must be copied if it's needed after returning.
+ */
+
+class CameraHardwareInterface : public virtual RefBase {
+public:
+ CameraHardwareInterface(hw_module_t *module, const char *name)
+ {
+ mDevice = 0;
+ mName = name;
+ LOGI("Opening camera %s, this %p", name, this);
+ int rc = module->methods->open(module, name,
+ (hw_device_t **)&mDevice);
+ if (rc != OK)
+ LOGE("Could not open camera %s: %d", name, rc);
+ initHalPreviewWindow();
+ }
+
+ ~CameraHardwareInterface()
+ {
+ LOGI("Destroying camera %s", mName.string());
+ int rc = mDevice->common.close(&mDevice->common);
+ if (rc != OK)
+ LOGE("Could not close camera %s: %d", mName.string(), rc);
+ }
+
+ /** Set the ANativeWindow to which preview frames are sent */
+ status_t setPreviewWindow(const sp<ANativeWindow>& buf)
+ {
+ LOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());
+
+ if (mDevice->ops->set_preview_window) {
+ mPreviewWindow = buf;
+ mHalPreviewWindow.user = this;
+ LOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p", __FUNCTION__,
+ &mHalPreviewWindow, mHalPreviewWindow.user);
+ return mDevice->ops->set_preview_window(mDevice,
+ buf.get() ? &mHalPreviewWindow.nw : 0);
+ }
+ return INVALID_OPERATION;
+ }
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(notify_callback notify_cb,
+ data_callback data_cb,
+ data_callback_timestamp data_cb_timestamp,
+ void* user)
+ {
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mCbUser = user;
+
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+
+ if (mDevice->ops->set_callbacks) {
+ mDevice->ops->set_callbacks(mDevice,
+ __notify_cb,
+ __data_cb,
+ __data_cb_timestamp,
+ __get_memory,
+ this);
+ }
+ }
+
+ /**
+ * The following three functions all take a msgtype,
+ * which is a bitmask of the messages defined in
+ * include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msgType)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->enable_msg_type)
+ mDevice->ops->enable_msg_type(mDevice, msgType);
+ }
+
+ /**
+ * Disable a message, or a set of messages.
+ *
+ * Once it receives a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera hal
+ * should not rely on its client to call releaseRecordingFrame() to release
+ * video recording frames sent out by the camera hal before and after the
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
+ * modify/access any video recording frame after calling
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+ */
+ void disableMsgType(int32_t msgType)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->disable_msg_type)
+ mDevice->ops->disable_msg_type(mDevice, msgType);
+ }
+
+ /**
+ * Query whether a message, or a set of messages, is enabled.
+ * Note that this operates as an AND: if any of the messages
+ * queried are off, this will return false.
+ */
+ int msgTypeEnabled(int32_t msgType)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->msg_type_enabled)
+ return mDevice->ops->msg_type_enabled(mDevice, msgType);
+ return false;
+ }
+
+ /**
+ * Start preview mode.
+ */
+ status_t startPreview()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->start_preview)
+ return mDevice->ops->start_preview(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->stop_preview)
+ mDevice->ops->stop_preview(mDevice);
+ }
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ int previewEnabled()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->preview_enabled)
+ return mDevice->ops->preview_enabled(mDevice);
+ return false;
+ }
+
+ /**
+ * Request the camera hal to store meta data or real YUV data in
+ * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+ * recording session. If it is not called, the default camera
+ * hal behavior is to store real YUV data in the video buffers.
+ *
+ * This method should be called before startRecording() in order
+ * to be effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the
+ * receiver of the video buffers to interpret the contents and
+ * to find the actual frame data with the help of the meta data
+ * in the buffer. How this is done is outside of the scope of
+ * this method.
+ *
+ * Some camera hal may not support storing meta data in the video
+ * buffers, but all camera hal should support storing real YUV data
+ * in the video buffers. If the camera hal does not support storing
+ * the meta data in the video buffers when it is requested to do
+ * so, INVALID_OPERATION must be returned. It is very useful for
+ * the camera hal to pass meta data rather than the actual frame
+ * data directly to the video encoder, since the amount of the
+ * uncompressed frame data can be very large if video size is large.
+ *
+ * @param enable true to instruct the camera hal to store
+ * meta data in the video buffers; false to instruct
+ * the camera hal to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+
+ status_t storeMetaDataInBuffers(int enable)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->store_meta_data_in_buffers)
+ return mDevice->ops->store_meta_data_in_buffers(mDevice, enable);
+ return enable ? INVALID_OPERATION: OK;
+ }
+
+ /**
+ * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
+ * message is sent with the corresponding frame. Every record frame must be released
+ * by a camera hal client via releaseRecordingFrame() before the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
+ * to manage the life-cycle of the video recording frames, and the client must
+ * not modify/access any video recording frames.
+ */
+ status_t startRecording()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->start_recording)
+ return mDevice->ops->start_recording(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->stop_recording)
+ mDevice->ops->stop_recording(mDevice);
+ }
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->recording_enabled)
+ return mDevice->ops->recording_enabled(mDevice);
+ return false;
+ }
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ *
+ * It is camera hal client's responsibility to release video recording
+ * frames sent out by the camera hal before the camera hal receives
+ * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
+ * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
+ * responsibility to manage the life-cycle of the video recording
+ * frames.
+ */
+ void releaseRecordingFrame(const sp<IMemory>& mem)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->release_recording_frame) {
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+ void *data = ((uint8_t *)heap->base()) + offset;
+ return mDevice->ops->release_recording_frame(mDevice, data);
+ }
+ }
+
+ /**
+ * Start auto focus; the notification callback routine is called
+ * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
+ * will be called again if another auto focus is needed.
+ */
+ status_t autoFocus()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->auto_focus)
+ return mDevice->ops->auto_focus(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress
+ * or not, this function will return the focus position to the default.
+ * If the camera does not support auto-focus, this is a no-op.
+ */
+ status_t cancelAutoFocus()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->cancel_auto_focus)
+ return mDevice->ops->cancel_auto_focus(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Take a picture.
+ */
+ status_t takePicture()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->take_picture)
+ return mDevice->ops->take_picture(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this
+ * method when no picture is being taken is a no-op.
+ */
+ status_t cancelPicture()
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->cancel_picture)
+ return mDevice->ops->cancel_picture(mDevice);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported. */
+ status_t setParameters(const CameraParameters &params)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->set_parameters)
+ return mDevice->ops->set_parameters(mDevice,
+ params.flatten().string());
+ return INVALID_OPERATION;
+ }
+
+ /** Return the camera parameters. */
+ CameraParameters getParameters() const
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ CameraParameters parms;
+ if (mDevice->ops->get_parameters) {
+ char *temp = mDevice->ops->get_parameters(mDevice);
+ String8 str_parms(temp);
+ free(temp);
+ parms.unflatten(str_parms);
+ }
+ return parms;
+ }
+
+ /**
+ * Send command to camera driver.
+ */
+ status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->send_command)
+ return mDevice->ops->send_command(mDevice, cmd, arg1, arg2);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release() {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->release)
+ mDevice->ops->release(mDevice);
+ }
+
+ /**
+ * Dump state of the camera hardware
+ */
+ status_t dump(int fd, const Vector<String16>& args) const
+ {
+ LOGV("%s(%s)", __FUNCTION__, mName.string());
+ if (mDevice->ops->dump)
+ return mDevice->ops->dump(mDevice, fd);
+ return OK; // It's fine if the HAL doesn't implement dump()
+ }
+
+private:
+ camera_device_t *mDevice;
+ String8 mName;
+
+ static void __notify_cb(int32_t msg_type, int32_t ext1,
+ int32_t ext2, void *user)
+ {
+ LOGV("%s", __FUNCTION__);
+ CameraHardwareInterface *__this =
+ static_cast<CameraHardwareInterface *>(user);
+ __this->mNotifyCb(msg_type, ext1, ext2, __this->mCbUser);
+ }
+
+ static void __data_cb(int32_t msg_type,
+ const camera_memory_t *data,
+ void *user)
+ {
+ LOGV("%s", __FUNCTION__);
+ CameraHardwareInterface *__this =
+ static_cast<CameraHardwareInterface *>(user);
+ sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
+ __this->mDataCb(msg_type, mem, __this->mCbUser);
+ }
+
+ static void __data_cb_timestamp(nsecs_t timestamp, int32_t msg_type,
+ const camera_memory_t *data,
+ void *user)
+ {
+ LOGV("%s", __FUNCTION__);
+ CameraHardwareInterface *__this =
+ static_cast<CameraHardwareInterface *>(user);
+ // Start refcounting the heap object from here on. When the clients
+ // drop all references, it will be destroyed (as well as the enclosed
+ // MemoryHeapBase).
+ sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
+ __this->mDataCbTimestamp(timestamp, msg_type, mem, __this->mCbUser);
+ }
+
+ // This is a utility class that combines a MemoryHeapBase and a MemoryBase
+ // in one. Since we tend to use them in a one-to-one relationship, this is
+ // handy.
+
+ class CameraHeapMemory : public MemoryBase {
+ public:
+ CameraHeapMemory(size_t size) :
+ MemoryBase(new MemoryHeapBase(size), 0, size)
+ {
+ handle.data = getHeap()->base();
+ handle.size = size;
+ handle.handle = this;
+ }
+
+ camera_memory_t handle;
+ };
+
+ static camera_memory_t* __get_memory(size_t size,
+ void *user __attribute__((unused)))
+ {
+ // We allocate the object here, but we do not assign it to a strong
+ // pointer yet. The HAL will pass it back to us via the data callback
+ // or the data-timestamp callback, and from there on we will wrap it
+ // within a strong pointer.
+
+ CameraHeapMemory *mem = new CameraHeapMemory(size);
+ return &mem->handle;
+ }
+
+ static ANativeWindow *__to_anw(void *user)
+ {
+ CameraHardwareInterface *__this =
+ reinterpret_cast<CameraHardwareInterface *>(user);
+ return __this->mPreviewWindow.get();
+ }
+#define anw(n) __to_anw(((struct camera_preview_window *)n)->user)
+
+ static int __dequeue_buffer(struct preview_stream_ops* w,
+ buffer_handle_t** buffer)
+ {
+ int rc;
+ ANativeWindow *a = anw(w);
+ ANativeWindowBuffer* anb;
+ rc = a->dequeueBuffer(a, &anb);
+ if (!rc) {
+ rc = a->lockBuffer(a, anb);
+ if (!rc)
+ *buffer = &anb->handle;
+ else
+ a->cancelBuffer(a, anb);
+ }
+ return rc;
+ }
+
+#ifndef container_of
+#define container_of(ptr, type, member) ({ \
+ const typeof(((type *) 0)->member) *__mptr = (ptr); \
+ (type *) ((char *) __mptr - (char *)(&((type *)0)->member)); })
+#endif
+
+ static int __enqueue_buffer(struct preview_stream_ops* w,
+ buffer_handle_t* buffer)
+ {
+ ANativeWindow *a = anw(w);
+ return a->queueBuffer(a,
+ container_of(buffer, ANativeWindowBuffer, handle));
+ }
+
+ static int __cancel_buffer(struct preview_stream_ops* w,
+ buffer_handle_t* buffer)
+ {
+ ANativeWindow *a = anw(w);
+ return a->cancelBuffer(a,
+ container_of(buffer, ANativeWindowBuffer, handle));
+ }
+
+ static int __set_buffer_count(struct preview_stream_ops* w, int count)
+ {
+ ANativeWindow *a = anw(w);
+ return native_window_set_buffer_count(a, count);
+ }
+
+ static int __set_buffers_geometry(struct preview_stream_ops* w,
+ int width, int height, int format)
+ {
+ ANativeWindow *a = anw(w);
+ return native_window_set_buffers_geometry(a,
+ width, height, format);
+ }
+
+ static int __set_crop(struct preview_stream_ops *w,
+ int left, int top, int right, int bottom)
+ {
+ ANativeWindow *a = anw(w);
+ android_native_rect_t crop;
+ crop.left = left;
+ crop.top = top;
+ crop.right = right;
+ crop.bottom = bottom;
+ return native_window_set_crop(a, &crop);
+ }
+
+ static int __set_usage(struct preview_stream_ops* w, int usage)
+ {
+ ANativeWindow *a = anw(w);
+ return native_window_set_usage(a, usage);
+ }
+
+ static int __set_swap_interval(struct preview_stream_ops *w, int interval)
+ {
+ ANativeWindow *a = anw(w);
+ return a->setSwapInterval(a, interval);
+ }
+
+ static int __get_min_undequeued_buffer_count(
+ const struct preview_stream_ops *w,
+ int *count)
+ {
+ ANativeWindow *a = anw(w);
+ return a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, count);
+ }
+
+ void initHalPreviewWindow()
+ {
+ mHalPreviewWindow.nw.cancel_buffer = __cancel_buffer;
+ mHalPreviewWindow.nw.dequeue_buffer = __dequeue_buffer;
+ mHalPreviewWindow.nw.enqueue_buffer = __enqueue_buffer;
+ mHalPreviewWindow.nw.set_buffer_count = __set_buffer_count;
+ mHalPreviewWindow.nw.set_buffers_geometry = __set_buffers_geometry;
+ mHalPreviewWindow.nw.set_crop = __set_crop;
+ mHalPreviewWindow.nw.set_usage = __set_usage;
+ mHalPreviewWindow.nw.set_swap_interval = __set_swap_interval;
+
+ mHalPreviewWindow.nw.get_min_undequeued_buffer_count =
+ __get_min_undequeued_buffer_count;
+ }
+
+ sp<ANativeWindow> mPreviewWindow;
+
+ struct camera_preview_window {
+ struct preview_stream_ops nw;
+ void *user;
+ };
+
+ struct camera_preview_window mHalPreviewWindow;
+
+ notify_callback mNotifyCb;
+ data_callback mDataCb;
+ data_callback_timestamp mDataCbTimestamp;
+ void *mCbUser;
+};
+
+}; // namespace android
+
+#endif
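A hedged usage sketch of the wrapper defined above, following the call sequence its header comment describes. The callbacks (onNotify, onData, onDataTimestamp) and previewWindow are placeholders supplied by the client, not part of this change, and error handling is omitted:

    // Sketch only: open camera "0" through the HAL module and run the
    // preview -> autofocus -> take-picture sequence.
    camera_module_t *module;
    hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t **)&module);

    sp<CameraHardwareInterface> hw =
            new CameraHardwareInterface(&module->common, "0");

    hw->setCallbacks(onNotify, onData, onDataTimestamp, /* user */ NULL);
    hw->setPreviewWindow(previewWindow);        // sp<ANativeWindow> from the client
    hw->enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
    hw->startPreview();

    hw->autoFocus();                            // completion via CAMERA_MSG_FOCUS
    hw->takePicture();                          // JPEG via the data callback

    hw->stopPreview();
    hw->release();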
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index f4859ec..1e8c30b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -16,6 +16,7 @@
*/
#define LOG_TAG "CameraService"
+//#define LOG_NDEBUG 0
#include <stdio.h>
#include <sys/types.h>
@@ -37,6 +38,7 @@
#include <utils/String16.h>
#include "CameraService.h"
+#include "CameraHardwareInterface.h"
namespace android {
@@ -69,24 +71,34 @@
static CameraService *gCameraService;
CameraService::CameraService()
-:mSoundRef(0)
+:mSoundRef(0), mModule(0)
{
LOGI("CameraService started (pid=%d)", getpid());
-
- mNumberOfCameras = HAL_getNumberOfCameras();
- if (mNumberOfCameras > MAX_CAMERAS) {
- LOGE("Number of cameras(%d) > MAX_CAMERAS(%d).",
- mNumberOfCameras, MAX_CAMERAS);
- mNumberOfCameras = MAX_CAMERAS;
- }
-
- for (int i = 0; i < mNumberOfCameras; i++) {
- setCameraFree(i);
- }
-
gCameraService = this;
}
+void CameraService::onFirstRef()
+{
+ BnCameraService::onFirstRef();
+
+ if (hw_get_module(CAMERA_HARDWARE_MODULE_ID,
+ (const hw_module_t **)&mModule) < 0) {
+ LOGE("Could not load camera HAL module");
+ mNumberOfCameras = 0;
+ }
+ else {
+ mNumberOfCameras = mModule->get_number_of_cameras();
+ if (mNumberOfCameras > MAX_CAMERAS) {
+ LOGE("Number of cameras(%d) > MAX_CAMERAS(%d).",
+ mNumberOfCameras, MAX_CAMERAS);
+ mNumberOfCameras = MAX_CAMERAS;
+ }
+ for (int i = 0; i < mNumberOfCameras; i++) {
+ setCameraFree(i);
+ }
+ }
+}
+
CameraService::~CameraService() {
for (int i = 0; i < mNumberOfCameras; i++) {
if (mBusy[i]) {
@@ -103,12 +115,19 @@
status_t CameraService::getCameraInfo(int cameraId,
struct CameraInfo* cameraInfo) {
+ if (!mModule) {
+ return NO_INIT;
+ }
+
if (cameraId < 0 || cameraId >= mNumberOfCameras) {
return BAD_VALUE;
}
- HAL_getCameraInfo(cameraId, cameraInfo);
- return OK;
+ struct camera_info info;
+ status_t rc = mModule->get_camera_info(cameraId, &info);
+ cameraInfo->facing = info.facing;
+ cameraInfo->orientation = info.orientation;
+ return rc;
}
sp<ICamera> CameraService::connect(
@@ -116,6 +135,11 @@
int callingPid = getCallingPid();
LOG1("CameraService::connect E (pid %d, id %d)", callingPid, cameraId);
+ if (!mModule) {
+ LOGE("Camera HAL module not loaded");
+ return NULL;
+ }
+
sp<Client> client;
if (cameraId < 0 || cameraId >= mNumberOfCameras) {
LOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).",
@@ -146,15 +170,19 @@
return NULL;
}
- sp<CameraHardwareInterface> hardware = HAL_openCameraHardware(cameraId);
- if (hardware == NULL) {
- LOGE("Fail to open camera hardware (id=%d)", cameraId);
+ struct camera_info info;
+ if (mModule->get_camera_info(cameraId, &info) != OK) {
+ LOGE("Invalid camera id %d", cameraId);
return NULL;
}
- CameraInfo info;
- HAL_getCameraInfo(cameraId, &info);
- client = new Client(this, cameraClient, hardware, cameraId, info.facing,
- callingPid);
+
+ char camera_device_name[10];
+ snprintf(camera_device_name, sizeof(camera_device_name), "%d", cameraId);
+
+ client = new Client(this, cameraClient,
+ new CameraHardwareInterface(&mModule->common,
+ camera_device_name),
+ cameraId, info.facing, callingPid);
mClient[cameraId] = client;
LOG1("CameraService::connect X");
return client;
@@ -320,7 +348,7 @@
CAMERA_MSG_FOCUS);
// Callback is disabled by default
- mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
+ mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
mPlayShutterSound = true;
cameraService->setCameraBusy(cameraId);
@@ -410,7 +438,7 @@
return NO_ERROR;
}
- mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
+ mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
mClientPid = callingPid;
mCameraClient = client;
@@ -543,7 +571,7 @@
if (checkPidAndHardware() != NO_ERROR) return;
mPreviewCallbackFlag = callback_flag;
- if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+ if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
} else {
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
@@ -1009,7 +1037,7 @@
int flags = mPreviewCallbackFlag;
// is callback enabled?
- if (!(flags & FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
+ if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
// If the enable bit is off, the copy-out and one-shot bits are ignored
LOG2("frame callback is disabled");
mLock.unlock();
@@ -1020,17 +1048,17 @@
sp<ICameraClient> c = mCameraClient;
// clear callback flags if no client or one-shot mode
- if (c == 0 || (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
+ if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
LOG2("Disable preview callback");
- mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
- FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
- FRAME_CALLBACK_FLAG_ENABLE_MASK);
+ mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
+ CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
+ CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
}
if (c != 0) {
// Is the received frame copied out or not?
- if (flags & FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
+ if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
LOG2("frame is copied");
copyFrameAndPostCopiedFrame(c, heap, offset, size);
} else {
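CameraService now loads the HAL in onFirstRef() and talks to camera_module_t directly instead of the old HAL_* entry points. A condensed sketch of that module-query path on its own, with error handling trimmed:

    // Sketch only: enumerate cameras through the HAL module, as onFirstRef()
    // and getCameraInfo() do above.
    camera_module_t *module;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                      (const hw_module_t **)&module) < 0) {
        LOGE("Could not load camera HAL module");
    } else {
        int n = module->get_number_of_cameras();
        for (int i = 0; i < n; i++) {
            struct camera_info info;
            if (module->get_camera_info(i, &info) == OK) {
                LOGI("camera %d: facing=%d orientation=%d",
                     i, info.facing, info.orientation);
            }
        }
    }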
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 9a9ab0e..5e2d571 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -19,9 +19,8 @@
#define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
#include <binder/BinderService.h>
-
#include <camera/ICameraService.h>
-#include <camera/CameraHardwareInterface.h>
+#include <hardware/camera.h>
/* This needs to be increased if we can have more cameras */
#define MAX_CAMERAS 2
@@ -30,6 +29,7 @@
class MemoryHeapBase;
class MediaPlayer;
+class CameraHardwareInterface;
class CameraService :
public BinderService<CameraService>,
@@ -53,6 +53,7 @@
virtual status_t dump(int fd, const Vector<String16>& args);
virtual status_t onTransact(uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags);
+ virtual void onFirstRef();
enum sound_kind {
SOUND_SHUTTER = 0,
@@ -199,6 +200,8 @@
// is found to be disabled. It returns true if mLock is grabbed.
bool lockIfMessageWanted(int32_t msgType);
};
+
+ camera_module_t *mModule;
};
} // namespace android
diff --git a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp b/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
index 8a228fd..f86ca47 100644
--- a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
+++ b/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
@@ -830,10 +830,10 @@
ASSERT(c->previewEnabled() == true);
sleep(2);
c->stopPreview();
- if ((v & FRAME_CALLBACK_FLAG_ENABLE_MASK) == 0) {
+ if ((v & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) == 0) {
cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::EQ, 0);
} else {
- if ((v & FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) == 0) {
+ if ((v & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) == 0) {
cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::GE, 10);
} else {
cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::EQ, 1);
@@ -849,7 +849,7 @@
ASSERT(c->recordingEnabled() == false);
sp<MSurface> surface = new MSurface();
ASSERT(c->setPreviewDisplay(surface) == NO_ERROR);
- c->setPreviewCallbackFlag(FRAME_CALLBACK_FLAG_ENABLE_MASK);
+ c->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
cc->setReleaser(c.get());
c->startRecording();
ASSERT(c->recordingEnabled() == true);
@@ -870,7 +870,7 @@
CameraParameters param(c->getParameters());
param.setPreviewSize(w, h);
- c->setPreviewCallbackFlag(FRAME_CALLBACK_FLAG_ENABLE_MASK);
+ c->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
c->setParameters(param.flatten());
c->startPreview();