Software AVC encoder is now OMX-based

o related-to-bug: 6383440

Change-Id: I0b1ae50a704b0979857ea447585eabe86602149a
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index d80d3f8..ee31ab2 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -3,6 +3,7 @@
 
 LOCAL_SRC_FILES := \
     AVCEncoder.cpp \
+    SoftAVCEncoder.cpp \
     src/avcenc_api.cpp \
     src/bitstream_io.cpp \
     src/block.cpp \
@@ -33,3 +34,40 @@
     -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
 
 include $(BUILD_STATIC_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+        SoftAVCEncoder.cpp
+
+LOCAL_C_INCLUDES := \
+        frameworks/av/media/libstagefright/include \
+        frameworks/native/include/media/openmax \
+        $(LOCAL_PATH)/src \
+        $(LOCAL_PATH)/include \
+        $(LOCAL_PATH)/../common/include \
+        $(LOCAL_PATH)/../common
+
+LOCAL_CFLAGS := \
+    -D__arm__ \
+    -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+        libstagefright_avcenc
+
+LOCAL_SHARED_LIBRARIES := \
+        libstagefright \
+        libstagefright_avc_common \
+        libstagefright_enc_common \
+        libstagefright_foundation \
+        libstagefright_omx \
+        libutils
+
+LOCAL_MODULE := libstagefright_soft_h264enc
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
new file mode 100644
index 0000000..c6f658d
--- /dev/null
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -0,0 +1,890 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAVCEncoder"
+#include <utils/Log.h>
+
+#include "avcenc_api.h"
+#include "avcenc_int.h"
+#include "OMX_Video.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include "SoftAVCEncoder.h"
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+    params->nSize = sizeof(T);
+    params->nVersion.s.nVersionMajor = 1;
+    params->nVersion.s.nVersionMinor = 0;
+    params->nVersion.s.nRevision = 0;
+    params->nVersion.s.nStep = 0;
+}
+
+typedef struct LevelConversion {
+    OMX_U32 omxLevel;
+    AVCLevel avcLevel;
+} LevelConversion;
+
+static LevelConversion ConversionTable[] = {
+    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
+    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
+    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
+    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
+    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
+    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
+#if 0
+    // encoding speed is very poor if video
+    // resolution is higher than CIF
+    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
+    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
+    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
+    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
+    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
+    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
+    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
+    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
+    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
+    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
+#endif
+};
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+        OMX_U32 omxLevel, AVCLevel *avcLevel) {
+    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
+        i < n; ++i) {
+        if (omxLevel == ConversionTable[i].omxLevel) {
+            *avcLevel = ConversionTable[i].avcLevel;
+            return OK;
+        }
+    }
+
+    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
+            (int32_t)omxLevel);
+
+    return BAD_VALUE;
+}
+
+static status_t ConvertAvcSpecLevelToOmxAvcLevel(
+    AVCLevel avcLevel, OMX_U32 *omxLevel) {
+    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
+        i < n; ++i) {
+        if (avcLevel == ConversionTable[i].avcLevel) {
+            *omxLevel = ConversionTable[i].omxLevel;
+            return OK;
+        }
+    }
+
+    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
+            (int32_t) avcLevel);
+
+    return BAD_VALUE;
+}
+
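+// Splits the interleaved chroma plane of a YUV420 semi-planar input into the
+// separate Cb and Cr planes that the planar layout expects, copying the luma
+// plane as-is; note that the two chroma channels are swapped while
+// deinterleaving.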
+inline static void ConvertYUV420SemiPlanarToYUV420Planar(
+        uint8_t *inyuv, uint8_t* outyuv,
+        int32_t width, int32_t height) {
+
+    int32_t outYsize = width * height;
+    uint32_t *outy =  (uint32_t *) outyuv;
+    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
+    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
+
+    /* Y copying */
+    memcpy(outy, inyuv, outYsize);
+
+    /* U & V copying */
+    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
+    for (int32_t i = height >> 1; i > 0; --i) {
+        for (int32_t j = width >> 2; j > 0; --j) {
+            uint32_t temp = *inyuv_4++;
+            uint32_t tempU = temp & 0xFF;
+            tempU = tempU | ((temp >> 8) & 0xFF00);
+
+            uint32_t tempV = (temp >> 8) & 0xFF;
+            tempV = tempV | ((temp >> 16) & 0xFF00);
+
+            // Flip U and V
+            *outcb++ = tempV;
+            *outcr++ = tempU;
+        }
+    }
+}
+
+static int32_t MallocWrapper(
+        void *userData, int32_t size, int32_t attrs) {
+    return reinterpret_cast<int32_t>(malloc(size));
+}
+
+static void FreeWrapper(void *userData, int32_t ptr) {
+    free(reinterpret_cast<void *>(ptr));
+}
+
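+// C-style callbacks handed to PV's encoder; userData carries the
+// SoftAVCEncoder instance so the calls can be forwarded to its methods.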
+static int32_t DpbAllocWrapper(void *userData,
+        unsigned int sizeInMbs, unsigned int numBuffers) {
+    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
+    CHECK(encoder != NULL);
+    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
+}
+
+static int32_t BindFrameWrapper(
+        void *userData, int32_t index, uint8_t **yuv) {
+    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
+    CHECK(encoder != NULL);
+    return encoder->bindOutputBuffer(index, yuv);
+}
+
+static void UnbindFrameWrapper(void *userData, int32_t index) {
+    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
+    CHECK(encoder != NULL);
+    return encoder->unbindOutputBuffer(index);
+}
+
+SoftAVCEncoder::SoftAVCEncoder(
+            const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component)
+    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+      mVideoWidth(176),
+      mVideoHeight(144),
+      mVideoFrameRate(30),
+      mVideoBitRate(192000),
+      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
+      mIDRFrameRefreshIntervalInSec(1),
+      mAVCEncProfile(AVC_BASELINE),
+      mAVCEncLevel(AVC_LEVEL2),
+      mNumInputFrames(-1),
+      mPrevTimestampUs(-1),
+      mStarted(false),
+      mSawInputEOS(false),
+      mSignalledError(false),
+      mHandle(new tagAVCHandle),
+      mEncParams(new tagAVCEncParam),
+      mInputFrameData(NULL),
+      mSliceGroup(NULL) {
+
+    initPorts();
+    ALOGI("Construct SoftAVCEncoder");
+}
+
+SoftAVCEncoder::~SoftAVCEncoder() {
+    ALOGV("Destruct SoftAVCEncoder");
+    releaseEncoder();
+    List<BufferInfo *> &outQueue = getPortQueue(1);
+    List<BufferInfo *> &inQueue = getPortQueue(0);
+    CHECK(outQueue.empty());
+    CHECK(inQueue.empty());
+}
+
+OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
+    CHECK(mHandle != NULL);
+    memset(mHandle, 0, sizeof(tagAVCHandle));
+    mHandle->AVCObject = NULL;
+    mHandle->userData = this;
+    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
+    mHandle->CBAVC_FrameBind = BindFrameWrapper;
+    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
+    mHandle->CBAVC_Malloc = MallocWrapper;
+    mHandle->CBAVC_Free = FreeWrapper;
+
+    CHECK(mEncParams != NULL);
+    memset(mEncParams, 0, sizeof(tagAVCEncParam));
+    mEncParams->rate_control = AVC_ON;
+    mEncParams->initQP = 0;
+    mEncParams->init_CBP_removal_delay = 1600;
+
+    mEncParams->intramb_refresh = 0;
+    mEncParams->auto_scd = AVC_ON;
+    mEncParams->out_of_band_param_set = AVC_ON;
+    mEncParams->poc_type = 2;
+    mEncParams->log2_max_poc_lsb_minus_4 = 12;
+    mEncParams->delta_poc_zero_flag = 0;
+    mEncParams->offset_poc_non_ref = 0;
+    mEncParams->offset_top_bottom = 0;
+    mEncParams->num_ref_in_cycle = 0;
+    mEncParams->offset_poc_ref = NULL;
+
+    mEncParams->num_ref_frame = 1;
+    mEncParams->num_slice_group = 1;
+    mEncParams->fmo_type = 0;
+
+    mEncParams->db_filter = AVC_ON;
+    mEncParams->disable_db_idc = 0;
+
+    mEncParams->alpha_offset = 0;
+    mEncParams->beta_offset = 0;
+    mEncParams->constrained_intra_pred = AVC_OFF;
+
+    mEncParams->data_par = AVC_OFF;
+    mEncParams->fullsearch = AVC_OFF;
+    mEncParams->search_range = 16;
+    mEncParams->sub_pel = AVC_OFF;
+    mEncParams->submb_pred = AVC_OFF;
+    mEncParams->rdopt_mode = AVC_OFF;
+    mEncParams->bidir_pred = AVC_OFF;
+
+    mEncParams->use_overrun_buffer = AVC_OFF;
+
+    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        // Color conversion is needed.
+        CHECK(mInputFrameData == NULL);
+        mInputFrameData =
+            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+        CHECK(mInputFrameData != NULL);
+    }
+
+    // PV's AVC encoder requires video dimensions that are multiples of 16
+    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+        ALOGE("Video frame size %dx%d must be a multiple of 16",
+            mVideoWidth, mVideoHeight);
+        return OMX_ErrorBadParameter;
+    }
+
+    mEncParams->width = mVideoWidth;
+    mEncParams->height = mVideoHeight;
+    mEncParams->bitrate = mVideoBitRate;
+    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // In frames/ms!
+    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);
+
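+    // Number of 16x16 macroblocks after rounding both dimensions up to
+    // multiples of 16; one slice-group entry is needed per macroblock.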
+    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
+            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
+    CHECK(mSliceGroup == NULL);
+    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
+    CHECK(mSliceGroup != NULL);
+    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
+        mSliceGroup[ii] = idx++;
+        if (idx >= mEncParams->num_slice_group) {
+            idx = 0;
+        }
+    }
+    mEncParams->slice_group = mSliceGroup;
+
+    // Set IDR frame refresh interval
+    if (mIDRFrameRefreshIntervalInSec < 0) {
+        mEncParams->idr_period = -1;
+    } else if (mIDRFrameRefreshIntervalInSec == 0) {
+        mEncParams->idr_period = 1;  // All I frames
+    } else {
+        mEncParams->idr_period =
+            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+    }
+
+    // Set profile and level
+    mEncParams->profile = mAVCEncProfile;
+    mEncParams->level = mAVCEncLevel;
+
+    return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
+    CHECK(!mStarted);
+
+    OMX_ERRORTYPE errType = OMX_ErrorNone;
+    if (OMX_ErrorNone != (errType = initEncParams())) {
+        ALOGE("Failed to initialized encoder params");
+        mSignalledError = true;
+        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+        return errType;
+    }
+
+    AVCEnc_Status err;
+    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
+    if (err != AVCENC_SUCCESS) {
+        ALOGE("Failed to initialize the encoder: %d", err);
+        mSignalledError = true;
+        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+        return OMX_ErrorUndefined;
+    }
+
+    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
+    mSpsPpsHeaderReceived = false;
+    mReadyForNextFrame = true;
+    mIsIDRFrame = false;
+    mStarted = true;
+
+    return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
+    if (!mStarted) {
+        return OMX_ErrorNone;
+    }
+
+    PVAVCCleanUpEncoder(mHandle);
+    releaseOutputBuffers();
+
+    free(mInputFrameData);
+    mInputFrameData = NULL;
+
+    free(mSliceGroup);
+    mSliceGroup = NULL;
+
+    delete mEncParams;
+    mEncParams = NULL;
+
+    delete mHandle;
+    mHandle = NULL;
+
+    mStarted = false;
+
+    return OMX_ErrorNone;
+}
+
+void SoftAVCEncoder::releaseOutputBuffers() {
+    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
+        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
+        buffer->setObserver(NULL);
+        buffer->release();
+    }
+    mOutputBuffers.clear();
+}
+
+void SoftAVCEncoder::initPorts() {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+
+    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
+
+    // 31584 is PV's magic number.  Not sure why.
+    const size_t kOutputBufferSize =
+            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;
+
+    def.nPortIndex = 0;
+    def.eDir = OMX_DirInput;
+    def.nBufferCountMin = kNumBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.nBufferSize = kInputBufferSize;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainVideo;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.nBufferAlignment = 1;
+
+    def.format.video.cMIMEType = const_cast<char *>("video/raw");
+    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
+    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
+    def.format.video.nBitrate = mVideoBitRate;
+    def.format.video.nFrameWidth = mVideoWidth;
+    def.format.video.nFrameHeight = mVideoHeight;
+    def.format.video.nStride = mVideoWidth;
+    def.format.video.nSliceHeight = mVideoHeight;
+
+    addPort(def);
+
+    def.nPortIndex = 1;
+    def.eDir = OMX_DirOutput;
+    def.nBufferCountMin = kNumBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.nBufferSize = kOutputBufferSize;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainVideo;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.nBufferAlignment = 2;
+
+    def.format.video.cMIMEType = const_cast<char *>("video/avc");
+    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
+    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+    def.format.video.xFramerate = (0 << 16);  // Q16 format
+    def.format.video.nBitrate = mVideoBitRate;
+    def.format.video.nFrameWidth = mVideoWidth;
+    def.format.video.nFrameHeight = mVideoHeight;
+    def.format.video.nStride = mVideoWidth;
+    def.format.video.nSliceHeight = mVideoHeight;
+
+    addPort(def);
+}
+
+OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
+        OMX_INDEXTYPE index, OMX_PTR params) {
+    switch (index) {
+        case OMX_IndexParamVideoErrorCorrection:
+        {
+            return OMX_ErrorNotImplemented;
+        }
+
+        case OMX_IndexParamVideoBitrate:
+        {
+            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
+                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
+
+            if (bitRate->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            bitRate->eControlRate = OMX_Video_ControlRateVariable;
+            bitRate->nTargetBitrate = mVideoBitRate;
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamVideoPortFormat:
+        {
+            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+            if (formatParams->nPortIndex > 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (formatParams->nIndex > 1) {
+                return OMX_ErrorNoMore;
+            }
+
+            if (formatParams->nPortIndex == 0) {
+                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+                if (formatParams->nIndex == 0) {
+                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+                } else {
+                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+                }
+            } else {
+                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
+                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamVideoAvc:
+        {
+            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
+                (OMX_VIDEO_PARAM_AVCTYPE *)params;
+
+            if (avcParams->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
+            OMX_U32 omxLevel = AVC_LEVEL2;
+            if (OMX_ErrorNone !=
+                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
+                return OMX_ErrorUndefined;
+            }
+
+            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
+            avcParams->nRefFrames = 1;
+            avcParams->nBFrames = 0;
+            avcParams->bUseHadamard = OMX_TRUE;
+            avcParams->nAllowedPictureTypes =
+                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
+            avcParams->nRefIdx10ActiveMinus1 = 0;
+            avcParams->nRefIdx11ActiveMinus1 = 0;
+            avcParams->bWeightedPPrediction = OMX_FALSE;
+            avcParams->bEntropyCodingCABAC = OMX_FALSE;
+            avcParams->bconstIpred = OMX_FALSE;
+            avcParams->bDirect8x8Inference = OMX_FALSE;
+            avcParams->bDirectSpatialTemporal = OMX_FALSE;
+            avcParams->nCabacInitIdc = 0;
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamVideoProfileLevelQuerySupported:
+        {
+            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
+                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
+
+            if (profileLevel->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            const size_t size =
+                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);
+
+            if (profileLevel->nProfileIndex >= size) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
+            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;
+
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalGetParameter(index, params);
+    }
+}
+
+OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
+        OMX_INDEXTYPE index, const OMX_PTR params) {
+    switch (index) {
+        case OMX_IndexParamVideoErrorCorrection:
+        {
+            return OMX_ErrorNotImplemented;
+        }
+
+        case OMX_IndexParamVideoBitrate:
+        {
+            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
+                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
+
+            if (bitRate->nPortIndex != 1 ||
+                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
+                return OMX_ErrorUndefined;
+            }
+
+            mVideoBitRate = bitRate->nTargetBitrate;
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamPortDefinition:
+        {
+            OMX_PARAM_PORTDEFINITIONTYPE *def =
+                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+            if (def->nPortIndex > 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (def->nPortIndex == 0) {
+                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
+                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
+                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar)) {
+                    return OMX_ErrorUndefined;
+                }
+            } else {
+                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
+                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
+                    return OMX_ErrorUndefined;
+                }
+            }
+
+            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
+            if (OMX_ErrorNone != err) {
+                return err;
+            }
+
+            if (def->nPortIndex == 0) {
+                mVideoWidth = def->format.video.nFrameWidth;
+                mVideoHeight = def->format.video.nFrameHeight;
+                mVideoFrameRate = def->format.video.xFramerate >> 16;
+                mVideoColorFormat = def->format.video.eColorFormat;
+            } else {
+                mVideoBitRate = def->format.video.nBitrate;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamStandardComponentRole:
+        {
+            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+                (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+            if (strncmp((const char *)roleParams->cRole,
+                        "video_encoder.avc",
+                        OMX_MAX_STRINGNAME_SIZE - 1)) {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamVideoPortFormat:
+        {
+            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+            if (formatParams->nPortIndex > 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (formatParams->nIndex > 1) {
+                return OMX_ErrorNoMore;
+            }
+
+            if (formatParams->nPortIndex == 0) {
+                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
+                    ((formatParams->nIndex == 0 &&
+                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
+                    (formatParams->nIndex == 1 &&
+                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar))) {
+                    return OMX_ErrorUndefined;
+                }
+                mVideoColorFormat = formatParams->eColorFormat;
+            } else {
+                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
+                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
+                    return OMX_ErrorUndefined;
+                }
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamVideoAvc:
+        {
+            OMX_VIDEO_PARAM_AVCTYPE *avcType =
+                (OMX_VIDEO_PARAM_AVCTYPE *)params;
+
+            if (avcType->nPortIndex != 1) {
+                return OMX_ErrorUndefined;
+            }
+
+            // PV's AVC encoder only supports baseline profile
+            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
+                avcType->nRefFrames != 1 ||
+                avcType->nBFrames != 0 ||
+                avcType->bUseHadamard != OMX_TRUE ||
+                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
+                avcType->nRefIdx10ActiveMinus1 != 0 ||
+                avcType->nRefIdx11ActiveMinus1 != 0 ||
+                avcType->bWeightedPPrediction != OMX_FALSE ||
+                avcType->bEntropyCodingCABAC != OMX_FALSE ||
+                avcType->bconstIpred != OMX_FALSE ||
+                avcType->bDirect8x8Inference != OMX_FALSE ||
+                avcType->bDirectSpatialTemporal != OMX_FALSE ||
+                avcType->nCabacInitIdc != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
+                return OMX_ErrorUndefined;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalSetParameter(index, params);
+    }
+}
+
+void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
+    if (mSignalledError || mSawInputEOS) {
+        return;
+    }
+
+    if (!mStarted) {
+        if (OMX_ErrorNone != initEncoder()) {
+            return;
+        }
+    }
+
+    List<BufferInfo *> &inQueue = getPortQueue(0);
+    List<BufferInfo *> &outQueue = getPortQueue(1);
+
+    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
+        BufferInfo *inInfo = *inQueue.begin();
+        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+        BufferInfo *outInfo = *outQueue.begin();
+        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+        outHeader->nTimeStamp = 0;
+        outHeader->nFlags = 0;
+        outHeader->nOffset = 0;
+        outHeader->nFilledLen = 0;
+
+        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
+        uint32_t dataLength = outHeader->nAllocLen;
+
+        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
+            // 4 bytes are reserved for holding the start code 0x00000001
+            // of the sequence parameter set at the beginning.
+            outPtr += 4;
+            dataLength -= 4;
+        }
+
+        int32_t type;
+        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;
+
+        // Combine SPS and PPS and place them in the very first output buffer
+        // SPS and PPS are separated by start code 0x00000001
+        // Assume that we have exactly one SPS and exactly one PPS.
+        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
+            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
+            if (encoderStatus == AVCENC_WRONG_STATE) {
+                mSpsPpsHeaderReceived = true;
+                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
+                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
+                outQueue.erase(outQueue.begin());
+                outInfo->mOwnedByUs = false;
+                notifyFillBufferDone(outHeader);
+                return;
+            } else {
+                switch (type) {
+                    case AVC_NALTYPE_SPS:
+                        ++mNumInputFrames;
+                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
+                        outHeader->nFilledLen = 4 + dataLength;
+                        outPtr += (dataLength + 4);  // 4 bytes for next start code
+                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
+                        break;
+                    default:
+                        CHECK_EQ(AVC_NALTYPE_PPS, type);
+                        ++mNumInputFrames;
+                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
+                                "\x00\x00\x00\x01", 4);
+                        outHeader->nFilledLen += (dataLength + 4);
+                        outPtr += (dataLength + 4);
+                        break;
+                }
+            }
+        }
+
+        // Get next input video frame
+        if (mReadyForNextFrame) {
+            // Save the input buffer info so that it can be
+            // passed to an output buffer
+            InputBufferInfo info;
+            info.mTimeUs = inHeader->nTimeStamp;
+            info.mFlags = inHeader->nFlags;
+            mInputBufferInfoVec.push(info);
+            mPrevTimestampUs = inHeader->nTimeStamp;
+
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                mSawInputEOS = true;
+            }
+
+            if (inHeader->nFilledLen > 0) {
+                AVCFrameIO videoInput;
+                memset(&videoInput, 0, sizeof(videoInput));
+                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
+                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
+                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
+                const void *inData = inHeader->pBuffer + inHeader->nOffset;
+                uint8_t *inputData = (uint8_t *) inData;
+
+                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                    ConvertYUV420SemiPlanarToYUV420Planar(
+                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+                    inputData = mInputFrameData;
+                }
+                CHECK(inputData != NULL);
+                videoInput.YCbCr[0] = inputData;
+                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
+                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
+                    ((videoInput.height * videoInput.pitch) >> 2);
+                videoInput.disp_order = mNumInputFrames;
+
+                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
+                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
+                    mReadyForNextFrame = false;
+                    ++mNumInputFrames;
+                    if (encoderStatus == AVCENC_NEW_IDR) {
+                        mIsIDRFrame = true;
+                    }
+                } else {
+                    if (encoderStatus < AVCENC_SUCCESS) {
+                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
+                        mSignalledError = true;
+                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+                        return;
+                    } else {
+                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
+                        inQueue.erase(inQueue.begin());
+                        inInfo->mOwnedByUs = false;
+                        notifyEmptyBufferDone(inHeader);
+                        return;
+                    }
+                }
+            }
+        }
+
+        // Encode an input video frame
+        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
+        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
+        if (inHeader->nFilledLen > 0) {
+            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
+            if (encoderStatus == AVCENC_SUCCESS) {
+                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
+            } else if (encoderStatus == AVCENC_PICTURE_READY) {
+                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
+                if (mIsIDRFrame) {
+                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
+                    mIsIDRFrame = false;
+                }
+                mReadyForNextFrame = true;
+                AVCFrameIO recon;
+                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
+                    PVAVCEncReleaseRecon(mHandle, &recon);
+                }
+            } else {
+                dataLength = 0;
+                mReadyForNextFrame = true;
+            }
+
+            if (encoderStatus < AVCENC_SUCCESS) {
+                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
+                mSignalledError = true;
+                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+                return;
+            }
+        } else {
+            dataLength = 0;
+        }
+
+        inQueue.erase(inQueue.begin());
+        inInfo->mOwnedByUs = false;
+        notifyEmptyBufferDone(inHeader);
+
+        outQueue.erase(outQueue.begin());
+        CHECK(!mInputBufferInfoVec.empty());
+        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
+        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
+        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
+        outHeader->nFilledLen = dataLength;
+        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
+        outInfo->mOwnedByUs = false;
+        notifyFillBufferDone(outHeader);
+    }
+}
+
+int32_t SoftAVCEncoder::allocOutputBuffers(
+        unsigned int sizeInMbs, unsigned int numBuffers) {
+    CHECK(mOutputBuffers.isEmpty());
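+    // Each macroblock covers 16x16 luma samples; at 1.5 bytes per pixel for
+    // YUV420 that is 384 bytes per macroblock, i.e. (sizeInMbs << 7) * 3.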
+    size_t frameSize = (sizeInMbs << 7) * 3;
+    for (unsigned int i = 0; i <  numBuffers; ++i) {
+        MediaBuffer *buffer = new MediaBuffer(frameSize);
+        buffer->setObserver(this);
+        mOutputBuffers.push(buffer);
+    }
+
+    return 1;
+}
+
+void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
+    CHECK(index >= 0);
+}
+
+int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
+    CHECK(index >= 0);
+    CHECK(index < (int32_t) mOutputBuffers.size());
+    *yuv = (uint8_t *) mOutputBuffers[index]->data();
+
+    return 1;
+}
+
+void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
+    ALOGV("signalBufferReturned: %p", buffer);
+}
+
+}  // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+        const char *name, const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+    return new android::SoftAVCEncoder(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
new file mode 100644
index 0000000..a2587c6
--- /dev/null
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_AVC_ENCODER_H_
+#define SOFT_AVC_ENCODER_H_
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Vector.h>
+
+#include "avcenc_api.h"
+#include "SimpleSoftOMXComponent.h"
+
+namespace android {
+
+struct MediaBuffer;
+
+struct SoftAVCEncoder : public MediaBufferObserver,
+                        public SimpleSoftOMXComponent {
+    SoftAVCEncoder(
+            const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component);
+
+    // Override SimpleSoftOMXComponent methods
+    virtual OMX_ERRORTYPE internalGetParameter(
+            OMX_INDEXTYPE index, OMX_PTR params);
+
+    virtual OMX_ERRORTYPE internalSetParameter(
+            OMX_INDEXTYPE index, const OMX_PTR params);
+
+    virtual void onQueueFilled(OMX_U32 portIndex);
+
+
+    // Implement MediaBufferObserver
+    virtual void signalBufferReturned(MediaBuffer *buffer);
+
+
+    // Callbacks required by PV's encoder
+    int32_t allocOutputBuffers(unsigned int sizeInMbs, unsigned int numBuffers);
+    void    unbindOutputBuffer(int32_t index);
+    int32_t bindOutputBuffer(int32_t index, uint8_t **yuv);
+
+protected:
+    virtual ~SoftAVCEncoder();
+
+private:
+    enum {
+        kNumBuffers = 2,
+    };
+
+    // OMX input buffer's timestamp and flags
+    typedef struct {
+        int64_t mTimeUs;
+        int32_t mFlags;
+    } InputBufferInfo;
+
+    int32_t  mVideoWidth;
+    int32_t  mVideoHeight;
+    int32_t  mVideoFrameRate;
+    int32_t  mVideoBitRate;
+    int32_t  mVideoColorFormat;
+    int32_t  mIDRFrameRefreshIntervalInSec;
+    AVCProfile mAVCEncProfile;
+    AVCLevel   mAVCEncLevel;
+
+    int64_t  mNumInputFrames;
+    int64_t  mPrevTimestampUs;
+    bool     mStarted;
+    bool     mSpsPpsHeaderReceived;
+    bool     mReadyForNextFrame;
+    bool     mSawInputEOS;
+    bool     mSignalledError;
+    bool     mIsIDRFrame;
+
+    tagAVCHandle          *mHandle;
+    tagAVCEncParam        *mEncParams;
+    uint8_t               *mInputFrameData;
+    uint32_t              *mSliceGroup;
+    Vector<MediaBuffer *> mOutputBuffers;
+    Vector<InputBufferInfo> mInputBufferInfoVec;
+
+    void initPorts();
+    OMX_ERRORTYPE initEncParams();
+    OMX_ERRORTYPE initEncoder();
+    OMX_ERRORTYPE releaseEncoder();
+    void releaseOutputBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(SoftAVCEncoder);
+};
+
+}  // namespace android
+
+#endif  // SOFT_AVC_ENCODER_H_
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index d0c7346..9b7bb5a 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -41,6 +41,7 @@
     { "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
     { "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" },
     { "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
+    { "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" },
     { "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
     { "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
     { "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },