Move Codec2-related code from hardware/google/av
Test: None
Bug: 112362730
Change-Id: Ie2f8ff431d65c40333f267ab9877d47089adeea4
diff --git a/media/codec2/components/Android.bp b/media/codec2/components/Android.bp
new file mode 100644
index 0000000..e8176cf
--- /dev/null
+++ b/media/codec2/components/Android.bp
@@ -0,0 +1,3 @@
+subdirs = [
+ "*",
+]
diff --git a/media/codec2/components/aac/Android.bp b/media/codec2/components/aac/Android.bp
new file mode 100644
index 0000000..b70f30a
--- /dev/null
+++ b/media/codec2/components/aac/Android.bp
@@ -0,0 +1,30 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2aacdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: [
+ "C2SoftAacDec.cpp",
+ "DrcPresModeWrap.cpp",
+ ],
+
+ static_libs: [
+ "libFraunhoferAAC",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2aacenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAacEnc.cpp"],
+
+ static_libs: [
+ "libFraunhoferAAC",
+ ],
+}
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
new file mode 100644
index 0000000..c7c8442
--- /dev/null
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -0,0 +1,941 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAacDec"
+#include <log/log.h>
+
+#include <inttypes.h>
+#include <math.h>
+#include <numeric>
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAacDec.h"
+
+#define FILEREAD_MAX_LAYERS 2
+
+#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT 1.0 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum boost of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
+// names of properties that can be used to override the default DRC settings
+#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
+#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
+#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
+#define PROP_DRC_OVERRIDE_EFFECT "ro.aac_drc_effect_type"
+
+namespace android {
+
+class C2SoftAacDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_AAC))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
+ .withFields({C2F(mSampleRate, value).oneOf({
+ 7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
+ })})
+ .withSetter(Setter<decltype(*mSampleRate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(8000, 960000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+
+ addParameter(
+ DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING)
+ .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw))
+ .withFields({C2F(mAacFormat, value).oneOf({
+ C2AacStreamFormatRaw, C2AacStreamFormatAdts
+ })})
+ .withSetter(Setter<decltype(*mAacFormat)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AAC_LC,
+ C2Config::PROFILE_AAC_HE,
+ C2Config::PROFILE_AAC_HE_PS,
+ C2Config::PROFILE_AAC_LD,
+ C2Config::PROFILE_AAC_ELD,
+ C2Config::PROFILE_AAC_ER_SCALABLE,
+ C2Config::PROFILE_AAC_XHE}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_UNUSED
+ })
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE)
+ .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY))
+ .withFields({
+ C2F(mDrcCompressMode, value).oneOf({
+ C2Config::DRC_COMPRESSION_ODM_DEFAULT,
+ C2Config::DRC_COMPRESSION_NONE,
+ C2Config::DRC_COMPRESSION_LIGHT,
+ C2Config::DRC_COMPRESSION_HEAVY})
+ })
+ .withSetter(Setter<decltype(*mDrcCompressMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL)
+ .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL))
+ .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)})
+ .withSetter(Setter<decltype(*mDrcTargetRefLevel)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL)
+ .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL))
+ .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)})
+ .withSetter(Setter<decltype(*mDrcEncTargetLevel)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR)
+ .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST))
+ .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)})
+ .withSetter(Setter<decltype(*mDrcBoostFactor)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR)
+ .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT))
+ .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)})
+ .withSetter(Setter<decltype(*mDrcAttenuationFactor)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE)
+ .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE))
+ .withFields({
+ C2F(mDrcEffectType, value).oneOf({
+ C2Config::DRC_EFFECT_ODM_DEFAULT,
+ C2Config::DRC_EFFECT_OFF,
+ C2Config::DRC_EFFECT_NONE,
+ C2Config::DRC_EFFECT_LATE_NIGHT,
+ C2Config::DRC_EFFECT_NOISY_ENVIRONMENT,
+ C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE,
+ C2Config::DRC_EFFECT_LOW_PLAYBACK_LEVEL,
+ C2Config::DRC_EFFECT_DIALOG_ENHANCEMENT,
+ C2Config::DRC_EFFECT_GENERAL_COMPRESSION})
+ })
+ .withSetter(Setter<decltype(*mDrcEffectType)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; }
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me) {
+ (void)mayBlock;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
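+ // The getters below translate the C2 parameter values into the integer ranges
+ // expected by the FDK DRC wrapper: dB levels become quarter-dB steps (or -1
+ // when unset) and 0..1 factors are scaled to 0..127.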
+ int32_t getDrcCompressMode() const { return mDrcCompressMode->value == C2Config::DRC_COMPRESSION_HEAVY ? 1 : 0; }
+ int32_t getDrcTargetRefLevel() const { return (mDrcTargetRefLevel->value <= 0 ? -mDrcTargetRefLevel->value * 4. + 0.5 : -1); }
+ int32_t getDrcEncTargetLevel() const { return (mDrcEncTargetLevel->value <= 0 ? -mDrcEncTargetLevel->value * 4. + 0.5 : -1); }
+ int32_t getDrcBoostFactor() const { return mDrcBoostFactor->value * 127. + 0.5; }
+ int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. + 0.5; }
+ int32_t getDrcEffectType() const { return mDrcEffectType->value; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+ std::shared_ptr<C2StreamAacFormatInfo::input> mAacFormat;
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamDrcCompressionModeTuning::input> mDrcCompressMode;
+ std::shared_ptr<C2StreamDrcTargetReferenceLevelTuning::input> mDrcTargetRefLevel;
+ std::shared_ptr<C2StreamDrcEncodedTargetLevelTuning::input> mDrcEncTargetLevel;
+ std::shared_ptr<C2StreamDrcBoostFactorTuning::input> mDrcBoostFactor;
+ std::shared_ptr<C2StreamDrcAttenuationFactorTuning::input> mDrcAttenuationFactor;
+ std::shared_ptr<C2StreamDrcEffectTypeTuning::input> mDrcEffectType;
+ // TODO Add : C2StreamAacSbrModeTuning
+};
+
+constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+
+C2SoftAacDec::C2SoftAacDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mAACDecoder(nullptr),
+ mStreamInfo(nullptr),
+ mSignalledError(false),
+ mOutputDelayRingBuffer(nullptr) {
+}
+
+C2SoftAacDec::~C2SoftAacDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftAacDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAacDec::onStop() {
+ drainDecoder();
+ // reset the "configured" state
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+ mBuffersInfo.clear();
+
+ // To make the codec behave the same before and after a reset, we need to invalidate the
+ // streaminfo struct. This does that:
+ mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
+
+ mSignalledError = false;
+
+ return C2_OK;
+}
+
+void C2SoftAacDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftAacDec::onRelease() {
+ if (mAACDecoder) {
+ aacDecoder_Close(mAACDecoder);
+ mAACDecoder = nullptr;
+ }
+ if (mOutputDelayRingBuffer) {
+ delete[] mOutputDelayRingBuffer;
+ mOutputDelayRingBuffer = nullptr;
+ }
+}
+
+status_t C2SoftAacDec::initDecoder() {
+ ALOGV("initDecoder()");
+ status_t status = UNKNOWN_ERROR;
+ mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
+ if (mAACDecoder != nullptr) {
+ mStreamInfo = aacDecoder_GetStreamInfo(mAACDecoder);
+ if (mStreamInfo != nullptr) {
+ status = OK;
+ }
+ }
+
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+ mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+
+ if (mAACDecoder == nullptr) {
+ ALOGE("AAC decoder is null. TODO: Can not call aacDecoder_SetParam in the following code");
+ }
+
+ //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+ //init DRC wrapper
+ mDrcWrap.setDecoderHandle(mAACDecoder);
+ mDrcWrap.submitStreamData(mStreamInfo);
+
+ // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+ // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
+
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
+ int32_t targetRefLevel = mIntf->getDrcTargetRefLevel();
+ ALOGV("AAC decoder using desired DRC target reference level of %d", targetRefLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, (unsigned)targetRefLevel);
+
+ // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
+
+ int32_t attenuationFactor = mIntf->getDrcAttenuationFactor();
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d", attenuationFactor);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, (unsigned)attenuationFactor);
+
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
+ int32_t boostFactor = mIntf->getDrcBoostFactor();
+ ALOGV("AAC decoder using desired DRC boost factor of %d", boostFactor);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, (unsigned)boostFactor);
+
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ int32_t compressMode = mIntf->getDrcCompressMode();
+ ALOGV("AAC decoder using desried DRC heavy compression switch of %d", compressMode);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, (unsigned)compressMode);
+
+ // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+ int32_t encTargetLevel = mIntf->getDrcEncTargetLevel();
+ ALOGV("AAC decoder using encoder-side DRC reference level of %d", encTargetLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, (unsigned)encTargetLevel);
+
+ // AAC_UNIDRC_SET_EFFECT
+ int32_t effectType = mIntf->getDrcEffectType();
+ ALOGV("AAC decoder using MPEG-D DRC effect type %d", effectType);
+ aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, effectType);
+
+ // By default, the decoder creates a 5.1 channel downmix signal.
+ // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+
+ return status;
+}
+
+bool C2SoftAacDec::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+ if (numSamples == 0) {
+ return true;
+ }
+ if (outputDelayRingBufferSpaceLeft() < numSamples) {
+ ALOGE("RING BUFFER WOULD OVERFLOW");
+ return false;
+ }
+ if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+ || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+ }
+
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow C2SoftAacDec::outputDelayRingBufferPutSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+ mOutputDelayRingBufferWritePos++;
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ mOutputDelayRingBufferFilled += numSamples;
+ return true;
+}
+
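+// Reads exactly numSamples samples from the output delay ring buffer (returns -1
+// on underrun). Passing samples == nullptr discards the data instead; this is
+// used for output-delay compensation and for flushing.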
+int32_t C2SoftAacDec::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+
+ if (numSamples > mOutputDelayRingBufferFilled) {
+ ALOGE("RING BUFFER WOULD UNDERRUN");
+ return -1;
+ }
+
+ if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+ || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ if (samples != nullptr) {
+ for (int32_t i = 0; i < numSamples; i++) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+ }
+ } else {
+ mOutputDelayRingBufferReadPos += numSamples;
+ }
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow C2SoftAacDec::outputDelayRingBufferGetSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ if (samples != nullptr) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+ }
+ mOutputDelayRingBufferReadPos++;
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ mOutputDelayRingBufferFilled -= numSamples;
+ return numSamples;
+}
+
+int32_t C2SoftAacDec::outputDelayRingBufferSamplesAvailable() {
+ return mOutputDelayRingBufferFilled;
+}
+
+int32_t C2SoftAacDec::outputDelayRingBufferSpaceLeft() {
+ return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
+
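+// Drains complete decoded frames from the output delay ring buffer and attaches
+// them to the queued work items (via finish() for work that is no longer current).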
+void C2SoftAacDec::drainRingBuffer(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool,
+ bool eos) {
+ while (!mBuffersInfo.empty() && outputDelayRingBufferSamplesAvailable()
+ >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ Info &outInfo = mBuffersInfo.front();
+ ALOGV("outInfo.frameIndex = %" PRIu64, outInfo.frameIndex);
+ int samplesize __unused = mStreamInfo->numChannels * sizeof(int16_t);
+
+ int available = outputDelayRingBufferSamplesAvailable();
+ int numFrames = outInfo.decodedSizes.size();
+ int numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels);
+ if (available < numSamples) {
+ if (eos) {
+ numSamples = available;
+ } else {
+ break;
+ }
+ }
+ ALOGV("%d samples available (%d), or %d frames",
+ numSamples, available, numFrames);
+ ALOGV("getting %d from ringbuffer", numSamples);
+
+ std::shared_ptr<C2LinearBlock> block;
+ std::function<void(const std::unique_ptr<C2Work>&)> fillWork =
+ [&block, numSamples, pool, this]()
+ -> std::function<void(const std::unique_ptr<C2Work>&)> {
+ auto fillEmptyWork = [](
+ const std::unique_ptr<C2Work> &work, c2_status_t err) {
+ work->result = err;
+ C2FrameData &output = work->worklets.front()->output;
+ output.flags = work->input.flags;
+ output.buffers.clear();
+ output.ordinal = work->input.ordinal;
+
+ work->workletsProcessed = 1u;
+ };
+
+ using namespace std::placeholders;
+ if (numSamples == 0) {
+ return std::bind(fillEmptyWork, _1, C2_OK);
+ }
+
+ // TODO: error handling, proper usage, etc.
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(
+ numSamples * sizeof(int16_t), usage, &block);
+ if (err != C2_OK) {
+ ALOGD("failed to fetch a linear block (%d)", err);
+ return std::bind(fillEmptyWork, _1, C2_NO_MEMORY);
+ }
+ C2WriteView wView = block->map().get();
+ // TODO
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(wView.data());
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples);
+ if (ns != numSamples) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ return std::bind(fillEmptyWork, _1, C2_CORRUPTED);
+ }
+ return [buffer = createLinearBuffer(block)](
+ const std::unique_ptr<C2Work> &work) {
+ work->result = C2_OK;
+ C2FrameData &output = work->worklets.front()->output;
+ output.flags = work->input.flags;
+ output.buffers.clear();
+ output.buffers.push_back(buffer);
+ output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ }();
+
+ if (work && work->input.ordinal.frameIndex == c2_cntr64_t(outInfo.frameIndex)) {
+ fillWork(work);
+ } else {
+ finish(outInfo.frameIndex, fillWork);
+ }
+
+ ALOGV("out timestamp %" PRIu64 " / %u", outInfo.timestamp, block ? block->capacity() : 0);
+ mBuffersInfo.pop_front();
+ }
+}
+
+void C2SoftAacDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError) {
+ return;
+ }
+
+ UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
+ UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
+ UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
+
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+ C2ReadView view = mDummyReadView;
+ size_t offset = 0u;
+ size_t size = 0u;
+ if (!work->input.buffers.empty()) {
+ view = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ size = view.capacity();
+ }
+
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
+ //TODO
+#if 0
+ if (mInputBufferCount == 0 && !codecConfig) {
+ ALOGW("first buffer should have FLAG_CODEC_CONFIG set");
+ codecConfig = true;
+ }
+#endif
+ if (codecConfig && size > 0u) {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+ inBufferLength[0] = size;
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_ConfigRaw(mAACDecoder,
+ inBuffer,
+ inBufferLength);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGE("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.buffers.clear();
+ return;
+ }
+
+ Info inInfo;
+ inInfo.frameIndex = work->input.ordinal.frameIndex.peeku();
+ inInfo.timestamp = work->input.ordinal.timestamp.peeku();
+ inInfo.bufferSize = size;
+ inInfo.decodedSizes.clear();
+ while (size > 0u) {
+ ALOGV("size = %zu", size);
+ if (mIntf->isAdts()) {
+ size_t adtsHeaderSize = 0;
+ // skip 30 bits, aac_frame_length follows.
+ // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+
+ const uint8_t *adtsHeader = view.data() + offset;
+
+ bool signalError = false;
+ if (size < 7) {
+ ALOGE("Audio data too short to contain even the ADTS header. "
+ "Got %zu bytes.", size);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ bool protectionAbsent = (adtsHeader[1] & 1);
+
+ unsigned aac_frame_length =
+ ((adtsHeader[3] & 3) << 11)
+ | (adtsHeader[4] << 3)
+ | (adtsHeader[5] >> 5);
+
+ if (size < aac_frame_length) {
+ ALOGE("Not enough audio data for the complete frame. "
+ "Got %zu bytes, frame size according to the ADTS "
+ "header is %u bytes.",
+ size, aac_frame_length);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ adtsHeaderSize = (protectionAbsent ? 7 : 9);
+ if (aac_frame_length < adtsHeaderSize) {
+ signalError = true;
+ } else {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(adtsHeader + adtsHeaderSize);
+ inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+ offset += adtsHeaderSize;
+ size -= adtsHeaderSize;
+ }
+ }
+ }
+
+ if (signalError) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+ inBufferLength[0] = size;
+ }
+
+ // Fill and decode
+ bytesValid[0] = inBufferLength[0];
+
+ INT prevSampleRate = mStreamInfo->sampleRate;
+ INT prevNumChannels = mStreamInfo->numChannels;
+
+ aacDecoder_Fill(mAACDecoder,
+ inBuffer,
+ inBufferLength,
+ bytesValid);
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+ size -= inBufferUsedLength;
+ offset += inBufferUsedLength;
+
+ AAC_DECODER_ERROR decoderErr;
+ do {
+ if (outputDelayRingBufferSpaceLeft() <
+ (mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ ALOGV("skipping decode: not enough space left in ringbuffer");
+ // discard buffer
+ size = 0;
+ break;
+ }
+
+ int numConsumed = mStreamInfo->numTotalBytes;
+ decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ 0 /* flags */);
+
+ numConsumed = mStreamInfo->numTotalBytes - numConsumed;
+
+ if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+ break;
+ }
+ inInfo.decodedSizes.push_back(numConsumed);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ if (bytesValid[0] != 0) {
+ ALOGE("bytesValid[0] != 0 should never happen");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+ if (decoderErr == AAC_DEC_OK) {
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
+
+ memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
+
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ // Discard input buffer.
+ size = 0;
+
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+
+ // After an error, replace bufferSize with the sum of the
+ // decodedSizes to resynchronize the in/out lists.
+ inInfo.decodedSizes.pop_back();
+ inInfo.bufferSize = std::accumulate(
+ inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0);
+
+ // fall through
+ }
+
+ /*
+ * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+ * or implicitly, according to the MPEG-4 spec. AAC+/eAAC+ is a
+ * dual-rate system, and the sampling rate of the final output is
+ * doubled compared with the core AAC decoder sampling rate.
+ *
+ * Explicit signalling is done by explicitly defining SBR audio object
+ * type in the bitstream. Implicit signalling is done by embedding
+ * SBR content in AAC extension payload specific to SBR, and hence
+ * requires an AAC decoder to perform pre-checks on actual audio frames.
+ *
+ * Thus, we cannot say for sure whether a stream is
+ * AAC+/eAAC+ until the first data frame is decoded.
+ */
+ if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+ // if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
+ ALOGD("Invalid AAC stream");
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ // mSignalledError = true;
+ // }
+ } else if ((mStreamInfo->sampleRate != prevSampleRate) ||
+ (mStreamInfo->numChannels != prevNumChannels)) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mStreamInfo->sampleRate,
+ prevNumChannels, mStreamInfo->numChannels);
+
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, mStreamInfo->sampleRate);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, mStreamInfo->numChannels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+ if (err == OK) {
+ // TODO: this does not handle the case where the values are
+ // altered during config.
+ C2FrameData &output = work->worklets.front()->output;
+ output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
+ output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ ALOGV("size = %zu", size);
+ } while (decoderErr == AAC_DEC_OK);
+ }
+
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ mBuffersInfo.push_back(std::move(inInfo));
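+ // The output for this input is produced later from the delay ring buffer, so
+ // no worklet is marked as processed yet.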
+ work->workletsProcessed = 0u;
+ if (!eos && mOutputDelayCompensated < outputDelay) {
+ // discard outputDelay at the beginning
+ int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+ int32_t discard = outputDelayRingBufferSamplesAvailable();
+ if (discard > toCompensate) {
+ discard = toCompensate;
+ }
+ int32_t discarded = outputDelayRingBufferGetSamples(nullptr, discard);
+ mOutputDelayCompensated += discarded;
+ return;
+ }
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ } else {
+ drainRingBuffer(work, pool, false /* not EOS */);
+ }
+}
+
+c2_status_t C2SoftAacDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ bool eos = (drainMode == DRAIN_COMPONENT_WITH_EOS);
+
+ drainDecoder();
+ drainRingBuffer(work, pool, eos);
+
+ if (eos) {
+ auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ while (mBuffersInfo.size() > 1u) {
+ finish(mBuffersInfo.front().frameIndex, fillEmptyWork);
+ mBuffersInfo.pop_front();
+ }
+ if (work && work->workletsProcessed == 0u) {
+ fillEmptyWork(work);
+ }
+ mBuffersInfo.clear();
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAacDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+c2_status_t C2SoftAacDec::onFlush_sm() {
+ drainDecoder();
+ mBuffersInfo.clear();
+
+ int avail;
+ while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
+ if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ avail = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ }
+ int32_t ns = outputDelayRingBufferGetSamples(nullptr, avail);
+ if (ns != avail) {
+ ALOGW("not a complete frame of samples available");
+ break;
+ }
+ }
+ mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
+
+ return C2_OK;
+}
+
+void C2SoftAacDec::drainDecoder() {
+ // flush decoder until outputDelay is compensated
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
+}
+
+class C2SoftAacDecFactory : public C2ComponentFactory {
+public:
+ C2SoftAacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAacDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAacDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAacDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAacDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
new file mode 100644
index 0000000..965c29e
--- /dev/null
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AAC_DEC_H_
+#define ANDROID_C2_SOFT_AAC_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
+
+namespace android {
+
+struct C2SoftAacDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAacDec(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftAacDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ enum {
+ kNumDelayBlocksMax = 8,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+
+ HANDLE_AACDECODER mAACDecoder;
+ CStreamInfo *mStreamInfo;
+ bool mIsFirst;
+ size_t mInputBufferCount;
+ size_t mOutputBufferCount;
+ bool mSignalledError;
+ struct Info {
+ uint64_t frameIndex;
+ size_t bufferSize;
+ uint64_t timestamp;
+ std::vector<int32_t> decodedSizes;
+ };
+ std::list<Info> mBuffersInfo;
+
+ CDrcPresModeWrapper mDrcWrap;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+ void drainDecoder();
+
+ void drainRingBuffer(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool,
+ bool eos);
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+
+// delay compensation
+ bool mEndOfInput;
+ bool mEndOfOutput;
+ int32_t mOutputDelayCompensated;
+ int32_t mOutputDelayRingBufferSize;
+ short *mOutputDelayRingBuffer;
+ int32_t mOutputDelayRingBufferWritePos;
+ int32_t mOutputDelayRingBufferReadPos;
+ int32_t mOutputDelayRingBufferFilled;
+ bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferSamplesAvailable();
+ int32_t outputDelayRingBufferSpaceLeft();
+
+ C2_DO_NOT_COPY(C2SoftAacDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AAC_DEC_H_
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
new file mode 100644
index 0000000..aeefbdb
--- /dev/null
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -0,0 +1,602 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAacEnc"
+#include <utils/Log.h>
+
+#include <inttypes.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include "C2SoftAacEnc.h"
+
+namespace android {
+
+class C2SoftAacEnc::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_AAC))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::input(0u, 44100))
+ .withFields({C2F(mSampleRate, value).oneOf({
+ 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
+ })})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 6)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(8000, 960000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .calculatedAs(MaxBufSizeCalculator, mChannelCount)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(0u,
+ C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AAC_LC,
+ C2Config::PROFILE_AAC_HE,
+ C2Config::PROFILE_AAC_HE_PS,
+ C2Config::PROFILE_AAC_LD,
+ C2Config::PROFILE_AAC_ELD}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_UNUSED
+ })
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+ }
+
+ uint32_t getSampleRate() const { return mSampleRate->value; }
+ uint32_t getChannelCount() const { return mChannelCount->value; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output> &me) {
+ (void)mayBlock;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R MaxBufSizeCalculator(
+ bool mayBlock,
+ C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamChannelCountInfo::input> &channelCount) {
+ (void)mayBlock;
+ me.set().value = 1024 * sizeof(short) * channelCount.v.value;
+ return C2R::Ok();
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+};
+
+constexpr char COMPONENT_NAME[] = "c2.android.aac.encoder";
+
+C2SoftAacEnc::C2SoftAacEnc(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mAACEncoder(nullptr),
+ mSBRMode(-1),
+ mSBRRatio(0),
+ mAACProfile(AOT_AAC_LC),
+ mNumBytesPerInputFrame(0u),
+ mOutBufferSize(0u),
+ mSentCodecSpecificData(false),
+ mInputSize(0),
+ mInputTimeUs(-1ll),
+ mSignalledError(false),
+ mOutIndex(0u) {
+}
+
+C2SoftAacEnc::~C2SoftAacEnc() {
+ onReset();
+}
+
+c2_status_t C2SoftAacEnc::onInit() {
+ status_t err = initEncoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+status_t C2SoftAacEnc::initEncoder() {
+ if (AACENC_OK != aacEncOpen(&mAACEncoder, 0, 0)) {
+ ALOGE("Failed to init AAC encoder");
+ return UNKNOWN_ERROR;
+ }
+ return setAudioParams();
+}
+
+c2_status_t C2SoftAacEnc::onStop() {
+ mSentCodecSpecificData = false;
+ mInputSize = 0u;
+ mInputTimeUs = -1ll;
+ mSignalledError = false;
+ return C2_OK;
+}
+
+void C2SoftAacEnc::onReset() {
+ (void)onStop();
+ aacEncClose(&mAACEncoder);
+}
+
+void C2SoftAacEnc::onRelease() {
+ // no-op
+}
+
+c2_status_t C2SoftAacEnc::onFlush_sm() {
+ mSentCodecSpecificData = false;
+ mInputSize = 0u;
+ return C2_OK;
+}
+
+static CHANNEL_MODE getChannelMode(uint32_t nChannels) {
+ CHANNEL_MODE chMode = MODE_INVALID;
+ switch (nChannels) {
+ case 1: chMode = MODE_1; break;
+ case 2: chMode = MODE_2; break;
+ case 3: chMode = MODE_1_2; break;
+ case 4: chMode = MODE_1_2_1; break;
+ case 5: chMode = MODE_1_2_2; break;
+ case 6: chMode = MODE_1_2_2_1; break;
+ default: chMode = MODE_INVALID;
+ }
+ return chMode;
+}
+
+//static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) {
+// if (profile == OMX_AUDIO_AACObjectLC) {
+// return AOT_AAC_LC;
+// } else if (profile == OMX_AUDIO_AACObjectHE) {
+// return AOT_SBR;
+// } else if (profile == OMX_AUDIO_AACObjectHE_PS) {
+// return AOT_PS;
+// } else if (profile == OMX_AUDIO_AACObjectLD) {
+// return AOT_ER_AAC_LD;
+// } else if (profile == OMX_AUDIO_AACObjectELD) {
+// return AOT_ER_AAC_ELD;
+// } else {
+// ALOGW("Unsupported AAC profile - defaulting to AAC-LC");
+// return AOT_AAC_LC;
+// }
+//}
+
+status_t C2SoftAacEnc::setAudioParams() {
+ // We call this whenever sample rate, number of channels, bitrate or SBR mode change
+ // in response to setParameter calls.
+
+ ALOGV("setAudioParams: %u Hz, %u channels, %u bps, %i sbr mode, %i sbr ratio",
+ mIntf->getSampleRate(), mIntf->getChannelCount(), mIntf->getBitrate(), mSBRMode, mSBRRatio);
+
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_AOT, mAACProfile)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SAMPLERATE, mIntf->getSampleRate())) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_BITRATE, mIntf->getBitrate())) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CHANNELMODE,
+ getChannelMode(mIntf->getChannelCount()))) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_TRANSMUX, TT_MP4_RAW)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+
+ if (mSBRMode != -1 && mAACProfile == AOT_ER_AAC_ELD) {
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, mSBRMode)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* SBR ratio parameter configurations:
+ 0: Default configuration wherein SBR ratio is configured depending on audio object type by
+ the FDK.
+ 1: Downsampled SBR (default for ELD)
+ 2: Dualrate SBR (default for HE-AAC)
+ */
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_RATIO, mSBRRatio)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+void C2SoftAacEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError) {
+ return;
+ }
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+
+ uint32_t sampleRate = mIntf->getSampleRate();
+ uint32_t channelCount = mIntf->getChannelCount();
+
+ if (!mSentCodecSpecificData) {
+ // The very first thing we want to output is the codec specific
+ // data.
+
+ if (AACENC_OK != aacEncEncode(mAACEncoder, nullptr, nullptr, nullptr, nullptr)) {
+ ALOGE("Unable to initialize encoder for profile / sample-rate / bit-rate / channels");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ uint32_t bitrate = mIntf->getBitrate();
+ uint32_t actualBitRate = aacEncoder_GetParam(mAACEncoder, AACENC_BITRATE);
+ if (bitrate != actualBitRate) {
+ ALOGW("Requested bitrate %u unsupported, using %u", bitrate, actualBitRate);
+ }
+
+ AACENC_InfoStruct encInfo;
+ if (AACENC_OK != aacEncInfo(mAACEncoder, &encInfo)) {
+ ALOGE("Failed to get AAC encoder info");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(encInfo.confSize, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, encInfo.confBuf, encInfo.confSize);
+ ALOGV("put csd");
+#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+ hexdump(csd->m.value, csd->flexCount());
+#endif
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+
+ mOutBufferSize = encInfo.maxOutBufBytes;
+ mNumBytesPerInputFrame = encInfo.frameLength * channelCount * sizeof(int16_t);
+ mInputTimeUs = work->input.ordinal.timestamp;
+
+ mSentCodecSpecificData = true;
+ }
+
+ uint8_t temp[1];
+ C2ReadView view = mDummyReadView;
+ const uint8_t *data = temp;
+ size_t capacity = 0u;
+ if (!work->input.buffers.empty()) {
+ view = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ data = view.data();
+ capacity = view.capacity();
+ }
+
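+ // Number of encoded frames this input is expected to produce; on EOS round up
+ // so that the trailing partial frame is also flushed.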
+ size_t numFrames = (capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
+ / mNumBytesPerInputFrame;
+ ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu mNumBytesPerInputFrame = %u",
+ capacity, mInputSize, numFrames, mNumBytesPerInputFrame);
+
+ std::shared_ptr<C2LinearBlock> block;
+ std::shared_ptr<C2Buffer> buffer;
+ std::unique_ptr<C2WriteView> wView;
+ uint8_t *outPtr = temp;
+ size_t outAvailable = 0u;
+ uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
+
+ AACENC_InArgs inargs;
+ AACENC_OutArgs outargs;
+ memset(&inargs, 0, sizeof(inargs));
+ memset(&outargs, 0, sizeof(outargs));
+ inargs.numInSamples = capacity / sizeof(int16_t);
+
+ void* inBuffer[] = { (unsigned char *)data };
+ INT inBufferIds[] = { IN_AUDIO_DATA };
+ INT inBufferSize[] = { (INT)capacity };
+ INT inBufferElSize[] = { sizeof(int16_t) };
+
+ AACENC_BufDesc inBufDesc;
+ inBufDesc.numBufs = sizeof(inBuffer) / sizeof(void*);
+ inBufDesc.bufs = (void**)&inBuffer;
+ inBufDesc.bufferIdentifiers = inBufferIds;
+ inBufDesc.bufSizes = inBufferSize;
+ inBufDesc.bufElSizes = inBufferElSize;
+
+ void* outBuffer[] = { outPtr };
+ INT outBufferIds[] = { OUT_BITSTREAM_DATA };
+ INT outBufferSize[] = { 0 };
+ INT outBufferElSize[] = { sizeof(UCHAR) };
+
+ AACENC_BufDesc outBufDesc;
+ outBufDesc.numBufs = sizeof(outBuffer) / sizeof(void*);
+ outBufDesc.bufs = (void**)&outBuffer;
+ outBufDesc.bufferIdentifiers = outBufferIds;
+ outBufDesc.bufSizes = outBufferSize;
+ outBufDesc.bufElSizes = outBufferElSize;
+
+ AACENC_ERROR encoderErr = AACENC_OK;
+
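+ // Helper functor that fills a C2Work item with the given flags, ordinal and
+ // optional output buffer; used for both cloned (incomplete) and final work.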
+ class FillWork {
+ public:
+ FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+ const std::shared_ptr<C2Buffer> &buffer)
+ : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {
+ }
+ ~FillWork() = default;
+
+ void operator()(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = mOrdinal;
+ work->workletsProcessed = 1u;
+ work->result = C2_OK;
+ if (mBuffer) {
+ work->worklets.front()->output.buffers.push_back(mBuffer);
+ }
+ ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+ mOrdinal.timestamp.peekll(),
+ mOrdinal.frameIndex.peekll(),
+ mBuffer ? "" : "o");
+ }
+
+ private:
+ const uint32_t mFlags;
+ const C2WorkOrdinalStruct mOrdinal;
+ const std::shared_ptr<C2Buffer> mBuffer;
+ };
+
+ C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+
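+ // Keep feeding the encoder until all input samples are consumed; each encoded
+ // access unit for this input (except the last) is sent in a cloned work item.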
+ while (encoderErr == AACENC_OK && inargs.numInSamples > 0) {
+ if (numFrames && !block) {
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock failed : err = %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+
+ wView.reset(new C2WriteView(block->map().get()));
+ outPtr = wView->data();
+ outAvailable = wView->size();
+ --numFrames;
+ }
+
+ memset(&outargs, 0, sizeof(outargs));
+
+ outBuffer[0] = outPtr;
+ outBufferSize[0] = outAvailable;
+
+ encoderErr = aacEncEncode(mAACEncoder,
+ &inBufDesc,
+ &outBufDesc,
+ &inargs,
+ &outargs);
+
+ if (encoderErr == AACENC_OK) {
+ if (buffer) {
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mInputTimeUs;
+ cloneAndSend(
+ inputIndex,
+ work,
+ FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+ buffer.reset();
+ }
+
+ if (outargs.numOutBytes > 0) {
+ mInputSize = 0;
+ int consumed = ((capacity / sizeof(int16_t)) - inargs.numInSamples);
+ mInputTimeUs = work->input.ordinal.timestamp
+ + (consumed * 1000000ll / channelCount / sampleRate);
+ buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
+#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+ hexdump(outPtr, std::min(outargs.numOutBytes, 256));
+#endif
+ outPtr = temp;
+ outAvailable = 0;
+ block.reset();
+ } else {
+ mInputSize += outargs.numInSamples * sizeof(int16_t);
+ }
+
+ if (outargs.numInSamples > 0) {
+ inBuffer[0] = (int16_t *)inBuffer[0] + outargs.numInSamples;
+ inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
+ inargs.numInSamples -= outargs.numInSamples;
+ }
+ }
+ ALOGV("encoderErr = %d mInputSize = %zu inargs.numInSamples = %d, mInputTimeUs = %lld",
+ encoderErr, mInputSize, inargs.numInSamples, mInputTimeUs.peekll());
+ }
+
+ if (eos && inBufferSize[0] > 0) {
+ if (numFrames && !block) {
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock failed : err = %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+
+ wView.reset(new C2WriteView(block->map().get()));
+ outPtr = wView->data();
+ outAvailable = wView->size();
+ --numFrames;
+ }
+
+ memset(&outargs, 0, sizeof(outargs));
+
+ outBuffer[0] = outPtr;
+ outBufferSize[0] = outAvailable;
+
+ // Flush
+ inargs.numInSamples = -1;
+
+ (void)aacEncEncode(mAACEncoder,
+ &inBufDesc,
+ &outBufDesc,
+ &inargs,
+ &outargs);
+ }
+
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mInputTimeUs;
+ FillWork((C2FrameData::flags_t)(eos ? C2FrameData::FLAG_END_OF_STREAM : 0),
+ outOrdinal, buffer)(work);
+}
+
+c2_status_t C2SoftAacEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ switch (drainMode) {
+ case DRAIN_COMPONENT_NO_EOS:
+ [[fallthrough]];
+ case NO_DRAIN:
+ // no-op
+ return C2_OK;
+ case DRAIN_CHAIN:
+ return C2_OMITTED;
+ case DRAIN_COMPONENT_WITH_EOS:
+ break;
+ default:
+ return C2_BAD_VALUE;
+ }
+
+ (void)pool;
+ mSentCodecSpecificData = false;
+ mInputSize = 0u;
+
+ // TODO: we don't have any pending work at this time to drain.
+ return C2_OK;
+}
+
+class C2SoftAacEncFactory : public C2ComponentFactory {
+public:
+ C2SoftAacEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAacEnc(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAacEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAacEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAacEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAacEncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAacEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
new file mode 100644
index 0000000..82fb438
--- /dev/null
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AAC_ENC_H_
+#define ANDROID_C2_SOFT_AAC_ENC_H_
+
+#include <atomic>
+
+#include <SimpleC2Component.h>
+
+#include "aacenc_lib.h"
+
+namespace android {
+
+class C2SoftAacEnc : public SimpleC2Component {
+public:
+ class IntfImpl;
+
+ C2SoftAacEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftAacEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ std::shared_ptr<IntfImpl> mIntf;
+
+ HANDLE_AACENCODER mAACEncoder;
+
+ int32_t mSBRMode;
+ int32_t mSBRRatio;
+ AUDIO_OBJECT_TYPE mAACProfile;
+ UINT mNumBytesPerInputFrame;
+ UINT mOutBufferSize;
+
+ bool mSentCodecSpecificData;
+ size_t mInputSize;
+ c2_cntr64_t mInputTimeUs;
+
+ bool mSignalledError;
+ std::atomic_uint64_t mOutIndex;
+
+ status_t initEncoder();
+
+ status_t setAudioParams();
+
+ C2_DO_NOT_COPY(C2SoftAacEnc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AAC_ENC_H_
diff --git a/media/codec2/components/aac/DrcPresModeWrap.cpp b/media/codec2/components/aac/DrcPresModeWrap.cpp
new file mode 100644
index 0000000..5b9aebc
--- /dev/null
+++ b/media/codec2/components/aac/DrcPresModeWrap.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "DrcPresModeWrap.h"
+
+#include <assert.h>
+
+#define LOG_TAG "C2SoftAacDrcWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+//#define DRC_PRES_MODE_WRAP_DEBUG
+
+#define GPM_ENCODER_TARGET_LEVEL 64
+#define MAX_TARGET_LEVEL 40
+
+CDrcPresModeWrapper::CDrcPresModeWrapper()
+{
+ mDataUpdate = true;
+
+ /* Data from streamInfo. */
+ /* Initialized to the same values as in the aac decoder */
+ mStreamPRL = -1;
+ mStreamDRCPresMode = -1;
+ mStreamNrAACChan = 0;
+ mStreamNrOutChan = 0;
+
+ /* Desired values (set by user). */
+ /* Initialized to the same values as in the aac decoder */
+ mDesTarget = -1;
+ mDesAttFactor = 0;
+ mDesBoostFactor = 0;
+ mDesHeavy = 0;
+
+ mEncoderTarget = -1;
+
+ /* Values from last time. */
+ /* Initialized to the same values as the desired values */
+ mLastTarget = -1;
+ mLastAttFactor = 0;
+ mLastBoostFactor = 0;
+ mLastHeavy = 0;
+}
+
+CDrcPresModeWrapper::~CDrcPresModeWrapper()
+{
+}
+
+void
+CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle)
+{
+ mHandleDecoder = handle;
+}
+
+void
+CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo)
+{
+ assert(pStreamInfo);
+
+ if (mStreamPRL != pStreamInfo->drcProgRefLev) {
+ mStreamPRL = pStreamInfo->drcProgRefLev;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL);
+#endif
+ }
+
+ if (mStreamDRCPresMode != pStreamInfo->drcPresMode) {
+ mStreamDRCPresMode = pStreamInfo->drcPresMode;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode);
+#endif
+ }
+
+ if (mStreamNrAACChan != pStreamInfo->aacNumChannels) {
+ mStreamNrAACChan = pStreamInfo->aacNumChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan);
+#endif
+ }
+
+ if (mStreamNrOutChan != pStreamInfo->numChannels) {
+ mStreamNrOutChan = pStreamInfo->numChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan);
+#endif
+ }
+
+
+ if (mStreamNrOutChan < mStreamNrAACChan) {
+ mIsDownmix = true;
+ } else {
+ mIsDownmix = false;
+ }
+
+ if (mIsDownmix && (mStreamNrOutChan == 1)) {
+ mIsMonoDownmix = true;
+ } else {
+ mIsMonoDownmix = false;
+ }
+
+ if (mIsDownmix && (mStreamNrOutChan == 2)) {
+ mIsStereoDownmix = true;
+ } else {
+ mIsStereoDownmix = false;
+ }
+
+}
+
+void
+CDrcPresModeWrapper::setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value)
+{
+ switch (param) {
+ case DRC_PRES_MODE_WRAP_DESIRED_TARGET:
+ mDesTarget = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR:
+ mDesAttFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR:
+ mDesBoostFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_HEAVY:
+ mDesHeavy = value;
+ break;
+ case DRC_PRES_MODE_WRAP_ENCODER_TARGET:
+ mEncoderTarget = value;
+ break;
+ default:
+ break;
+ }
+ mDataUpdate = true;
+}
+
+void
+CDrcPresModeWrapper::update()
+{
+ // Get Data from Decoder
+ int progRefLevel = mStreamPRL;
+ int drcPresMode = mStreamDRCPresMode;
+
+ // by default, do as desired
+ int newTarget = mDesTarget;
+ int newAttFactor = mDesAttFactor;
+ int newBoostFactor = mDesBoostFactor;
+ int newHeavy = mDesHeavy;
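+ // Note: target levels here are expressed in 0.25 dB steps below full scale,
+ // so e.g. 124 corresponds to -31 dB, 92 to -23 dB and 40 to -10 dB.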
+
+ if (mDataUpdate) {
+ // sanity check
+ if (mDesTarget < MAX_TARGET_LEVEL){
+ mDesTarget = MAX_TARGET_LEVEL; // limit target level to -10 dB or below
+ newTarget = MAX_TARGET_LEVEL;
+ }
+
+ if (mEncoderTarget != -1) {
+ if (mDesTarget<124) { // if target level > -31 dB
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ // no stereo or mono downmix: calculate a scaled amount of light DRC
+ /* use as little compression as possible */
+ newAttFactor = 0;
+ newBoostFactor = 0;
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level
+ // mEncoderTarget > target level > PRL
+ int calcFactor;
+ float calcFactor_norm;
+ // 0.0f < calcFactor_norm < 1.0f
+ calcFactor_norm = (float)(mDesTarget - progRefLevel) /
+ (float)(mEncoderTarget - progRefLevel);
+ calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127
+ // calcFactor is the lower limit
+ newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor;
+ // newAttFactor will always equal calcFactor here, as it was set to 0 above.
+ newBoostFactor = newAttFactor;
+ } else {
+ /* target level > mEncoderTarget > PRL */
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ newBoostFactor = 127;
+ }
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget<92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else {
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB: light DRC only needed if we perform downmixing
+ if (mIsDownmix) { // we do downmixing
+ newAttFactor = 127;
+ }
+ }
+ }
+ else { // encoder target level is not known
+
+ // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ if (mStreamNrAACChan > 6) {
+ drcPresMode = 0;
+ }
+
+ switch (drcPresMode) {
+ case 0:
+ default: // presentation mode not indicated
+ {
+
+ if (mDesTarget<124) { // if target level > -31 dB
+ // no stereo or mono downmixing
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127; // at least, use light compression
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget < 92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else{
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ // Presentation mode 1 and 2 according to ETSI TS 101 154:
+ // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding
+ // in Broadcasting Applications based on the MPEG-2 Transport Stream,
+ // section C.5.4., "Decoding", and Table C.33
+ // ISO DRC -> newHeavy = 0 (Use light compression, MPEG-style)
+ // Compression_value -> newHeavy = 1 (Use heavy compression, DVB-style)
+ // scaling restricted -> newAttFactor = 127
+
+ case 1: // presentation mode 1, Light:-31/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ newHeavy = 1;
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ case 2: // presentation mode 2, Light:-23/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ if (mIsMonoDownmix) { // if mono downmix
+ newHeavy = 1;
+ } else {
+ newHeavy = 0;
+ newAttFactor = 127;
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ newHeavy = 0;
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ } // switch()
+ } // if (mEncoderTarget != -1)
+
+ // sanity check again
+ if (newHeavy == 1) {
+ newBoostFactor = 127; // not strictly needed, as the decoder would apply the same anyway
+ newAttFactor = 127;
+ }
+
+ // update the decoder
+ if (newTarget != mLastTarget) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget);
+ mLastTarget = newTarget;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newTarget != mDesTarget)
+ ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget);
+ else
+ ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget);
+#endif
+ }
+
+ if (newAttFactor != mLastAttFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor);
+ mLastAttFactor = newAttFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newAttFactor != mDesAttFactor)
+ ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor);
+#endif
+ }
+
+ if (newBoostFactor != mLastBoostFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor);
+ mLastBoostFactor = newBoostFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newBoostFactor != mDesBoostFactor)
+ ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n",
+ newBoostFactor, mDesBoostFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor);
+#endif
+ }
+
+ if (newHeavy != mLastHeavy) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy);
+ mLastHeavy = newHeavy;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newHeavy != mDesHeavy)
+ ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n",
+ newHeavy, mDesHeavy);
+ else
+ ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy);
+#endif
+ }
+
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget,
+ newAttFactor, newBoostFactor, newHeavy);
+#endif
+ mDataUpdate = false;
+
+ } // if (mDataUpdate)
+}
diff --git a/media/codec2/components/aac/DrcPresModeWrap.h b/media/codec2/components/aac/DrcPresModeWrap.h
new file mode 100644
index 0000000..f0b6cf2
--- /dev/null
+++ b/media/codec2/components/aac/DrcPresModeWrap.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include "aacdecoder_lib.h"
+
+typedef enum
+{
+ DRC_PRES_MODE_WRAP_DESIRED_TARGET = 0x0000,
+ DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR = 0x0001,
+ DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR = 0x0002,
+ DRC_PRES_MODE_WRAP_DESIRED_HEAVY = 0x0003,
+ DRC_PRES_MODE_WRAP_ENCODER_TARGET = 0x0004
+} DRC_PRES_MODE_WRAP_PARAM;
+
+
+class CDrcPresModeWrapper {
+public:
+ CDrcPresModeWrapper();
+ ~CDrcPresModeWrapper();
+ void setDecoderHandle(const HANDLE_AACDECODER handle);
+ void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value);
+ void submitStreamData(CStreamInfo*);
+ void update();
+
+protected:
+ HANDLE_AACDECODER mHandleDecoder;
+ int mDesTarget;
+ int mDesAttFactor;
+ int mDesBoostFactor;
+ int mDesHeavy;
+
+ int mEncoderTarget;
+
+ int mLastTarget;
+ int mLastAttFactor;
+ int mLastBoostFactor;
+ int mLastHeavy;
+
+ SCHAR mStreamPRL;
+ SCHAR mStreamDRCPresMode;
+ INT mStreamNrAACChan;
+ INT mStreamNrOutChan;
+
+ bool mIsDownmix;
+ bool mIsMonoDownmix;
+ bool mIsStereoDownmix;
+
+ bool mDataUpdate;
+};
diff --git a/media/codec2/components/aac/MODULE_LICENSE_APACHE2 b/media/codec2/components/aac/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/aac/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/aac/NOTICE b/media/codec2/components/aac/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/aac/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/aac/patent_disclaimer.txt b/media/codec2/components/aac/patent_disclaimer.txt
new file mode 100644
index 0000000..b4bf11d
--- /dev/null
+++ b/media/codec2/components/aac/patent_disclaimer.txt
@@ -0,0 +1,9 @@
+
+THIS IS NOT A GRANT OF PATENT RIGHTS.
+
+Google makes no representation or warranty that the codecs for which
+source code is made available hereunder are unencumbered by
+third-party patents. Those intending to use this source code in
+hardware or software products are advised that implementations of
+these codecs, including in open source software or shareware, may
+require patent licenses from the relevant patent holders.
diff --git a/media/codec2/components/amr_nb_wb/Android.bp b/media/codec2/components/amr_nb_wb/Android.bp
new file mode 100644
index 0000000..764b3db
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/Android.bp
@@ -0,0 +1,77 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2amrnbdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAmrDec.cpp"],
+
+ cflags: [
+ "-DAMRNB",
+ ],
+
+ static_libs: [
+ "libstagefright_amrnbdec",
+ "libstagefright_amrwbdec",
+ ],
+
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2amrwbdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAmrDec.cpp"],
+
+ static_libs: [
+ "libstagefright_amrnbdec",
+ "libstagefright_amrwbdec",
+ ],
+
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2amrnbenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAmrNbEnc.cpp"],
+
+ static_libs: [
+ "libstagefright_amrnbenc",
+ ],
+
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2amrwbenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAmrWbEnc.cpp"],
+
+ static_libs: [
+ "libstagefright_amrwbenc",
+ ],
+
+ shared_libs: [
+ "libstagefright_enc_common",
+ ],
+}
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
new file mode 100644
index 0000000..c591e21
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -0,0 +1,438 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#ifdef AMRNB
+#define LOG_TAG "C2SoftAmrNbDec"
+#else
+#define LOG_TAG "C2SoftAmrWbDec"
+#endif
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAmrDec.h"
+#include "gsmamr_dec.h"
+#include "pvamrwbdecoder.h"
+
+namespace android {
+
+#ifdef AMRNB
+ constexpr char COMPONENT_NAME[] = "c2.android.amrnb.decoder";
+#else
+ constexpr char COMPONENT_NAME[] = "c2.android.amrwb.decoder";
+#endif
+
+class C2SoftAmrDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+#ifdef AMRNB
+ MEDIA_MIMETYPE_AUDIO_AMR_NB
+#else
+ MEDIA_MIMETYPE_AUDIO_AMR_WB
+#endif
+ )).build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+#ifdef AMRNB
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
+ .withFields({C2F(mSampleRate, value).equalTo(8000)})
+#else
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 16000))
+ .withFields({C2F(mSampleRate, value).equalTo(16000)})
+#endif
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).equalTo(1)})
+ .withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
+ .build());
+
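+ // The advertised bitrate range matches the codec's mode set: 4.75-12.2 kbps
+ // for AMR-NB and 6.6-23.85 kbps for AMR-WB.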
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+#ifdef AMRNB
+ .withDefault(new C2BitrateTuning::input(0u, 4750))
+ .withFields({C2F(mBitrate, value).inRange(4750, 12200)})
+#else
+ .withDefault(new C2BitrateTuning::input(0u, 6600))
+ .withFields({C2F(mBitrate, value).inRange(6600, 23850)})
+#endif
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftAmrDec::C2SoftAmrDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mAmrHandle(nullptr),
+ mDecoderBuf(nullptr),
+ mDecoderCookie(nullptr) {
+#ifdef AMRNB
+ mIsWide = false;
+#else
+ mIsWide = true;
+#endif
+}
+
+C2SoftAmrDec::~C2SoftAmrDec() {
+ (void)onRelease();
+}
+
+c2_status_t C2SoftAmrDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+c2_status_t C2SoftAmrDec::onStop() {
+ if (!mIsWide) {
+ Speech_Decode_Frame_reset(mAmrHandle);
+ } else {
+ pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
+ }
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return C2_OK;
+}
+
+void C2SoftAmrDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftAmrDec::onRelease() {
+ if (!mIsWide) {
+ if (mAmrHandle) {
+ GSMDecodeFrameExit(&mAmrHandle);
+ }
+ mAmrHandle = nullptr;
+ } else {
+ if (mDecoderBuf) {
+ free(mDecoderBuf);
+ }
+ mDecoderBuf = nullptr;
+ mAmrHandle = nullptr;
+ mDecoderCookie = nullptr;
+ }
+}
+
+c2_status_t C2SoftAmrDec::onFlush_sm() {
+ return onStop();
+}
+
+status_t C2SoftAmrDec::initDecoder() {
+ if (!mIsWide) {
+ if (GSMInitDecode(&mAmrHandle, (int8_t *)"AMRNBDecoder"))
+ return UNKNOWN_ERROR;
+ } else {
+ uint32_t memReq = pvDecoder_AmrWbMemRequirements();
+ mDecoderBuf = malloc(memReq);
+ if (mDecoderBuf) {
+ pvDecoder_AmrWb_Init(&mAmrHandle, mDecoderBuf, &mDecoderCookie);
+ } else {
+ return NO_MEMORY;
+ }
+ }
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return OK;
+}
+
+static size_t getFrameSize(bool isWide, unsigned FM) {
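+ // Octet sizes of the AMR core frame for each 4-bit frame type (speech bits
+ // rounded up to whole bytes); these match the storage/payload frame sizes
+ // described in RFC 4867.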
+ static const size_t kFrameSizeNB[16] = {
+ 12, 13, 15, 17, 19, 20, 26, 31,
+ 5, 6, 5, 5, // SID
+ 0, 0, 0, // future use
+ 0 // no data
+ };
+ static const size_t kFrameSizeWB[16] = {
+ 17, 23, 32, 36, 40, 46, 50, 58, 60,
+ 5, // SID
+ 0, 0, 0, 0, // future use
+ 0, // speech lost
+ 0 // no data
+ };
+
+ if (FM > 15 || (isWide && FM > 9 && FM < 14) || (!isWide && FM > 11 && FM < 15)) {
+ ALOGE("illegal AMR frame mode %d", FM);
+ return 0;
+ }
+ // add 1 for header byte
+ return (isWide ? kFrameSizeWB[FM] : kFrameSizeNB[FM]) + 1;
+}
+
+static status_t calculateNumFrames(const uint8 *input, size_t inSize,
+ std::vector<size_t> *frameSizeList, bool isWide) {
+ for (size_t k = 0; k < inSize;) {
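+ // Each frame starts with a one-byte header whose bits 3..6 carry the 4-bit frame type.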
+ int16_t FM = ((input[0] >> 3) & 0x0f);
+ size_t frameSize = getFrameSize(isWide, FM);
+ if (frameSize == 0) {
+ return UNKNOWN_ERROR;
+ }
+ if ((inSize - k) >= frameSize) {
+ input += frameSize;
+ k += frameSize;
+ }
+ else break;
+ frameSizeList->push_back(frameSize);
+ }
+ return OK;
+}
+
+void C2SoftAmrDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+ if (inSize == 0) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
+ std::vector<size_t> frameSizeList;
+ if (OK != calculateNumFrames(rView.data() + inOffset, inSize, &frameSizeList,
+ mIsWide)) {
+ work->result = C2_CORRUPTED;
+ mSignalledError = true;
+ return;
+ }
+ if (frameSizeList.empty()) {
+ ALOGE("input size smaller than expected");
+ work->result = C2_CORRUPTED;
+ mSignalledError = true;
+ return;
+ }
+
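+ // Every AMR frame carries 20 ms of audio: 160 samples at 8 kHz (NB) or
+ // 320 samples at 16 kHz (WB).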
+ int16_t outSamples = mIsWide ? kNumSamplesPerFrameWB : kNumSamplesPerFrameNB;
+ size_t calOutSize = outSamples * frameSizeList.size() * sizeof(int16_t);
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(calOutSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ int16_t *output = reinterpret_cast<int16_t *>(wView.data());
+ auto it = frameSizeList.begin();
+ const uint8_t *inPtr = rView.data() + inOffset;
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ if (it == frameSizeList.end()) {
+ ALOGD("unexpected trailing bytes, ignoring them");
+ break;
+ }
+ uint8_t *input = const_cast<uint8_t *>(inPtr + inPos);
+ int16_t FM = ((*input >> 3) & 0x0f);
+ if (!mIsWide) {
+ int32_t numBytesRead = AMRDecode(mAmrHandle,
+ (Frame_Type_3GPP) FM,
+ input + 1, output, MIME_IETF);
+ if (static_cast<size_t>(numBytesRead + 1) != *it) {
+ ALOGE("panic, parsed size does not match decoded size");
+ work->result = C2_CORRUPTED;
+ mSignalledError = true;
+ return;
+ }
+ } else {
+ if (FM >= 9) {
+ // Produce silence for SID (comfort noise) frames as well as
+ // for speech-lost/no-data frames.
+ memset(output, 0, outSamples * sizeof(int16_t));
+ } else {
+ int16_t FT;
+ RX_State_wb rx_state;
+ int16_t numRecSamples;
+
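+ // mime_unsorting() rearranges the packed bits from the stored (MIME/IETF)
+ // layout into the ordering pvDecoder_AmrWb expects.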
+ mime_unsorting(const_cast<uint8_t *>(&input[1]),
+ mInputSampleBuffer, &FT, &FM, 1, &rx_state);
+ pvDecoder_AmrWb(FM, mInputSampleBuffer, output, &numRecSamples,
+ mDecoderBuf, FT, mDecoderCookie);
+ if (numRecSamples != outSamples) {
+ ALOGE("Sample output per frame incorrect");
+ work->result = C2_CORRUPTED;
+ mSignalledError = true;
+ return;
+ }
+ /* Delete the 2 LSBs (14-bit output) */
+ for (int i = 0; i < numRecSamples; ++i) {
+ output[i] &= 0xfffC;
+ }
+ }
+ }
+ inPos += *it;
+ output += outSamples;
+ ++it;
+ }
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+c2_status_t C2SoftAmrDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+ return C2_OK;
+}
+
+class C2SoftAMRDecFactory : public C2ComponentFactory {
+public:
+ C2SoftAMRDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAmrDec(COMPONENT_NAME, id,
+ std::make_shared<C2SoftAmrDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAmrDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAmrDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAMRDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAMRDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h
new file mode 100644
index 0000000..6384450
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AMR_DEC_H_
+#define ANDROID_C2_SOFT_AMR_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+namespace android {
+
+struct C2SoftAmrDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAmrDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftAmrDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+private:
+ enum {
+ kNumSamplesPerFrameNB = 160,
+ kNumSamplesPerFrameWB = 320,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ void *mAmrHandle;
+ void *mDecoderBuf;
+ int16_t *mDecoderCookie;
+
+ int16_t mInputSampleBuffer[477];
+
+ bool mIsWide;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+
+ status_t initDecoder();
+
+ C2_DO_NOT_COPY(C2SoftAmrDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AMR_DEC_H_
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
new file mode 100644
index 0000000..ca21480
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -0,0 +1,355 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAmrNbEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAmrNbEnc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.amrnb.encoder";
+
+class C2SoftAmrNbEnc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_AMR_NB))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
+ .withFields({C2F(mChannelCount, value).equalTo(1)})
+ .withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::input(0u, 8000))
+ .withFields({C2F(mSampleRate, value).equalTo(8000)})
+ .withSetter(
+ (Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 4750))
+ .withFields({C2F(mBitrate, value).inRange(4750, 12200)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+ }
+
+ uint32_t getSampleRate() const { return mSampleRate->value; }
+ uint32_t getChannelCount() const { return mChannelCount->value; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftAmrNbEnc::C2SoftAmrNbEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mEncState(nullptr),
+ mSidState(nullptr) {
+}
+
+C2SoftAmrNbEnc::~C2SoftAmrNbEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftAmrNbEnc::onInit() {
+ bool dtx_enable = false;
+
+ if (AMREncodeInit(&mEncState, &mSidState, dtx_enable) != 0)
+ return C2_CORRUPTED;
+ // TODO: get mode directly from config
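+ // Map the configured bitrate to one of the eight AMR-NB codec modes
+ // (4.75 kbps .. 12.2 kbps); unsupported values fall back to 7.95 kbps.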
+ switch(mIntf->getBitrate()) {
+ case 4750: mMode = MR475;
+ break;
+ case 5150: mMode = MR515;
+ break;
+ case 5900: mMode = MR59;
+ break;
+ case 6700: mMode = MR67;
+ break;
+ case 7400: mMode = MR74;
+ break;
+ case 7950: mMode = MR795;
+ break;
+ case 10200: mMode = MR102;
+ break;
+ case 12200: mMode = MR122;
+ break;
+ default: mMode = MR795;
+ }
+ mIsFirst = true;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+ mFilledLen = 0;
+
+ return C2_OK;
+}
+
+void C2SoftAmrNbEnc::onRelease() {
+ if (mEncState) {
+ AMREncodeExit(&mEncState, &mSidState);
+ mEncState = mSidState = nullptr;
+ }
+}
+
+c2_status_t C2SoftAmrNbEnc::onStop() {
+ if (AMREncodeReset(mEncState, mSidState) != 0)
+ return C2_CORRUPTED;
+ mIsFirst = true;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+ mFilledLen = 0;
+
+ return C2_OK;
+}
+
+void C2SoftAmrNbEnc::onReset() {
+ (void) onStop();
+}
+
+c2_status_t C2SoftAmrNbEnc::onFlush_sm() {
+ return onStop();
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftAmrNbEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+ size_t outCapacity = kNumBytesPerInputFrame;
+ outCapacity += mFilledLen + inSize;
+ std::shared_ptr<C2LinearBlock> outputBlock;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &outputBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = outputBlock->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
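+ // The output timestamp is the anchor plus the microseconds worth of samples
+ // already encoded at the configured sample rate.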
+ uint64_t outTimeStamp =
+ mProcessedSamples * 1000000ll / mIntf->getSampleRate();
+ size_t inPos = 0;
+ size_t outPos = 0;
+ while (inPos < inSize) {
+ const uint8_t *inPtr = rView.data() + inOffset;
+ int validSamples = mFilledLen / sizeof(int16_t);
+ if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) {
+ memcpy(mInputFrame + validSamples, inPtr + inPos,
+ (kNumBytesPerInputFrame - mFilledLen));
+ inPos += (kNumBytesPerInputFrame - mFilledLen);
+ } else {
+ memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos));
+ mFilledLen += (inSize - inPos);
+ inPos += (inSize - inPos);
+ if (eos) {
+ validSamples = mFilledLen / sizeof(int16_t);
+ memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen));
+ } else break;
+
+ }
+ Frame_Type_3GPP frameType;
+ int numEncBytes = AMREncode(mEncState, mSidState, mMode, mInputFrame,
+ wView.data() + outPos, &frameType,
+ AMR_TX_WMF);
+ if (numEncBytes < 0 || numEncBytes > ((int)outCapacity - (int)outPos)) {
+ ALOGE("encodeFrame call failed, state [%d %zu %zu]", numEncBytes, outPos, outCapacity);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ // Convert header byte from WMF to IETF format.
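+ // In the IETF layout the 4-bit frame type sits in bits 3..6 and bit 2 is the
+ // frame-quality indicator (set to 1 here); all remaining bits are cleared.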
+ if (numEncBytes > 0)
+ wView.data()[outPos] = ((wView.data()[outPos] << 3) | 4) & 0x7c;
+ outPos += numEncBytes;
+ mProcessedSamples += kNumSamplesPerFrame;
+ mFilledLen = 0;
+ }
+ ALOGV("causal sample size %d", mFilledLen);
+ if (mIsFirst) {
+ mIsFirst = false;
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ }
+ fillEmptyWork(work);
+ if (outPos != 0) {
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(std::move(outputBlock), 0, outPos));
+ work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+
+ }
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen);
+ }
+}
+
+c2_status_t C2SoftAmrNbEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ onFlush_sm();
+ return C2_OK;
+}
+
+class C2SoftAmrNbEncFactory : public C2ComponentFactory {
+public:
+ C2SoftAmrNbEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAmrNbEnc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftAmrNbEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAmrNbEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftAmrNbEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAmrNbEncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAmrNbEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
new file mode 100644
index 0000000..6ab14db
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AMR_NB_ENC_H_
+#define ANDROID_C2_SOFT_AMR_NB_ENC_H_
+
+#include <SimpleC2Component.h>
+
+#include "gsmamr_enc.h"
+
+namespace android {
+
+class C2SoftAmrNbEnc : public SimpleC2Component {
+ public:
+ class IntfImpl;
+ C2SoftAmrNbEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftAmrNbEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ std::shared_ptr<IntfImpl> mIntf;
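+ // One AMR-NB frame is 20 ms of 8 kHz audio: L_FRAME (160) samples, i.e. 320 bytes of 16-bit PCM.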
+ static const int32_t kNumSamplesPerFrame = L_FRAME;
+ static const int32_t kNumBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t);
+
+ void *mEncState;
+ void *mSidState;
+ Mode mMode;
+ bool mIsFirst;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ uint64_t mAnchorTimeStamp;
+ uint64_t mProcessedSamples;
+ int32_t mFilledLen;
+ int16_t mInputFrame[kNumSamplesPerFrame];
+
+ C2_DO_NOT_COPY(C2SoftAmrNbEnc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AMR_NB_ENC_H_
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
new file mode 100644
index 0000000..be3892f
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -0,0 +1,430 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAmrWbEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAmrWbEnc.h"
+#include "cmnMemory.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.amrwb.encoder";
+
+class C2SoftAmrWbEnc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_AMR_WB))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
+ .withFields({C2F(mChannelCount, value).equalTo(1)})
+ .withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::input(0u, 16000))
+ .withFields({C2F(mSampleRate, value).equalTo(16000)})
+ .withSetter(
+ (Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 6600))
+ .withFields({C2F(mBitrate, value).inRange(6600, 23850)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+ }
+
+ uint32_t getSampleRate() const { return mSampleRate->value; }
+ uint32_t getChannelCount() const { return mChannelCount->value; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftAmrWbEnc::C2SoftAmrWbEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mEncoderHandle(nullptr),
+ mApiHandle(nullptr),
+ mMemOperator(nullptr) {
+}
+
+C2SoftAmrWbEnc::~C2SoftAmrWbEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftAmrWbEnc::onInit() {
+ // TODO: get mode directly from config
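+    // Map the configured bitrate onto one of the nine standard AMR-WB modes
+    // (6.60 kbps to 23.85 kbps); unrecognized values fall back to 23.05 kbps.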
+    switch (mIntf->getBitrate()) {
+        case 6600: mMode = VOAMRWB_MD66; break;
+        case 8850: mMode = VOAMRWB_MD885; break;
+        case 12650: mMode = VOAMRWB_MD1265; break;
+        case 14250: mMode = VOAMRWB_MD1425; break;
+        case 15850: mMode = VOAMRWB_MD1585; break;
+        case 18250: mMode = VOAMRWB_MD1825; break;
+        case 19850: mMode = VOAMRWB_MD1985; break;
+        case 23050: mMode = VOAMRWB_MD2305; break;
+        case 23850: mMode = VOAMRWB_MD2385; break;
+        default: mMode = VOAMRWB_MD2305; break;
+    }
+ status_t err = initEncoder();
+ mIsFirst = true;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+ mFilledLen = 0;
+
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+void C2SoftAmrWbEnc::onRelease() {
+ if (mEncoderHandle) {
+ CHECK_EQ((VO_U32)VO_ERR_NONE, mApiHandle->Uninit(mEncoderHandle));
+ mEncoderHandle = nullptr;
+ }
+ if (mApiHandle) {
+ delete mApiHandle;
+ mApiHandle = nullptr;
+ }
+ if (mMemOperator) {
+ delete mMemOperator;
+ mMemOperator = nullptr;
+ }
+}
+
+c2_status_t C2SoftAmrWbEnc::onStop() {
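+    // Push one frame of EHF_MASK (encoder-homing) samples through the codec so its
+    // internal state is reset before reuse; the encoded output is discarded.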
+ for (int i = 0; i < kNumSamplesPerFrame; i++) {
+ mInputFrame[i] = 0x0008; /* EHF_MASK */
+ }
+ uint8_t outBuffer[kNumBytesPerInputFrame];
+ (void) encodeInput(outBuffer, kNumBytesPerInputFrame);
+ mIsFirst = true;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+ mFilledLen = 0;
+
+ return C2_OK;
+}
+
+void C2SoftAmrWbEnc::onReset() {
+ (void) onStop();
+}
+
+c2_status_t C2SoftAmrWbEnc::onFlush_sm() {
+ return onStop();
+}
+
+status_t C2SoftAmrWbEnc::initEncoder() {
+ mApiHandle = new VO_AUDIO_CODECAPI;
+ if (!mApiHandle) return NO_MEMORY;
+
+ if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) {
+ ALOGE("Failed to get api handle");
+ return UNKNOWN_ERROR;
+ }
+
+ mMemOperator = new VO_MEM_OPERATOR;
+ if (!mMemOperator) return NO_MEMORY;
+
+ mMemOperator->Alloc = cmnMemAlloc;
+ mMemOperator->Copy = cmnMemCopy;
+ mMemOperator->Free = cmnMemFree;
+ mMemOperator->Set = cmnMemSet;
+ mMemOperator->Check = cmnMemCheck;
+
+ VO_CODEC_INIT_USERDATA userData;
+ memset(&userData, 0, sizeof(userData));
+ userData.memflag = VO_IMF_USERMEMOPERATOR;
+ userData.memData = (VO_PTR) mMemOperator;
+
+ if (VO_ERR_NONE != mApiHandle->Init(
+ &mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) {
+ ALOGE("Failed to init AMRWB encoder");
+ return UNKNOWN_ERROR;
+ }
+
+ VOAMRWBFRAMETYPE type = VOAMRWB_RFC3267;
+ if (VO_ERR_NONE != mApiHandle->SetParam(
+ mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &type)) {
+ ALOGE("Failed to set AMRWB encoder frame type to %d", type);
+ return UNKNOWN_ERROR;
+ }
+
+ if (VO_ERR_NONE !=
+ mApiHandle->SetParam(
+ mEncoderHandle, VO_PID_AMRWB_MODE, &mMode)) {
+ ALOGE("Failed to set AMRWB encoder mode to %d", mMode);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
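+// Encodes the frame currently staged in mInputFrame (RFC 3267 framing, as configured in
+// initEncoder) into |buffer|; returns the number of bytes written, or -1 on error.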
+int C2SoftAmrWbEnc::encodeInput(uint8_t *buffer, uint32_t length) {
+ VO_CODECBUFFER inputData;
+ memset(&inputData, 0, sizeof(inputData));
+ inputData.Buffer = (unsigned char *) mInputFrame;
+ inputData.Length = kNumBytesPerInputFrame;
+
+ CHECK_EQ((VO_U32)VO_ERR_NONE,
+ mApiHandle->SetInputData(mEncoderHandle, &inputData));
+
+ VO_AUDIO_OUTPUTINFO outputInfo;
+ memset(&outputInfo, 0, sizeof(outputInfo));
+ VO_CODECBUFFER outputData;
+ memset(&outputData, 0, sizeof(outputData));
+ outputData.Buffer = buffer;
+ outputData.Length = length;
+ VO_U32 ret = mApiHandle->GetOutputData(
+ mEncoderHandle, &outputData, &outputInfo);
+ if (ret != VO_ERR_NONE && ret != VO_ERR_INPUT_BUFFER_SMALL) {
+ ALOGD("encountered error during encode call");
+ return -1;
+ }
+ return outputData.Length;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftAmrWbEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
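+    // Generous upper bound for the output block: encoded AMR-WB frames are far smaller
+    // than the PCM they consume, so one block can hold everything produced from this input.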
+ size_t outCapacity = kNumBytesPerInputFrame;
+ outCapacity += mFilledLen + inSize;
+ std::shared_ptr<C2LinearBlock> outputBlock;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &outputBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = outputBlock->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+ uint64_t outTimeStamp =
+ mProcessedSamples * 1000000ll / mIntf->getSampleRate();
+ size_t inPos = 0;
+ size_t outPos = 0;
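+    // Gather PCM into 320-sample (20 ms at 16 kHz) frames and encode each complete frame;
+    // a trailing partial frame is buffered for the next work item, or zero-padded at EOS.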
+ while (inPos < inSize) {
+ const uint8_t *inPtr = rView.data() + inOffset;
+ int validSamples = mFilledLen / sizeof(int16_t);
+ if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) {
+ memcpy(mInputFrame + validSamples, inPtr + inPos,
+ (kNumBytesPerInputFrame - mFilledLen));
+ inPos += (kNumBytesPerInputFrame - mFilledLen);
+ } else {
+ memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos));
+ mFilledLen += (inSize - inPos);
+ inPos += (inSize - inPos);
+ if (eos) {
+ validSamples = mFilledLen / sizeof(int16_t);
+ memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen));
+ } else break;
+ }
+ int numEncBytes = encodeInput((wView.data() + outPos), outCapacity - outPos);
+ if (numEncBytes < 0) {
+ ALOGE("encodeFrame call failed, state [%d %zu %zu]", numEncBytes, outPos, outCapacity);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ outPos += numEncBytes;
+ mProcessedSamples += kNumSamplesPerFrame;
+ mFilledLen = 0;
+ }
+ ALOGV("causal sample size %d", mFilledLen);
+ if (mIsFirst) {
+ mIsFirst = false;
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ }
+ fillEmptyWork(work);
+ if (outPos != 0) {
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(std::move(outputBlock), 0, outPos));
+ work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ }
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen);
+ }
+}
+
+c2_status_t C2SoftAmrWbEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ onFlush_sm();
+ return C2_OK;
+}
+
+class C2SoftAmrWbEncFactory : public C2ComponentFactory {
+public:
+ C2SoftAmrWbEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAmrWbEnc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftAmrWbEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAmrWbEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftAmrWbEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAmrWbEncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAmrWbEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
new file mode 100644
index 0000000..0cc9e9f
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AMR_WB_ENC_H_
+#define ANDROID_C2_SOFT_AMR_WB_ENC_H_
+
+#include <SimpleC2Component.h>
+
+#include "voAMRWB.h"
+
+namespace android {
+
+class C2SoftAmrWbEnc : public SimpleC2Component {
+public:
+ class IntfImpl;
+ C2SoftAmrWbEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftAmrWbEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ std::shared_ptr<IntfImpl> mIntf;
+ static const int32_t kNumSamplesPerFrame = 320;
+ static const int32_t kNumBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t);
+
+ void *mEncoderHandle;
+ VO_AUDIO_CODECAPI *mApiHandle;
+ VO_MEM_OPERATOR *mMemOperator;
+ VOAMRWBMODE mMode;
+ bool mIsFirst;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ uint64_t mAnchorTimeStamp;
+ uint64_t mProcessedSamples;
+ int32_t mFilledLen;
+ int16_t mInputFrame[kNumSamplesPerFrame];
+
+ status_t initEncoder();
+ int encodeInput(uint8_t *buffer, uint32_t length);
+
+ C2_DO_NOT_COPY(C2SoftAmrWbEnc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AMR_WB_ENC_H_
diff --git a/media/codec2/components/amr_nb_wb/MODULE_LICENSE_APACHE2 b/media/codec2/components/amr_nb_wb/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/amr_nb_wb/NOTICE b/media/codec2/components/amr_nb_wb/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/amr_nb_wb/patent_disclaimer.txt b/media/codec2/components/amr_nb_wb/patent_disclaimer.txt
new file mode 100644
index 0000000..b4bf11d
--- /dev/null
+++ b/media/codec2/components/amr_nb_wb/patent_disclaimer.txt
@@ -0,0 +1,9 @@
+
+THIS IS NOT A GRANT OF PATENT RIGHTS.
+
+Google makes no representation or warranty that the codecs for which
+source code is made available hereunder are unencumbered by
+third-party patents. Those intending to use this source code in
+hardware or software products are advised that implementations of
+these codecs, including in open source software or shareware, may
+require patent licenses from the relevant patent holders.
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
new file mode 100644
index 0000000..d883951
--- /dev/null
+++ b/media/codec2/components/avc/Android.bp
@@ -0,0 +1,37 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2avcdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ static_libs: ["libavcdec"],
+
+ srcs: ["C2SoftAvcDec.cpp"],
+
+ include_dirs: [
+ "external/libavc/decoder",
+ "external/libavc/common",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2avcenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ static_libs: ["libavcenc"],
+
+ srcs: ["C2SoftAvcEnc.cpp"],
+
+ include_dirs: [
+ "external/libavc/encoder",
+ "external/libavc/common",
+ ],
+
+ cflags: [
+ "-Wno-unused-variable",
+ ],
+}
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
new file mode 100644
index 0000000..3e62744
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -0,0 +1,978 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAvcDec.h"
+#include "ih264d.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.avc.decoder";
+
+} // namespace
+
+class C2SoftAvcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+ MEDIA_MIMETYPE_VIDEO_AVC) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ // coded and output picture size is the same for this codec
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4080, 2),
+ C2F(mSize, height).inRange(2, 4080, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4080, 2),
+ C2F(mSize, height).inRange(2, 4080, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_AVC_CONSTRAINED_BASELINE, C2Config::LEVEL_AVC_5_2))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
+ C2Config::PROFILE_AVC_BASELINE,
+ C2Config::PROFILE_AVC_MAIN,
+ C2Config::PROFILE_AVC_CONSTRAINED_HIGH,
+ C2Config::PROFILE_AVC_PROGRESSIVE_HIGH,
+ C2Config::PROFILE_AVC_HIGH}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B, C2Config::LEVEL_AVC_1_1,
+ C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3,
+ C2Config::LEVEL_AVC_2, C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2,
+ C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2,
+ C2Config::LEVEL_AVC_4, C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2,
+ C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2
+ })
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ { C2ChromaOffsetStruct::ITU_YUV_420_0() },
+ 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4080u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4080u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 2
+ me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 192);
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+};
+
+static size_t getCpuCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+ (void) ctxt;
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *mem) {
+ (void) ctxt;
+ free(mem);
+}
+
+C2SoftAvcDec::C2SoftAvcDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mOutBufferFlush(nullptr),
+ mIvColorFormat(IV_YUV_420P),
+ mWidth(320),
+ mHeight(240),
+ mHeaderDecoded(false) {
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+}
+
+C2SoftAvcDec::~C2SoftAvcDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftAvcDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAvcDec::onStop() {
+ if (OK != resetDecoder()) return C2_CORRUPTED;
+ resetPlugin();
+ return C2_OK;
+}
+
+void C2SoftAvcDec::onReset() {
+ (void) onStop();
+}
+
+void C2SoftAvcDec::onRelease() {
+ (void) deleteDecoder();
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+}
+
+c2_status_t C2SoftAvcDec::onFlush_sm() {
+ if (OK != setFlushMode()) return C2_CORRUPTED;
+
+ uint32_t bufferSize = mStride * mHeight * 3 / 2;
+ mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize);
+ if (!mOutBufferFlush) {
+ ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize);
+ return C2_NO_MEMORY;
+ }
+
+ while (true) {
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+
+ setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (0 == s_decode_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+
+ return C2_OK;
+}
+
+status_t C2SoftAvcDec::createDecoder() {
+ ivdext_create_ip_t s_create_ip;
+ ivdext_create_op_t s_create_op;
+
+ s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+ s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+ s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+ s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr;
+ s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(nullptr,
+ &s_create_ip,
+ &s_create_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_create_op.s_ivd_create_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
+ mDecHandle->pv_fxns = (void *)ivdec_api_function;
+ mDecHandle->u4_size = sizeof(iv_obj_t);
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+
+ s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_num_cores_ip.u4_num_cores = mNumCores;
+ s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_num_cores_ip,
+ &s_set_num_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
+ ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
+ ivd_ctl_set_config_op_t s_set_dyn_params_op;
+
+ s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
+ s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+ s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
+ s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_dyn_params_ip,
+ &s_set_dyn_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+void C2SoftAvcDec::getVersion() {
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ UWORD8 au1_buf[512];
+
+ s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_get_versioninfo_ip.pv_version_buffer = au1_buf;
+ s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf);
+ s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_versioninfo_ip,
+ &s_get_versioninfo_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__,
+ s_get_versioninfo_op.u4_error_code);
+ } else {
+ ALOGV("ittiam decoder version number: %s",
+ (char *) s_get_versioninfo_ip.pv_version_buffer);
+ }
+}
+
+status_t C2SoftAvcDec::initDecoder() {
+ if (OK != createDecoder()) return UNKNOWN_ERROR;
+ mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
+ mStride = ALIGN64(mWidth);
+ mSignalledError = false;
+ resetPlugin();
+ (void) setNumCores();
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return UNKNOWN_ERROR;
+ (void) getVersion();
+
+ return OK;
+}
+
+bool C2SoftAvcDec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker) {
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
+ size_t lumaSize = displayStride * displayHeight;
+ size_t chromaSize = lumaSize >> 2;
+
+ ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+ if (inBuffer) {
+ ps_decode_ip->u4_ts = tsMarker;
+ ps_decode_ip->pv_stream_buffer = const_cast<uint8_t *>(inBuffer->data() + inOffset);
+ ps_decode_ip->u4_num_Bytes = inSize;
+ } else {
+ ps_decode_ip->u4_ts = 0;
+ ps_decode_ip->pv_stream_buffer = nullptr;
+ ps_decode_ip->u4_num_Bytes = 0;
+ }
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
+ if (outBuffer) {
+ if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) {
+ ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
+ outBuffer->width(), outBuffer->height(), displayStride, displayHeight);
+ return false;
+ }
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ } else {
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush;
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize;
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
+ }
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ return true;
+}
+
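+// Reads the VUI colour description from the decoder and, when it differs from what was
+// seen before, maps it to C2 colour aspects and pushes the update through the interface.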
+bool C2SoftAvcDec::getVuiParams() {
+ ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
+ ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+
+ s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
+ s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_vui_params_ip.e_sub_cmd =
+ (IVD_CONTROL_API_COMMAND_TYPE_T) IH264D_CMD_CTL_GET_VUI_PARAMS;
+ s_get_vui_params_op.u4_size = sizeof(ivdext_ctl_get_vui_params_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_vui_params_ip,
+ &s_get_vui_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__, s_get_vui_params_op.u4_error_code);
+ return false;
+ }
+
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = s_get_vui_params_op.u1_colour_primaries;
+ vuiColorAspects.transfer = s_get_vui_params_op.u1_tfr_chars;
+ vuiColorAspects.coeffs = s_get_vui_params_op.u1_matrix_coeffs;
+ vuiColorAspects.fullRange = s_get_vui_params_op.u1_video_full_range_flag;
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+ return true;
+}
+
+status_t C2SoftAvcDec::setFlushMode() {
+ ivd_ctl_flush_ip_t s_set_flush_ip;
+ ivd_ctl_flush_op_t s_set_flush_op;
+
+ s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_flush_ip,
+ &s_set_flush_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::resetDecoder() {
+ ivd_ctl_reset_ip_t s_reset_ip;
+ ivd_ctl_reset_op_t s_reset_op;
+
+ s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_reset_ip,
+ &s_reset_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mStride = 0;
+ (void) setNumCores();
+ mSignalledError = false;
+ mHeaderDecoded = false;
+
+ return OK;
+}
+
+void C2SoftAvcDec::resetPlugin() {
+ mSignalledOutputEos = false;
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+status_t C2SoftAvcDec::deleteDecoder() {
+ if (mDecHandle) {
+ ivdext_delete_ip_t s_delete_ip;
+ ivdext_delete_op_t s_delete_op;
+
+ s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+ s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+ s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_delete_ip,
+ &s_delete_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_delete_op.s_ivd_delete_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = nullptr;
+ }
+
+ return OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftAvcDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mOutBlock),
+ C2Rect(mWidth, mHeight));
+ mOutBlock = nullptr;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftAvcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (!mDecHandle) {
+ ALOGE("not supposed to be here, invalid decoder context");
+ return C2_CORRUPTED;
+ }
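+    // Keep the display stride 64-aligned (ALIGN64) to the current width; if the width
+    // changed, push the new stride to the decoder before fetching an output block.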
+ if (mStride != ALIGN64(mWidth)) {
+ mStride = ALIGN64(mWidth);
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED;
+ }
+ if (mOutBlock &&
+ (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) {
+ mOutBlock.reset();
+ }
+ if (!mOutBlock) {
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ return err;
+ }
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mOutBlock->width(), mOutBlock->height(), mStride, mHeight);
+ }
+
+ return C2_OK;
+}
+
+// TODO: can overall error checking be improved?
+// TODO: allow configuration of color format and usage for graphic buffers instead
+// of hard coding them to HAL_PIXEL_FORMAT_YV12
+// TODO: pass coloraspects information to surface
+// TODO: test support for dynamic change in resolution
+// TODO: verify if the decoder sent back all frames
+void C2SoftAvcDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.flags = work->input.flags;
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ bool hasPicture = false;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+ size_t inPos = 0;
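+    // Feed the access unit to the decoder in a loop: each call consumes some bytes and may
+    // produce one picture; stop when all input is consumed or a frame has been decoded.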
+ while (inPos < inSize) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ {
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ inOffset + inPos, inSize - inPos, workIndex)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (false == mHeaderDecoded) {
+ /* Decode header and get dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ }
+
+ WORD32 delay;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ WORD32 decodeTime;
+ GETTIME(&mTimeEnd, nullptr);
+ TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+ ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+ s_decode_op.u4_num_bytes_consumed);
+ }
+ if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("allocation failure in decoder");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGV("resolution changed");
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ resetDecoder();
+ resetPlugin();
+ work->workletsProcessed = 0u;
+
+ /* Decode header and get new dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ }
+ if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (mHeaderDecoded == false) {
+ mHeaderDecoded = true;
+ setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ }
+ if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
+ mWidth = s_decode_op.u4_pic_wd;
+ mHeight = s_decode_op.u4_pic_ht;
+ CHECK_EQ(0u, s_decode_op.u4_output_present);
+
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+                if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Cannot set width and height");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ continue;
+ }
+ }
+ (void)getVuiParams();
+ hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ }
+ if (0 == s_decode_op.u4_num_bytes_consumed) {
+ ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
+ break;
+ }
+ inPos += s_decode_op.u4_num_bytes_consumed;
+ if (hasPicture && (inSize - inPos)) {
+ ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
+ (int)inSize - (int)inPos);
+ break;
+ }
+ }
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!hasPicture) {
+ fillEmptyWork(work);
+ }
+}
+
+c2_status_t C2SoftAvcDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (OK != setFlushMode()) return C2_CORRUPTED;
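+    // In flush mode the decoder returns its buffered pictures without consuming new input;
+    // keep calling it until it reports no more output.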
+ while (true) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return C2_CORRUPTED;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ return C2_CORRUPTED;
+ }
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ return C2_CORRUPTED;
+ }
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ } else {
+ fillEmptyWork(work);
+ break;
+ }
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftAvcDecFactory : public C2ComponentFactory {
+public:
+ C2SoftAvcDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAvcDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAvcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAvcDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAvcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAvcDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAvcDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
new file mode 100644
index 0000000..2127a93
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AVC_DEC_H_
+#define ANDROID_C2_SOFT_AVC_DEC_H_
+
+#include <sys/time.h>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+
+#include "ih264_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+
+namespace android {
+
+#define ivdec_api_function ih264d_api_function
+#define ivdext_create_ip_t ih264d_create_ip_t
+#define ivdext_create_op_t ih264d_create_op_t
+#define ivdext_delete_ip_t ih264d_delete_ip_t
+#define ivdext_delete_op_t ih264d_delete_op_t
+#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
+#define ivdext_ctl_get_vui_params_ip_t ih264d_ctl_get_vui_params_ip_t
+#define ivdext_ctl_get_vui_params_op_t ih264d_ctl_get_vui_params_op_t
+#define ALIGN64(x) ((((x) + 63) >> 6) << 6)
+#define MAX_NUM_CORES 4
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define GETTIME(a, b) gettimeofday(a, b);
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#ifdef FILE_DUMP_ENABLE
+ #define INPUT_DUMP_PATH "/sdcard/clips/avcd_input"
+ #define INPUT_DUMP_EXT "h264"
+ #define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ }
+ #define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+ }
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)\
+ { \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL && m_offset == 0) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int) m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ } else if (fp == NULL) { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+ if (fp) { \
+ fclose(fp); \
+ } \
+ }
+#else /* FILE_DUMP_ENABLE */
+ #define INPUT_DUMP_PATH
+ #define INPUT_DUMP_EXT
+ #define OUTPUT_DUMP_PATH
+ #define OUTPUT_DUMP_EXT
+ #define GENERATE_FILE_NAMES()
+ #define CREATE_DUMP_FILE(m_filename)
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)
+#endif /* FILE_DUMP_ENABLE */
+
+
+class C2SoftAvcDec : public SimpleC2Component {
+public:
+ class IntfImpl;
+ C2SoftAvcDec(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftAvcDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ status_t createDecoder();
+ status_t setNumCores();
+ status_t setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode);
+ void getVersion();
+ status_t initDecoder();
+ bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker);
+ bool getVuiParams();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ status_t setFlushMode();
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+ status_t resetDecoder();
+ void resetPlugin();
+ status_t deleteDecoder();
+
+ std::shared_ptr<IntfImpl> mIntf;
+
+ // TODO: This is not the right place for this enum. These should be part of
+ // c2-vndk so that they can be accessed by all video plugins. Until then,
+ // they live here.
+ enum {
+ kNotSupported,
+ kPreferBitstream,
+ kPreferContainer,
+ };
+
+ iv_obj_t *mDecHandle;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+ uint8_t *mOutBufferFlush;
+
+ size_t mNumCores;
+ IV_COLOR_FORMAT_T mIvColorFormat;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mStride;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+ bool mHeaderDecoded;
+ // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+ // converting them to C2 values for each frame
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
+ // profile
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ C2_DO_NOT_COPY(C2SoftAvcDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AVC_DEC_H_
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
new file mode 100644
index 0000000..ee5cf27
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -0,0 +1,1559 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcEnc"
+#include <log/log.h>
+#include <utils/misc.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "C2SoftAvcEnc.h"
+#include "ih264e.h"
+#include "ih264e_error.h"
+
+namespace android {
+
+class C2SoftAvcEnc::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_VIDEO_AVC))
+ .build());
+
+ addParameter(
+ DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
+ .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2560, 2),
+ C2F(mSize, height).inRange(2, 2560, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+ .withDefault(new C2StreamIntraRefreshTuning::output(
+ 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+ .withFields({
+ C2F(mIntraRefresh, mode).oneOf({
+ C2Config::INTRA_REFRESH_DISABLED, C2Config::INTRA_REFRESH_ARBITRARY }),
+ C2F(mIntraRefresh, period).any()
+ })
+ .withSetter(IntraRefreshSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_AVC_CONSTRAINED_BASELINE, LEVEL_AVC_4_1))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ PROFILE_AVC_BASELINE,
+ PROFILE_AVC_CONSTRAINED_BASELINE,
+ PROFILE_AVC_MAIN,
+ }),
+ C2F(mProfileLevel, level).oneOf({
+ LEVEL_AVC_1,
+ LEVEL_AVC_1B,
+ LEVEL_AVC_1_1,
+ LEVEL_AVC_1_2,
+ LEVEL_AVC_1_3,
+ LEVEL_AVC_2,
+ LEVEL_AVC_2_1,
+ LEVEL_AVC_2_2,
+ LEVEL_AVC_3,
+ LEVEL_AVC_3_1,
+ LEVEL_AVC_3_2,
+ LEVEL_AVC_4,
+ LEVEL_AVC_4_1,
+ LEVEL_AVC_4_2,
+ LEVEL_AVC_5,
+ }),
+ })
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+ .build());
+
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
+ C2P<C2StreamPictureSizeInfo::input> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me,
+ const C2P<C2VideoSizeStreamTuning::input> &size,
+ const C2P<C2StreamFrameRateInfo::output> &frameRate,
+ const C2P<C2BitrateTuning::output> &bitrate) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_AVC_CONSTRAINED_BASELINE;
+ }
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ float mbsPerSec;
+ uint64_t mbs;
+ uint32_t bitrate;
+ };
+ constexpr LevelLimits kLimits[] = {
+ { LEVEL_AVC_1, 1485, 99, 64000 },
+ // Decoder does not properly handle level 1b.
+ // { LEVEL_AVC_1B, 1485, 99, 128000 },
+ { LEVEL_AVC_1_1, 3000, 396, 192000 },
+ { LEVEL_AVC_1_2, 6000, 396, 384000 },
+ { LEVEL_AVC_1_3, 11880, 396, 768000 },
+ { LEVEL_AVC_2, 11880, 396, 2000000 },
+ { LEVEL_AVC_2_1, 19800, 792, 4000000 },
+ { LEVEL_AVC_2_2, 20250, 1620, 4000000 },
+ { LEVEL_AVC_3, 40500, 1620, 10000000 },
+ { LEVEL_AVC_3_1, 108000, 3600, 14000000 },
+ { LEVEL_AVC_3_2, 216000, 5120, 20000000 },
+ { LEVEL_AVC_4, 245760, 8192, 20000000 },
+ { LEVEL_AVC_4_1, 245760, 8192, 50000000 },
+ { LEVEL_AVC_4_2, 522240, 8704, 50000000 },
+ { LEVEL_AVC_5, 589824, 22080, 135000000 },
+ };
+
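+ // Frame size in macroblocks (16x16) and macroblock rate for the configured
+ // size and frame rate; these are checked against the per-level limits above.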
+ uint64_t mbs = uint64_t((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
+ float mbsPerSec = float(mbs) / frameRate.v.value;
+
+ // Check if the supplied level meets the MB / bitrate requirements. If
+ // not, update the level with the lowest level meeting the requirements.
+
+ bool found = false;
+ // By default needsUpdate = false in case the supplied level does meet
+ // the requirements. For Level 1b, we want to update the level anyway,
+ // so we set it to true in that case.
+ bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
+ for (const LevelLimits &limit : kLimits) {
+ if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
+ bitrate.v.value <= limit.bitrate) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x", me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ if (!found) {
+ // We set to the highest supported level.
+ me.set().level = LEVEL_AVC_5;
+ }
+
+ return C2R::Ok();
+ }
+
+ static C2R IntraRefreshSetter(bool mayBlock, C2P<C2StreamIntraRefreshTuning::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.period < 1) {
+ me.set().mode = C2Config::INTRA_REFRESH_DISABLED;
+ me.set().period = 0;
+ } else {
+ // only support arbitrary mode (cyclic in our case)
+ me.set().mode = C2Config::INTRA_REFRESH_ARBITRARY;
+ }
+ return res;
+ }
+
+ IV_PROFILE_T getProfile_l() const {
+ switch (mProfileLevel->profile) {
+ case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
+ case PROFILE_AVC_BASELINE: return IV_PROFILE_BASE;
+ case PROFILE_AVC_MAIN: return IV_PROFILE_MAIN;
+ default:
+ ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ return IV_PROFILE_DEFAULT;
+ }
+ }
+
+ UWORD32 getLevel_l() const {
+ struct Level {
+ C2Config::level_t c2Level;
+ UWORD32 avcLevel;
+ };
+ constexpr Level levels[] = {
+ { LEVEL_AVC_1, 10 },
+ { LEVEL_AVC_1B, 9 },
+ { LEVEL_AVC_1_1, 11 },
+ { LEVEL_AVC_1_2, 12 },
+ { LEVEL_AVC_1_3, 13 },
+ { LEVEL_AVC_2, 20 },
+ { LEVEL_AVC_2_1, 21 },
+ { LEVEL_AVC_2_2, 22 },
+ { LEVEL_AVC_3, 30 },
+ { LEVEL_AVC_3_1, 31 },
+ { LEVEL_AVC_3_2, 32 },
+ { LEVEL_AVC_4, 40 },
+ { LEVEL_AVC_4_1, 41 },
+ { LEVEL_AVC_4_2, 42 },
+ { LEVEL_AVC_5, 50 },
+ };
+ for (const Level &level : levels) {
+ if (mProfileLevel->level == level.c2Level) {
+ return level.avcLevel;
+ }
+ }
+ ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ return 41;
+ }
+ uint32_t getSyncFramePeriod_l() const {
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
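+ // Convert the sync-frame interval (microseconds) into a frame count at the
+ // configured frame rate, rounded and clamped to at least one frame.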
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+};
+
+#define ive_api_function ih264e_api_function
+
+constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
+
+namespace {
+
+// From external/libavc/encoder/ih264e_bitstream.h
+constexpr uint32_t MIN_STREAM_SIZE = 0x800;
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+} // namespace
+
+C2SoftAvcEnc::C2SoftAvcEnc(
+ const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mAVCEncProfile(IV_PROFILE_BASE),
+ mAVCEncLevel(41),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSawOutputEOS(false),
+ mSignalledError(false),
+ mCodecCtx(nullptr),
+ // TODO: output buffer size
+ mOutBufferSize(524288) {
+
+ // If dumping is enabled, create empty dump files
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ initEncParams();
+}
+
+C2SoftAvcEnc::~C2SoftAvcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onInit() {
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::onStop() {
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::onReset() {
+ // TODO: use IVE_CMD_CTL_RESET?
+ releaseEncoder();
+ initEncParams();
+}
+
+void C2SoftAvcEnc::onRelease() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onFlush_sm() {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::initEncParams() {
+ mCodecCtx = nullptr;
+ mMemRecords = nullptr;
+ mNumMemRecords = DEFAULT_MEM_REC_CNT;
+ mHeaderGenerated = 0;
+ mNumCores = GetCPUCoreCount();
+ mArch = DEFAULT_ARCH;
+ mSliceMode = DEFAULT_SLICE_MODE;
+ mSliceParam = DEFAULT_SLICE_PARAM;
+ mHalfPelEnable = DEFAULT_HPEL;
+ mIInterval = DEFAULT_I_INTERVAL;
+ mIDRInterval = DEFAULT_IDR_INTERVAL;
+ mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
+ mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
+ mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
+ mEncSpeed = DEFAULT_ENC_SPEED;
+ mIntra4x4 = DEFAULT_INTRA4x4;
+ mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
+ mPSNREnable = DEFAULT_PSNR_ENABLE;
+ mReconEnable = DEFAULT_RECON_ENABLE;
+ mEntropyMode = DEFAULT_ENTROPY_MODE;
+ mBframes = DEFAULT_B_FRAMES;
+
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+c2_status_t C2SoftAvcEnc::setDimensions() {
+ ive_ctl_set_dimensions_ip_t s_dimensions_ip;
+ ive_ctl_set_dimensions_op_t s_dimensions_op;
+ IV_STATUS_T status;
+
+ s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
+ s_dimensions_ip.u4_ht = mSize->height;
+ s_dimensions_ip.u4_wd = mSize->width;
+
+ s_dimensions_ip.u4_timestamp_high = -1;
+ s_dimensions_ip.u4_timestamp_low = -1;
+
+ s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
+ s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame dimensions = 0x%x\n",
+ s_dimensions_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setNumCores() {
+ IV_STATUS_T status;
+ ive_ctl_set_num_cores_ip_t s_num_cores_ip;
+ ive_ctl_set_num_cores_op_t s_num_cores_op;
+ s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
+ s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
+ s_num_cores_ip.u4_timestamp_high = -1;
+ s_num_cores_ip.u4_timestamp_low = -1;
+ s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
+
+ s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
+
+ status = ive_api_function(
+ mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set processor params = 0x%x\n",
+ s_num_cores_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameRate() {
+ ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
+ ive_ctl_set_frame_rate_op_t s_frame_rate_op;
+ IV_STATUS_T status;
+
+ s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
+
+ s_frame_rate_ip.u4_src_frame_rate = mFrameRate->value + 0.5;
+ s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate->value + 0.5;
+
+ s_frame_rate_ip.u4_timestamp_high = -1;
+ s_frame_rate_ip.u4_timestamp_low = -1;
+
+ s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
+ s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame rate = 0x%x\n",
+ s_frame_rate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setIpeParams() {
+ ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
+ ive_ctl_set_ipe_params_op_t s_ipe_params_op;
+ IV_STATUS_T status;
+
+ s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
+
+ s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
+ s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
+ s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
+
+ s_ipe_params_ip.u4_timestamp_high = -1;
+ s_ipe_params_ip.u4_timestamp_low = -1;
+
+ s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
+ s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set ipe params = 0x%x\n",
+ s_ipe_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setBitRate() {
+ ive_ctl_set_bitrate_ip_t s_bitrate_ip;
+ ive_ctl_set_bitrate_op_t s_bitrate_op;
+ IV_STATUS_T status;
+
+ s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
+
+ s_bitrate_ip.u4_target_bitrate = mBitrate->value;
+
+ s_bitrate_ip.u4_timestamp_high = -1;
+ s_bitrate_ip.u4_timestamp_low = -1;
+
+ s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
+ s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
+ ive_ctl_set_frame_type_ip_t s_frame_type_ip;
+ ive_ctl_set_frame_type_op_t s_frame_type_op;
+ IV_STATUS_T status;
+ s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
+
+ s_frame_type_ip.e_frame_type = e_frame_type;
+
+ s_frame_type_ip.u4_timestamp_high = -1;
+ s_frame_type_ip.u4_timestamp_low = -1;
+
+ s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
+ s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame type = 0x%x\n",
+ s_frame_type_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setQp() {
+ ive_ctl_set_qp_ip_t s_qp_ip;
+ ive_ctl_set_qp_op_t s_qp_op;
+ IV_STATUS_T status;
+
+ s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
+
+ s_qp_ip.u4_i_qp = DEFAULT_I_QP;
+ s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_p_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_b_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_timestamp_high = -1;
+ s_qp_ip.u4_timestamp_low = -1;
+
+ s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
+ s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
+ IV_STATUS_T status;
+ ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
+ ive_ctl_set_enc_mode_op_t s_enc_mode_op;
+
+ s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
+
+ s_enc_mode_ip.e_enc_mode = e_enc_mode;
+
+ s_enc_mode_ip.u4_timestamp_high = -1;
+ s_enc_mode_ip.u4_timestamp_low = -1;
+
+ s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
+ s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set in header encode mode = 0x%x\n",
+ s_enc_mode_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setVbvParams() {
+ ive_ctl_set_vbv_params_ip_t s_vbv_ip;
+ ive_ctl_set_vbv_params_op_t s_vbv_op;
+ IV_STATUS_T status;
+
+ s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
+
+ s_vbv_ip.u4_vbv_buf_size = 0;
+ s_vbv_ip.u4_vbv_buffer_delay = 1000;
+
+ s_vbv_ip.u4_timestamp_high = -1;
+ s_vbv_ip.u4_timestamp_low = -1;
+
+ s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
+ s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set VBV params = 0x%x\n", s_vbv_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setAirParams() {
+ ive_ctl_set_air_params_ip_t s_air_ip;
+ ive_ctl_set_air_params_op_t s_air_op;
+ IV_STATUS_T status;
+
+ s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
+
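+ // Map the C2 intra-refresh config to the encoder's AIR (adaptive intra
+ // refresh) modes; only cyclic refresh is used when refresh is enabled.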
+ s_air_ip.e_air_mode =
+ (mIntraRefresh->mode == C2Config::INTRA_REFRESH_DISABLED || mIntraRefresh->period < 1)
+ ? IVE_AIR_MODE_NONE : IVE_AIR_MODE_CYCLIC;
+ s_air_ip.u4_air_refresh_period = mIntraRefresh->period;
+
+ s_air_ip.u4_timestamp_high = -1;
+ s_air_ip.u4_timestamp_low = -1;
+
+ s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
+ s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setMeParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_me_params_ip_t s_me_params_ip;
+ ive_ctl_set_me_params_op_t s_me_params_op;
+
+ s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
+
+ s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
+ s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
+
+ s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
+ s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
+ s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
+ s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
+ s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
+
+ s_me_params_ip.u4_timestamp_high = -1;
+ s_me_params_ip.u4_timestamp_low = -1;
+
+ s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
+ s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setGopParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_gop_params_ip_t s_gop_params_ip;
+ ive_ctl_set_gop_params_op_t s_gop_params_op;
+
+ s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
+
+ s_gop_params_ip.u4_i_frm_interval = mIInterval;
+ s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
+
+ s_gop_params_ip.u4_timestamp_high = -1;
+ s_gop_params_ip.u4_timestamp_low = -1;
+
+ s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
+ s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set GOP params = 0x%x\n",
+ s_gop_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setProfileParams() {
+ IntfImpl::Lock lock = mIntf->lock();
+
+ IV_STATUS_T status;
+ ive_ctl_set_profile_params_ip_t s_profile_params_ip;
+ ive_ctl_set_profile_params_op_t s_profile_params_op;
+
+ s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
+
+ s_profile_params_ip.e_profile = mIntf->getProfile_l();
+ s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;
+ s_profile_params_ip.u4_timestamp_high = -1;
+ s_profile_params_ip.u4_timestamp_low = -1;
+
+ s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
+ s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
+ lock.unlock();
+
+ status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set profile params = 0x%x\n",
+ s_profile_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setDeblockParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
+ ive_ctl_set_deblock_params_op_t s_deblock_params_op;
+
+ s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
+
+ s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
+
+ s_deblock_params_ip.u4_timestamp_high = -1;
+ s_deblock_params_ip.u4_timestamp_low = -1;
+
+ s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
+ s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to enable/disable deblock params = 0x%x\n",
+ s_deblock_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::logVersion() {
+ ive_ctl_getversioninfo_ip_t s_ctl_ip;
+ ive_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
+ s_ctl_ip.pu1_version = au1_buf;
+ s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
+
+ status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
+ }
+ return;
+}
+
+c2_status_t C2SoftAvcEnc::initEncoder() {
+ IV_STATUS_T status;
+ WORD32 level;
+
+ CHECK(!mStarted);
+
+ c2_status_t errType = C2_OK;
+
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mIntraRefresh = mIntf->getIntraRefresh_l();
+ mAVCEncLevel = mIntf->getLevel_l();
+ mIInterval = mIntf->getSyncFramePeriod_l();
+ mIDRInterval = mIntf->getSyncFramePeriod_l();
+ }
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+
+ mStride = width;
+
+ // TODO
+ mIvVideoColorFormat = IV_YUV_420P;
+
+ ALOGD("Params width %d height %d level %d colorFormat %d", width,
+ height, mAVCEncLevel, mIvVideoColorFormat);
+
+ /* Getting Number of MemRecords */
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
+ s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
+
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ status = ive_api_function(nullptr, &s_num_mem_rec_ip, &s_num_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Get number of memory records failed = 0x%x\n",
+ s_num_mem_rec_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ /* Allocate array to hold memory records */
+ if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
+ ALOGE("requested memory size is too big.");
+ return C2_CORRUPTED;
+ }
+ mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (nullptr == mMemRecords) {
+ ALOGE("Unable to allocate memory for hold memory records: Size %zu",
+ mNumMemRecords * sizeof(iv_mem_rec_t));
+ mSignalledError = true;
+ return C2_CORRUPTED;
+ }
+
+ {
+ iv_mem_rec_t *ps_mem_rec;
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
+ ps_mem_rec->pv_base = nullptr;
+ ps_mem_rec->u4_mem_size = 0;
+ ps_mem_rec->u4_mem_alignment = 0;
+ ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Getting MemRecords Attributes */
+ {
+ iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
+ iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+
+ s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
+ s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+
+ s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
+ s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
+ s_fill_mem_rec_ip.u4_max_wd = width;
+ s_fill_mem_rec_ip.u4_max_ht = height;
+ s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
+ s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
+ s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+
+ status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Fill memory records failed = 0x%x\n",
+ s_fill_mem_rec_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Allocating Memory for Mem Records */
+ {
+ WORD32 total_size;
+ iv_mem_rec_t *ps_mem_rec;
+ total_size = 0;
+ ps_mem_rec = mMemRecords;
+
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ive_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == nullptr) {
+ ALOGE("Allocation failure for mem record id %zu size %u\n", i,
+ ps_mem_rec->u4_mem_size);
+ return C2_CORRUPTED;
+
+ }
+ total_size += ps_mem_rec->u4_mem_size;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Codec Instance Creation */
+ {
+ ive_init_ip_t s_init_ip;
+ ive_init_op_t s_init_op;
+
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+ mCodecCtx->pv_fxns = (void *)ive_api_function;
+
+ s_init_ip.u4_size = sizeof(ive_init_ip_t);
+ s_init_op.u4_size = sizeof(ive_init_op_t);
+
+ s_init_ip.e_cmd = IV_CMD_INIT;
+ s_init_ip.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.ps_mem_rec = mMemRecords;
+ s_init_ip.u4_max_wd = width;
+ s_init_ip.u4_max_ht = height;
+ s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_init_ip.u4_max_level = mAVCEncLevel;
+ s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+
+ if (mReconEnable || mPSNREnable) {
+ s_init_ip.u4_enable_recon = 1;
+ } else {
+ s_init_ip.u4_enable_recon = 0;
+ }
+ s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+ s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
+ s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+ s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
+ s_init_ip.u4_num_bframes = mBframes;
+ s_init_ip.e_content_type = IV_PROGRESSIVE;
+ s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ s_init_ip.e_slice_mode = mSliceMode;
+ s_init_ip.u4_slice_param = mSliceParam;
+ s_init_ip.e_arch = mArch;
+ s_init_ip.e_soc = DEFAULT_SOC;
+
+ status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Get Codec Version */
+ logVersion();
+
+ /* set processor details */
+ setNumCores();
+
+ /* Video control Set Frame dimensions */
+ setDimensions();
+
+ /* Video control Set Frame rates */
+ setFrameRate();
+
+ /* Video control Set IPE Params */
+ setIpeParams();
+
+ /* Video control Set Bitrate */
+ setBitRate();
+
+ /* Video control Set QP */
+ setQp();
+
+ /* Video control Set AIR params */
+ setAirParams();
+
+ /* Video control Set VBV params */
+ setVbvParams();
+
+ /* Video control Set Motion estimation params */
+ setMeParams();
+
+ /* Video control Set GOP params */
+ setGopParams();
+
+ /* Video control Set Deblock params */
+ setDeblockParams();
+
+ /* Video control Set Profile params */
+ setProfileParams();
+
+ /* Video control Set in Encode header mode */
+ setEncMode(IVE_ENC_MODE_HEADER);
+
+ ALOGV("init_codec successfull");
+
+ mSpsPpsHeaderReceived = false;
+ mStarted = true;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::releaseEncoder() {
+ IV_STATUS_T status = IV_SUCCESS;
+ iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+ iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ if (!mStarted) {
+ return C2_OK;
+ }
+
+ s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+ s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+ s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+ s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
+
+ status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to retrieve memory records = 0x%x\n",
+ s_retrieve_mem_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ /* Free memory records */
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
+ if (ps_mem_rec) ive_aligned_free(ps_mem_rec->pv_base);
+ else {
+ ALOGE("memory record is null.");
+ return C2_CORRUPTED;
+ }
+ ps_mem_rec++;
+ }
+
+ if (mMemRecords) free(mMemRecords);
+
+ // clear other pointers into the space being free()d
+ mCodecCtx = nullptr;
+
+ mStarted = false;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp) {
+ iv_raw_buf_t *ps_inp_raw_buf;
+
+ ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
+ ps_encode_ip->s_out_buf.pv_buf = base;
+ ps_encode_ip->s_out_buf.u4_bytes = 0;
+ ps_encode_ip->s_out_buf.u4_bufsize = capacity;
+ ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
+ ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
+
+ ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
+ ps_encode_ip->pv_bufs = nullptr;
+ ps_encode_ip->pv_mb_info = nullptr;
+ ps_encode_ip->pv_pic_info = nullptr;
+ ps_encode_ip->u4_mb_info_type = 0;
+ ps_encode_ip->u4_pic_info_type = 0;
+ ps_encode_ip->u4_is_last = 0;
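+ // The 64-bit timestamp is carried through the codec split into 32-bit
+ // high/low fields and reassembled on the output side.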
+ ps_encode_ip->u4_timestamp_high = timestamp >> 32;
+ ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+ ps_encode_op->s_out_buf.pv_buf = nullptr;
+
+ /* Initialize color formats */
+ memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
+ ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
+ ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
+ if (input == nullptr) {
+ if (mSawInputEOS){
+ ps_encode_ip->u4_is_last = 1;
+ }
+ return C2_OK;
+ }
+
+ if (input->width() < mSize->width ||
+ input->height() < mSize->height) {
+ /* Expect width height to be configured */
+ ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(),
+ mSize->width, input->height(), mSize->height);
+ return C2_BAD_VALUE;
+ }
+ ALOGV("width = %d, height = %d", input->width(), input->height());
+ const C2PlanarLayout &layout = input->layout();
+ uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *vPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ // width and height are always even (the size config uses a step of 2)
+ CHECK_EQ((width & 1u), 0u);
+ CHECK_EQ((height & 1u), 0u);
+ size_t yPlaneSize = width * height;
+
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
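+ // RGB(A) input is converted to planar I420 in a scratch buffer; the buffer
+ // is tracked in mConversionBuffersInUse until the codec releases it.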
+ ALOGV("yPlaneSize = %zu", yPlaneSize);
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input);
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*input)) {
+ ALOGE("input is not YUV420");
+ return C2_BAD_VALUE;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 1
+ && layout.planes[layout.PLANE_V].colInc == 1
+ && uStride == vStride
+ && yStride == 2 * vStride) {
+ // I420 compatible - already set up above
+ break;
+ }
+
+ // copy to I420
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ return C2_BAD_VALUE;
+ }
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGE("YUVA plane type is not supported");
+ return C2_BAD_VALUE;
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ {
+ // input buffer is supposed to be const but Ittiam API wants bare pointer.
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+ ps_inp_raw_buf->apv_bufs[2] = vPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
+ ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ ps_inp_raw_buf->au4_strd[2] = vStride;
+ break;
+ }
+
+ case IV_YUV_422ILE:
+ {
+ // TODO
+ // ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ // ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
+ // ps_inp_raw_buf->au4_ht[0] = mHeight;
+ // ps_inp_raw_buf->au4_strd[0] = mStride * 2;
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width();
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ break;
+ }
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ IV_STATUS_T status;
+ WORD32 timeDelay, timeTaken;
+ uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+ // Initialize encoder if not already initialized
+ if (mCodecCtx == nullptr) {
+ if (C2_OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ if (mSignalledError) {
+ return;
+ }
+
+ // while (!mSawOutputEOS && !outQueue.empty()) {
+ c2_status_t error;
+ ive_video_encode_ip_t s_encode_ip;
+ ive_video_encode_op_t s_encode_op;
+
+ if (!mSpsPpsHeaderReceived) {
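+ // The first call to the codec produces the SPS/PPS header (the encoder was
+ // initialized in header mode); it is forwarded as codec-specific data (CSD).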
+ constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
+ uint8_t header[kHeaderLength];
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, timestamp);
+ if (error != C2_OK) {
+ ALOGE("setEncodeArgs failed: %d", error);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Encode header failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ return;
+ } else {
+ ALOGV("Bytes Generated in header %d\n",
+ s_encode_op.s_out_buf.u4_bytes);
+ }
+
+ mSpsPpsHeaderReceived = true;
+
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+
+ DUMP_TO_FILE(
+ mOutFile, csd->m.value, csd->flexCount());
+ }
+
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
+ std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
+ lock.unlock();
+
+ if (bitrate != mBitrate) {
+ mBitrate = bitrate;
+ setBitRate();
+ }
+
+ if (intraRefresh != mIntraRefresh) {
+ mIntraRefresh = intraRefresh;
+ setAirParams();
+ }
+
+ if (requestSync != mRequestSync) {
+ // we can handle IDR immediately
+ if (requestSync->value) {
+ // unset request
+ C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
+ ALOGV("Got sync request");
+ setFrameType(IV_IDR_FRAME);
+ }
+ mRequestSync = requestSync;
+ }
+ }
+
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ mSawInputEOS = true;
+ }
+
+ /* In normal mode, store inputBufferInfo and this will be returned
+ when encoder consumes this input */
+ // if (!mInputDataIsMeta && (inputBufferInfo != NULL)) {
+ // for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
+ // if (NULL == mInputBufferInfo[i]) {
+ // mInputBufferInfo[i] = inputBufferInfo;
+ // break;
+ // }
+ // }
+ // }
+ std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2Buffer> inputBuffer;
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ view = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (view->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", view->error());
+ return;
+ }
+ }
+
+ std::shared_ptr<C2LinearBlock> block;
+
+ do {
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetch linear block err = %d", err);
+ work->result = err;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error() != C2_OK) {
+ ALOGE("write view map err = %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), timestamp);
+ if (error != C2_OK) {
+ ALOGE("setEncodeArgs failed : %d", error);
+ mSignalledError = true;
+ work->result = error;
+ return;
+ }
+
+ // DUMP_TO_FILE(
+ // mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
+ // (mHeight * mStride * 3 / 2));
+
+ GETTIME(&mTimeStart, nullptr);
+ /* Compute time elapsed from end of previous encode()
+ * to start of current encode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+ // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
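+ // Output buffer was too small; double it and retry this frame via the
+ // enclosing do/while loop.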
+ mOutBufferSize *= 2;
+ continue;
+ }
+ ALOGE("Encode Frame failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } while (IV_SUCCESS != status);
+
+ // Hold input buffer reference
+ if (inputBuffer) {
+ mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+ }
+
+ GETTIME(&mTimeEnd, nullptr);
+ /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_encode_op.s_out_buf.u4_bytes);
+
+ void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ /* If encoder frees up an input buffer, mark it as free */
+ if (freed != nullptr) {
+ if (mBuffers.count(freed) == 0u) {
+ ALOGD("buffer not tracked");
+ } else {
+ // Release input buffer reference
+ mBuffers.erase(freed);
+ mConversionBuffersInUse.erase(freed);
+ }
+ }
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp =
+ ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
+ work->worklets.front()->output.buffers.clear();
+
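+ // Wrap the encoded bytes in a linear output buffer; IDR frames are tagged as
+ // key frames so downstream components can identify sync points.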
+ if (s_encode_op.s_out_buf.u4_bytes) {
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
+ if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+
+ if (s_encode_op.u4_is_last) {
+ // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+ mSawOutputEOS = true;
+ } else {
+ // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ }
+}
+
+c2_status_t C2SoftAvcEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ (void)drainMode;
+ (void)pool;
+ return C2_OK;
+}
+
+
+class C2SoftAvcEncFactory : public C2ComponentFactory {
+public:
+ C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAvcEnc(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAvcEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAvcEncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAvcEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
new file mode 100644
index 0000000..aa3ca61
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AVC_ENC_H__
+#define ANDROID_C2_SOFT_AVC_ENC_H__
+
+#include <map>
+
+#include <utils/Vector.h>
+
+#include <SimpleC2Component.h>
+
+#include "ih264_typedefs.h"
+#include "iv2.h"
+#include "ive2.h"
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+#define LEN_STATUS_BUFFER (10 * 1024)
+#define MAX_VBV_BUFF_SIZE (120 * 16384)
+#define MAX_NUM_IO_BUFS 3
+
+#define DEFAULT_MAX_REF_FRM 2
+#define DEFAULT_MAX_REORDER_FRM 0
+#define DEFAULT_QP_MIN 10
+#define DEFAULT_QP_MAX 40
+#define DEFAULT_MAX_BITRATE 20000000
+#define DEFAULT_MAX_SRCH_RANGE_X 256
+#define DEFAULT_MAX_SRCH_RANGE_Y 256
+#define DEFAULT_MAX_FRAMERATE 120000
+#define DEFAULT_NUM_CORES 1
+#define DEFAULT_NUM_CORES_PRE_ENC 0
+#define DEFAULT_FPS 30
+#define DEFAULT_ENC_SPEED IVE_NORMAL
+
+#define DEFAULT_MEM_REC_CNT 0
+#define DEFAULT_RECON_ENABLE 0
+#define DEFAULT_CHKSUM_ENABLE 0
+#define DEFAULT_START_FRM 0
+#define DEFAULT_NUM_FRMS 0xFFFFFFFF
+#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU
+#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P
+#define DEFAULT_LOOPBACK 0
+#define DEFAULT_SRC_FRAME_RATE 30
+#define DEFAULT_TGT_FRAME_RATE 30
+#define DEFAULT_MAX_WD 1920
+#define DEFAULT_MAX_HT 1920
+#define DEFAULT_MAX_LEVEL 41
+#define DEFAULT_STRIDE 0
+#define DEFAULT_WD 1280
+#define DEFAULT_HT 720
+#define DEFAULT_PSNR_ENABLE 0
+#define DEFAULT_ME_SPEED 100
+#define DEFAULT_ENABLE_FAST_SAD 0
+#define DEFAULT_ENABLE_ALT_REF 0
+#define DEFAULT_RC_MODE IVE_RC_STORAGE
+#define DEFAULT_BITRATE 6000000
+#define DEFAULT_I_QP 22
+#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_P_QP 28
+#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_B_QP 22
+#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_AIR IVE_AIR_MODE_NONE
+#define DEFAULT_AIR_REFRESH_PERIOD 30
+#define DEFAULT_SRCH_RNG_X 64
+#define DEFAULT_SRCH_RNG_Y 48
+#define DEFAULT_I_INTERVAL 30
+#define DEFAULT_IDR_INTERVAL 1000
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_DISABLE_DEBLK_LEVEL 0
+#define DEFAULT_HPEL 1
+#define DEFAULT_QPEL 1
+#define DEFAULT_I4 1
+#define DEFAULT_EPROFILE IV_PROFILE_BASE
+#define DEFAULT_ENTROPY_MODE 0
+#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE
+#define DEFAULT_SLICE_PARAM 256
+#define DEFAULT_ARCH ARCH_ARM_A9Q
+#define DEFAULT_SOC SOC_GENERIC
+#define DEFAULT_INTRA4x4 0
+#define STRLENGTH 500
+#define DEFAULT_CONSTRAINED_INTRA 0
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
+#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ive_aligned_free(buf) free(buf)
+
+struct C2SoftAvcEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAvcEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+protected:
+ virtual ~C2SoftAvcEnc();
+
+private:
+ // OMX input buffer's timestamp and flags
+ typedef struct {
+ int64_t mTimeUs;
+ int32_t mFlags;
+ } InputBufferInfo;
+
+ std::shared_ptr<IntfImpl> mIntf;
+
+ int32_t mStride;
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+
+ IV_PROFILE_T mAVCEncProfile __unused;
+ WORD32 mAVCEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+
+ bool mSawInputEOS;
+ bool mSawOutputEOS;
+ bool mSignalledError;
+ bool mIntra4x4;
+ bool mEnableFastSad;
+ bool mEnableAltRef;
+ bool mReconEnable;
+ bool mPSNREnable;
+ bool mEntropyMode;
+ bool mConstrainedIntraFlag;
+ IVE_SPEED_CONFIG mEncSpeed;
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by codec
+ size_t mNumCores; // Number of cores used by the codec
+
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+
+ uint32_t mOutBufferSize;
+ UWORD32 mHeaderGenerated;
+ UWORD32 mBframes;
+ IV_ARCH_T mArch;
+ IVE_SLICE_MODE_T mSliceMode;
+ UWORD32 mSliceParam;
+ bool mHalfPelEnable;
+ UWORD32 mIInterval;
+ UWORD32 mIDRInterval;
+ UWORD32 mDisableDeblkLevel;
+ std::map<const void *, std::shared_ptr<C2Buffer>> mBuffers;
+ MemoryBlockPool mConversionBuffers;
+ std::map<const void *, MemoryBlock> mConversionBuffersInUse;
+
+ void initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+
+ c2_status_t setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type);
+ c2_status_t setQp();
+ c2_status_t setEncMode(IVE_ENC_MODE_T e_enc_mode);
+ c2_status_t setDimensions();
+ c2_status_t setNumCores();
+ c2_status_t setFrameRate();
+ c2_status_t setIpeParams();
+ c2_status_t setBitRate();
+ c2_status_t setAirParams();
+ c2_status_t setMeParams();
+ c2_status_t setGopParams();
+ c2_status_t setProfileParams();
+ c2_status_t setDeblockParams();
+ c2_status_t setVbvParams();
+ void logVersion();
+ c2_status_t setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp);
+
+ C2_DO_NOT_COPY(C2SoftAvcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avce_input"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output"
+#define OUTPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ if (fp != NULL) \
+ fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AVC_ENC_H__
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
new file mode 100644
index 0000000..ad456e2
--- /dev/null
+++ b/media/codec2/components/base/Android.bp
@@ -0,0 +1,141 @@
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libstagefright_soft_c2-defaults instead
+cc_library_shared {
+ name: "libstagefright_soft_c2common",
+ defaults: ["libstagefright_codec2-impl-defaults"],
+ vendor_available: true,
+
+ srcs: [
+ "SimpleC2Component.cpp",
+ "SimpleC2Interface.cpp",
+ ],
+
+ export_include_dirs: [
+ "include",
+ ],
+
+ export_shared_lib_headers: [
+ "libstagefright_ccodec_utils",
+ ],
+
+ shared_libs: [
+ "libcutils", // for properties
+ "liblog", // for ALOG
+ "libstagefright_ccodec_utils", // for ImageCopy
+ "libstagefright_foundation", // for Mutexed
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+
+ ldflags: ["-Wl,-Bsymbolic"],
+}
+
+// public dependency for software codec implementation
+// to be used by code under media/codecs/* only as its stability is not guaranteed
+cc_defaults {
+ name: "libstagefright_soft_c2-defaults",
+ defaults: ["libstagefright_codec2-impl-defaults"],
+ vendor_available: true,
+
+ export_shared_lib_headers: [
+ "libstagefright_ccodec_utils",
+ ],
+
+ shared_libs: [
+ "libcutils", // for properties
+ "liblog", // for ALOG
+ "libstagefright_foundation", // for ColorUtils and MIME
+ "libstagefright_ccodec_utils", // for ImageCopy
+ "libstagefright_soft_c2common",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+
+ ldflags: ["-Wl,-Bsymbolic"],
+}
+
+// public dependency for software codec implementation
+// to be used by code under media/codecs/* only
+cc_defaults {
+ name: "libstagefright_soft_c2_sanitize_all-defaults",
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+}
+
+// public dependency for software codec implementation
+// to be used by code under media/codecs/* only
+cc_defaults {
+ name: "libstagefright_soft_c2_sanitize_signed-defaults",
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+}
+
+// TEMP: used by cheets2 project - remove when no longer used
+cc_library_shared {
+ name: "libstagefright_simple_c2component",
+ vendor_available: true,
+
+ srcs: [
+ "SimpleC2Interface.cpp",
+ ],
+
+ local_include_dirs: [
+ "include",
+ ],
+
+ export_include_dirs: [
+ "include",
+ ],
+
+ shared_libs: [
+ "libcutils",
+ "liblog",
+ "libstagefright_codec2",
+ "libstagefright_codec2_vndk",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+
+ ldflags: ["-Wl,-Bsymbolic"],
+}
+
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
new file mode 100644
index 0000000..7990ee5
--- /dev/null
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -0,0 +1,562 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleC2Component"
+#include <log/log.h>
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <inttypes.h>
+
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Component.h>
+
+namespace android {
+
+std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
+ std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
+ mQueue.pop_front();
+ return work;
+}
+
+void SimpleC2Component::WorkQueue::push_back(std::unique_ptr<C2Work> work) {
+ mQueue.push_back({ std::move(work), NO_DRAIN });
+}
+
+bool SimpleC2Component::WorkQueue::empty() const {
+ return mQueue.empty();
+}
+
+void SimpleC2Component::WorkQueue::clear() {
+ mQueue.clear();
+}
+
+uint32_t SimpleC2Component::WorkQueue::drainMode() const {
+ return mQueue.front().drainMode;
+}
+
+void SimpleC2Component::WorkQueue::markDrain(uint32_t drainMode) {
+ mQueue.push_back({ nullptr, drainMode });
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+SimpleC2Component::WorkHandler::WorkHandler() : mRunning(false) {}
+
+void SimpleC2Component::WorkHandler::setComponent(
+ const std::shared_ptr<SimpleC2Component> &thiz) {
+ mThiz = thiz;
+}
+
+static void Reply(const sp<AMessage> &msg, int32_t *err = nullptr) {
+ sp<AReplyToken> replyId;
+ CHECK(msg->senderAwaitsResponse(&replyId));
+ sp<AMessage> reply = new AMessage;
+ if (err) {
+ reply->setInt32("err", *err);
+ }
+ reply->postReply(replyId);
+}
+
+void SimpleC2Component::WorkHandler::onMessageReceived(const sp<AMessage> &msg) {
+ std::shared_ptr<SimpleC2Component> thiz = mThiz.lock();
+ if (!thiz) {
+ ALOGD("component not yet set; msg = %s", msg->debugString().c_str());
+ sp<AReplyToken> replyId;
+ if (msg->senderAwaitsResponse(&replyId)) {
+ sp<AMessage> reply = new AMessage;
+ reply->setInt32("err", C2_CORRUPTED);
+ reply->postReply(replyId);
+ }
+ return;
+ }
+
+ switch (msg->what()) {
+ case kWhatProcess: {
+ if (mRunning) {
+ if (thiz->processQueue()) {
+ (new AMessage(kWhatProcess, this))->post();
+ }
+ } else {
+ ALOGV("Ignore process message as we're not running");
+ }
+ break;
+ }
+ case kWhatInit: {
+ int32_t err = thiz->onInit();
+ Reply(msg, &err);
+ [[fallthrough]];
+ }
+ case kWhatStart: {
+ mRunning = true;
+ break;
+ }
+ case kWhatStop: {
+ int32_t err = thiz->onStop();
+ Reply(msg, &err);
+ break;
+ }
+ case kWhatReset: {
+ thiz->onReset();
+ mRunning = false;
+ Reply(msg);
+ break;
+ }
+ case kWhatRelease: {
+ thiz->onRelease();
+ mRunning = false;
+ Reply(msg);
+ break;
+ }
+ default: {
+ ALOGD("Unrecognized msg: %d", msg->what());
+ break;
+ }
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+namespace {
+
+struct DummyReadView : public C2ReadView {
+ DummyReadView() : C2ReadView(C2_NO_INIT) {}
+};
+
+} // namespace
+
+SimpleC2Component::SimpleC2Component(
+ const std::shared_ptr<C2ComponentInterface> &intf)
+ : mDummyReadView(DummyReadView()),
+ mIntf(intf),
+ mLooper(new ALooper),
+ mHandler(new WorkHandler) {
+ mLooper->setName(intf->getName().c_str());
+ (void)mLooper->registerHandler(mHandler);
+ mLooper->start(false, false, ANDROID_PRIORITY_VIDEO);
+}
+
+SimpleC2Component::~SimpleC2Component() {
+ mLooper->unregisterHandler(mHandler->id());
+ (void)mLooper->stop();
+}
+
+c2_status_t SimpleC2Component::setListener_vb(
+ const std::shared_ptr<C2Component::Listener> &listener, c2_blocking_t mayBlock) {
+ mHandler->setComponent(shared_from_this());
+
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState == RUNNING) {
+ if (listener) {
+ return C2_BAD_STATE;
+ } else if (!mayBlock) {
+ return C2_BLOCKING;
+ }
+ }
+ state->mListener = listener;
+ // TODO: wait for listener change to have taken place before returning
+ // (e.g. if there is an ongoing listener callback)
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::queue_nb(std::list<std::unique_ptr<C2Work>> * const items) {
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState != RUNNING) {
+ return C2_BAD_STATE;
+ }
+ }
+ bool queueWasEmpty = false;
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ queueWasEmpty = queue->empty();
+ while (!items->empty()) {
+ queue->push_back(std::move(items->front()));
+ items->pop_front();
+ }
+ }
+ if (queueWasEmpty) {
+ (new AMessage(WorkHandler::kWhatProcess, mHandler))->post();
+ }
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::announce_nb(const std::vector<C2WorkOutline> &items) {
+ (void)items;
+ return C2_OMITTED;
+}
+
+c2_status_t SimpleC2Component::flush_sm(
+ flush_mode_t flushMode, std::list<std::unique_ptr<C2Work>>* const flushedWork) {
+ (void)flushMode;
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState != RUNNING) {
+ return C2_BAD_STATE;
+ }
+ }
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ queue->incGeneration();
+ // TODO: queue->splicedBy(flushedWork, flushedWork->end());
+ while (!queue->empty()) {
+ std::unique_ptr<C2Work> work = queue->pop_front();
+ if (work) {
+ flushedWork->push_back(std::move(work));
+ }
+ }
+ }
+ {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ while (!pending->empty()) {
+ flushedWork->push_back(std::move(pending->begin()->second));
+ pending->erase(pending->begin());
+ }
+ }
+
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::drain_nb(drain_mode_t drainMode) {
+ if (drainMode == DRAIN_CHAIN) {
+ return C2_OMITTED;
+ }
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState != RUNNING) {
+ return C2_BAD_STATE;
+ }
+ }
+ bool queueWasEmpty = false;
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ queueWasEmpty = queue->empty();
+ queue->markDrain(drainMode);
+ }
+ if (queueWasEmpty) {
+ (new AMessage(WorkHandler::kWhatProcess, mHandler))->post();
+ }
+
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::start() {
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState == RUNNING) {
+ return C2_BAD_STATE;
+ }
+ bool needsInit = (state->mState == UNINITIALIZED);
+ state.unlock();
+ if (needsInit) {
+ sp<AMessage> reply;
+ (new AMessage(WorkHandler::kWhatInit, mHandler))->postAndAwaitResponse(&reply);
+ int32_t err;
+ CHECK(reply->findInt32("err", &err));
+ if (err != C2_OK) {
+ return (c2_status_t)err;
+ }
+ } else {
+ (new AMessage(WorkHandler::kWhatStart, mHandler))->post();
+ }
+ state.lock();
+ state->mState = RUNNING;
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::stop() {
+ ALOGV("stop");
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ if (state->mState != RUNNING) {
+ return C2_BAD_STATE;
+ }
+ state->mState = STOPPED;
+ }
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ queue->clear();
+ }
+ {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ pending->clear();
+ }
+ sp<AMessage> reply;
+ (new AMessage(WorkHandler::kWhatStop, mHandler))->postAndAwaitResponse(&reply);
+ int32_t err;
+ CHECK(reply->findInt32("err", &err));
+ if (err != C2_OK) {
+ return (c2_status_t)err;
+ }
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::reset() {
+ ALOGV("reset");
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ state->mState = UNINITIALIZED;
+ }
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ queue->clear();
+ }
+ {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ pending->clear();
+ }
+ sp<AMessage> reply;
+ (new AMessage(WorkHandler::kWhatReset, mHandler))->postAndAwaitResponse(&reply);
+ return C2_OK;
+}
+
+c2_status_t SimpleC2Component::release() {
+ ALOGV("release");
+ sp<AMessage> reply;
+ (new AMessage(WorkHandler::kWhatRelease, mHandler))->postAndAwaitResponse(&reply);
+ return C2_OK;
+}
+
+std::shared_ptr<C2ComponentInterface> SimpleC2Component::intf() {
+ return mIntf;
+}
+
+namespace {
+
+std::list<std::unique_ptr<C2Work>> vec(std::unique_ptr<C2Work> &work) {
+ std::list<std::unique_ptr<C2Work>> ret;
+ ret.push_back(std::move(work));
+ return ret;
+}
+
+} // namespace
+
+void SimpleC2Component::finish(
+ uint64_t frameIndex, std::function<void(const std::unique_ptr<C2Work> &)> fillWork) {
+ std::unique_ptr<C2Work> work;
+ {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ if (pending->count(frameIndex) == 0) {
+ ALOGW("unknown frame index: %" PRIu64, frameIndex);
+ return;
+ }
+ work = std::move(pending->at(frameIndex));
+ pending->erase(frameIndex);
+ }
+ if (work) {
+ fillWork(work);
+ std::shared_ptr<C2Component::Listener> listener = mExecState.lock()->mListener;
+ listener->onWorkDone_nb(shared_from_this(), vec(work));
+ ALOGV("returning pending work");
+ }
+}
+
+void SimpleC2Component::cloneAndSend(
+ uint64_t frameIndex,
+ const std::unique_ptr<C2Work> &currentWork,
+ std::function<void(const std::unique_ptr<C2Work> &)> fillWork) {
+ std::unique_ptr<C2Work> work(new C2Work);
+ if (currentWork->input.ordinal.frameIndex == frameIndex) {
+ work->input.flags = currentWork->input.flags;
+ work->input.ordinal = currentWork->input.ordinal;
+ } else {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ if (pending->count(frameIndex) == 0) {
+ ALOGW("unknown frame index: %" PRIu64, frameIndex);
+ return;
+ }
+ work->input.flags = pending->at(frameIndex)->input.flags;
+ work->input.ordinal = pending->at(frameIndex)->input.ordinal;
+ }
+ work->worklets.emplace_back(new C2Worklet);
+ if (work) {
+ fillWork(work);
+ std::shared_ptr<C2Component::Listener> listener = mExecState.lock()->mListener;
+ listener->onWorkDone_nb(shared_from_this(), vec(work));
+ ALOGV("cloned and sending work");
+ }
+}
+
+bool SimpleC2Component::processQueue() {
+ std::unique_ptr<C2Work> work;
+ uint64_t generation;
+ int32_t drainMode;
+ bool isFlushPending = false;
+ bool hasQueuedWork = false;
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ if (queue->empty()) {
+ return false;
+ }
+
+ generation = queue->generation();
+ drainMode = queue->drainMode();
+ isFlushPending = queue->popPendingFlush();
+ work = queue->pop_front();
+ hasQueuedWork = !queue->empty();
+ }
+ if (isFlushPending) {
+ ALOGV("processing pending flush");
+ c2_status_t err = onFlush_sm();
+ if (err != C2_OK) {
+ ALOGD("flush err: %d", err);
+ // TODO: error
+ }
+ }
+
+ if (!mOutputBlockPool) {
+ c2_status_t err = [this] {
+ // TODO: don't use query_vb
+ C2StreamFormatConfig::output outputFormat(0u);
+ std::vector<std::unique_ptr<C2Param>> params;
+ c2_status_t err = intf()->query_vb(
+ { &outputFormat },
+ { C2PortBlockPoolsTuning::output::PARAM_TYPE },
+ C2_DONT_BLOCK,
+ &params);
+ if (err != C2_OK && err != C2_BAD_INDEX) {
+ ALOGD("query err = %d", err);
+ return err;
+ }
+ C2BlockPool::local_id_t poolId =
+ outputFormat.value == C2FormatVideo
+ ? C2BlockPool::BASIC_GRAPHIC
+ : C2BlockPool::BASIC_LINEAR;
+ if (params.size()) {
+ C2PortBlockPoolsTuning::output *outputPools =
+ C2PortBlockPoolsTuning::output::From(params[0].get());
+ if (outputPools && outputPools->flexCount() >= 1) {
+ poolId = outputPools->m.values[0];
+ }
+ }
+
+ err = GetCodec2BlockPool(poolId, shared_from_this(), &mOutputBlockPool);
+ ALOGD("Using output block pool with poolID %llu => got %llu - %d",
+ (unsigned long long)poolId,
+ (unsigned long long)(
+ mOutputBlockPool ? mOutputBlockPool->getLocalId() : 111000111),
+ err);
+ return err;
+ }();
+ if (err != C2_OK) {
+ Mutexed<ExecState>::Locked state(mExecState);
+ std::shared_ptr<C2Component::Listener> listener = state->mListener;
+ state.unlock();
+ listener->onError_nb(shared_from_this(), err);
+ return hasQueuedWork;
+ }
+ }
+
+ if (!work) {
+ c2_status_t err = drain(drainMode, mOutputBlockPool);
+ if (err != C2_OK) {
+ Mutexed<ExecState>::Locked state(mExecState);
+ std::shared_ptr<C2Component::Listener> listener = state->mListener;
+ state.unlock();
+ listener->onError_nb(shared_from_this(), err);
+ }
+ return hasQueuedWork;
+ }
+
+ {
+ std::vector<C2Param *> updates;
+ for (const std::unique_ptr<C2Param> &param : work->input.configUpdate) {
+ if (param) {
+ updates.emplace_back(param.get());
+ }
+ }
+ if (!updates.empty()) {
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = intf()->config_vb(updates, C2_MAY_BLOCK, &failures);
+ ALOGD("applied %zu configUpdates => %s (%d)", updates.size(), asString(err), err);
+ }
+ }
+
+ ALOGV("start processing frame #%" PRIu64, work->input.ordinal.frameIndex.peeku());
+ process(work, mOutputBlockPool);
+ ALOGV("processed frame #%" PRIu64, work->input.ordinal.frameIndex.peeku());
+ {
+ Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+ if (queue->generation() != generation) {
+ ALOGD("work form old generation: was %" PRIu64 " now %" PRIu64,
+ queue->generation(), generation);
+ work->result = C2_NOT_FOUND;
+ queue.unlock();
+ {
+ Mutexed<ExecState>::Locked state(mExecState);
+ std::shared_ptr<C2Component::Listener> listener = state->mListener;
+ state.unlock();
+ listener->onWorkDone_nb(shared_from_this(), vec(work));
+ }
+ queue.lock();
+ return hasQueuedWork;
+ }
+ }
+ if (work->workletsProcessed != 0u) {
+ Mutexed<ExecState>::Locked state(mExecState);
+ ALOGV("returning this work");
+ std::shared_ptr<C2Component::Listener> listener = state->mListener;
+ state.unlock();
+ listener->onWorkDone_nb(shared_from_this(), vec(work));
+ } else {
+ ALOGV("queue pending work");
+ work->input.buffers.clear();
+ std::unique_ptr<C2Work> unexpected;
+ {
+ Mutexed<PendingWork>::Locked pending(mPendingWork);
+ uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
+ if (pending->count(frameIndex) != 0) {
+ unexpected = std::move(pending->at(frameIndex));
+ pending->erase(frameIndex);
+ }
+ (void)pending->insert({ frameIndex, std::move(work) });
+ }
+ if (unexpected) {
+ ALOGD("unexpected pending work");
+ unexpected->result = C2_CORRUPTED;
+ Mutexed<ExecState>::Locked state(mExecState);
+ std::shared_ptr<C2Component::Listener> listener = state->mListener;
+ state.unlock();
+ listener->onWorkDone_nb(shared_from_this(), vec(unexpected));
+ }
+ }
+ return hasQueuedWork;
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block) {
+ return createLinearBuffer(block, block->offset(), block->size());
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
+ return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block) {
+ return createGraphicBuffer(block, C2Rect(block->width(), block->height()));
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block, const C2Rect &crop) {
+ return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence()));
+}
+
+} // namespace android
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
new file mode 100644
index 0000000..c849a4e
--- /dev/null
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleC2Interface"
+#include <utils/Log.h>
+
+// use MediaDefs here vs. MediaCodecConstants as this is not MediaCodec specific/dependent
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <SimpleC2Interface.h>
+
+namespace android {
+
+/* SimpleInterface */
+
+SimpleInterface<void>::BaseParams::BaseParams(
+ const std::shared_ptr<C2ReflectorHelper> &reflector,
+ C2String name,
+ C2Component::kind_t kind,
+ C2Component::domain_t domain,
+ C2String mediaType,
+ std::vector<C2String> aliases)
+ : C2InterfaceHelper(reflector) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mName, C2_PARAMKEY_COMPONENT_NAME)
+ .withConstValue(AllocSharedString<C2ComponentNameSetting>(name.c_str()))
+ .build());
+
+ if (aliases.size()) {
+ C2String joined;
+ for (const C2String &alias : aliases) {
+ if (joined.length()) {
+ joined += ",";
+ }
+ joined += alias;
+ }
+ addParameter(
+ DefineParam(mAliases, C2_PARAMKEY_COMPONENT_ALIASES)
+ .withConstValue(AllocSharedString<C2ComponentAliasesSetting>(joined.c_str()))
+ .build());
+ }
+
+ addParameter(
+ DefineParam(mKind, C2_PARAMKEY_COMPONENT_KIND)
+ .withConstValue(new C2ComponentKindSetting(kind))
+ .build());
+
+ addParameter(
+ DefineParam(mDomain, C2_PARAMKEY_COMPONENT_DOMAIN)
+ .withConstValue(new C2ComponentDomainSetting(domain))
+ .build());
+
+ // simple interfaces have single streams
+ addParameter(
+ DefineParam(mInputStreamCount, C2_PARAMKEY_INPUT_STREAM_COUNT)
+ .withConstValue(new C2PortStreamCountTuning::input(1))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputStreamCount, C2_PARAMKEY_OUTPUT_STREAM_COUNT)
+ .withConstValue(new C2PortStreamCountTuning::output(1))
+ .build());
+
+ // set up buffer formats and allocators
+
+ // default to linear buffers and no media type
+ C2BufferData::type_t rawBufferType = C2BufferData::LINEAR;
+ C2String rawMediaType;
+ C2Allocator::id_t rawAllocator = C2AllocatorStore::DEFAULT_LINEAR;
+ C2BlockPool::local_id_t rawPoolId = C2BlockPool::BASIC_LINEAR;
+ C2BufferData::type_t codedBufferType = C2BufferData::LINEAR;
+ C2Allocator::id_t codedAllocator = C2AllocatorStore::DEFAULT_LINEAR;
+ C2BlockPool::local_id_t codedPoolId = C2BlockPool::BASIC_LINEAR;
+
+ switch (domain) {
+ case C2Component::DOMAIN_IMAGE:
+ case C2Component::DOMAIN_VIDEO:
+ // TODO: should we define raw image? The only difference is timestamp handling
+ rawBufferType = C2BufferData::GRAPHIC;
+ rawMediaType = MEDIA_MIMETYPE_VIDEO_RAW;
+ rawAllocator = C2AllocatorStore::DEFAULT_GRAPHIC;
+ rawPoolId = C2BlockPool::BASIC_GRAPHIC;
+ break;
+ case C2Component::DOMAIN_AUDIO:
+ rawBufferType = C2BufferData::LINEAR;
+ rawMediaType = MEDIA_MIMETYPE_AUDIO_RAW;
+ rawAllocator = C2AllocatorStore::DEFAULT_LINEAR;
+ rawPoolId = C2BlockPool::BASIC_LINEAR;
+ break;
+ default:
+ break;
+ }
+ bool isEncoder = kind == C2Component::KIND_ENCODER;
+
+ // handle raw decoders
+ if (mediaType == rawMediaType) {
+ codedBufferType = rawBufferType;
+ codedAllocator = rawAllocator;
+ codedPoolId = rawPoolId;
+ }
+
+ addParameter(
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(
+ 0u, isEncoder ? rawBufferType : codedBufferType))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
+ isEncoder ? rawMediaType : mediaType))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(
+ 0u, isEncoder ? codedBufferType : rawBufferType))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
+ isEncoder ? mediaType : rawMediaType))
+ .build());
+
+ C2Allocator::id_t inputAllocators[1] = { isEncoder ? rawAllocator : codedAllocator };
+ C2Allocator::id_t outputAllocators[1] = { isEncoder ? codedAllocator : rawAllocator };
+ C2BlockPool::local_id_t outputPoolIds[1] = { isEncoder ? codedPoolId : rawPoolId };
+
+ addParameter(
+ DefineParam(mInputAllocators, C2_PARAMKEY_INPUT_ALLOCATORS)
+ .withDefault(C2PortAllocatorsTuning::input::AllocShared(inputAllocators))
+ .withFields({ C2F(mInputAllocators, m.values[0]).any(),
+ C2F(mInputAllocators, m.values).inRange(0, 1) })
+ .withSetter(Setter<C2PortAllocatorsTuning::input>::NonStrictValuesWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mOutputAllocators, C2_PARAMKEY_OUTPUT_ALLOCATORS)
+ .withDefault(C2PortAllocatorsTuning::output::AllocShared(outputAllocators))
+ .withFields({ C2F(mOutputAllocators, m.values[0]).any(),
+ C2F(mOutputAllocators, m.values).inRange(0, 1) })
+ .withSetter(Setter<C2PortAllocatorsTuning::output>::NonStrictValuesWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mOutputPoolIds, C2_PARAMKEY_OUTPUT_BLOCK_POOLS)
+ .withDefault(C2PortBlockPoolsTuning::output::AllocShared(outputPoolIds))
+ .withFields({ C2F(mOutputPoolIds, m.values[0]).any(),
+ C2F(mOutputPoolIds, m.values).inRange(0, 1) })
+ .withSetter(Setter<C2PortBlockPoolsTuning::output>::NonStrictValuesWithNoDeps)
+ .build());
+
+ // add stateless params
+ addParameter(
+ DefineParam(mSubscribedParamIndices, C2_PARAMKEY_SUBSCRIBED_PARAM_INDICES)
+ .withDefault(C2SubscribedParamIndicesTuning::AllocShared(0u))
+ .withFields({ C2F(mSubscribedParamIndices, m.values[0]).any(),
+ C2F(mSubscribedParamIndices, m.values).any() })
+ .withSetter(Setter<C2SubscribedParamIndicesTuning>::NonStrictValuesWithNoDeps)
+ .build());
+
+ /* TODO
+
+ addParameter(
+ DefineParam(mCurrentWorkOrdinal, C2_PARAMKEY_CURRENT_WORK)
+ .withDefault(new C2CurrentWorkTuning())
+ .withFields({ C2F(mCurrentWorkOrdinal, m.timeStamp).any(),
+ C2F(mCurrentWorkOrdinal, m.frameIndex).any(),
+ C2F(mCurrentWorkOrdinal, m.customOrdinal).any() })
+ .withSetter(Setter<C2CurrentWorkTuning>::NonStrictValuesWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mLastInputQueuedWorkOrdinal, C2_PARAMKEY_LAST_INPUT_QUEUED)
+ .withDefault(new C2LastWorkQueuedTuning::input())
+ .withFields({ C2F(mLastInputQueuedWorkOrdinal, m.timeStamp).any(),
+ C2F(mLastInputQueuedWorkOrdinal, m.frameIndex).any(),
+ C2F(mLastInputQueuedWorkOrdinal, m.customOrdinal).any() })
+ .withSetter(Setter<C2LastWorkQueuedTuning::input>::NonStrictValuesWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mLastOutputQueuedWorkOrdinal, C2_PARAMKEY_LAST_OUTPUT_QUEUED)
+ .withDefault(new C2LastWorkQueuedTuning::output())
+ .withFields({ C2F(mLastOutputQueuedWorkOrdinal, m.timeStamp).any(),
+ C2F(mLastOutputQueuedWorkOrdinal, m.frameIndex).any(),
+ C2F(mLastOutputQueuedWorkOrdinal, m.customOrdinal).any() })
+ .withSetter(Setter<C2LastWorkQueuedTuning::output>::NonStrictValuesWithNoDeps)
+ .build());
+
+ std::shared_ptr<C2OutOfMemoryTuning> mOutOfMemory;
+
+ std::shared_ptr<C2PortConfigCounterTuning::input> mInputConfigCounter;
+ std::shared_ptr<C2PortConfigCounterTuning::output> mOutputConfigCounter;
+ std::shared_ptr<C2ConfigCounterTuning> mDirectConfigCounter;
+
+ */
+}
+
+void SimpleInterface<void>::BaseParams::noInputLatency() {
+ addParameter(
+ DefineParam(mRequestedInputDelay, C2_PARAMKEY_INPUT_DELAY_REQUEST)
+ .withConstValue(new C2PortRequestedDelayTuning::input(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::input(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noOutputLatency() {
+ addParameter(
+ DefineParam(mRequestedOutputDelay, C2_PARAMKEY_OUTPUT_DELAY_REQUEST)
+ .withConstValue(new C2PortRequestedDelayTuning::output(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::output(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noPipelineLatency() {
+ addParameter(
+ DefineParam(mRequestedPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY_REQUEST)
+ .withConstValue(new C2RequestedPipelineDelayTuning(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mActualPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY)
+ .withConstValue(new C2ActualPipelineDelayTuning(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noPrivateBuffers() {
+ addParameter(
+ DefineParam(mPrivateAllocators, C2_PARAMKEY_PRIVATE_ALLOCATORS)
+ .withConstValue(C2PrivateAllocatorsTuning::AllocShared(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mMaxPrivateBufferCount, C2_PARAMKEY_MAX_PRIVATE_BUFFER_COUNT)
+ .withConstValue(C2MaxPrivateBufferCountTuning::AllocShared(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mPrivatePoolIds, C2_PARAMKEY_PRIVATE_BLOCK_POOLS)
+ .withConstValue(C2PrivateBlockPoolsTuning::AllocShared(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noInputReferences() {
+ addParameter(
+ DefineParam(mMaxInputReferenceAge, C2_PARAMKEY_INPUT_MAX_REFERENCE_AGE)
+ .withConstValue(new C2StreamMaxReferenceAgeTuning::input(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputReferenceCount, C2_PARAMKEY_INPUT_MAX_REFERENCE_COUNT)
+ .withConstValue(new C2StreamMaxReferenceCountTuning::input(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noOutputReferences() {
+ addParameter(
+ DefineParam(mMaxOutputReferenceAge, C2_PARAMKEY_OUTPUT_MAX_REFERENCE_AGE)
+ .withConstValue(new C2StreamMaxReferenceAgeTuning::output(0u))
+ .build());
+
+ addParameter(
+ DefineParam(mMaxOutputReferenceCount, C2_PARAMKEY_OUTPUT_MAX_REFERENCE_COUNT)
+ .withConstValue(new C2StreamMaxReferenceCountTuning::output(0u))
+ .build());
+}
+
+void SimpleInterface<void>::BaseParams::noTimeStretch() {
+ addParameter(
+ DefineParam(mTimeStretch, C2_PARAMKEY_TIME_STRETCH)
+ .withConstValue(new C2ComponentTimeStretchTuning(1.f))
+ .build());
+}
+
+/*
+ Clients need to handle the following base params due to custom dependency.
+
+ std::shared_ptr<C2ApiLevelSetting> mApiLevel;
+ std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
+ std::shared_ptr<C2ComponentAttributesSetting> mAttrib;
+
+ std::shared_ptr<C2PortSuggestedBufferCountTuning::input> mSuggestedInputBufferCount;
+ std::shared_ptr<C2PortSuggestedBufferCountTuning::output> mSuggestedOutputBufferCount;
+
+ std::shared_ptr<C2TrippedTuning> mTripped;
+
+*/
+
+} // namespace android
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
new file mode 100644
index 0000000..b3a98f4
--- /dev/null
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -0,0 +1,244 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_C2_COMPONENT_H_
+#define SIMPLE_C2_COMPONENT_H_
+
+#include <list>
+#include <unordered_map>
+
+#include <C2Component.h>
+
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/Mutexed.h>
+
+namespace android {
+
+class SimpleC2Component
+ : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
+public:
+ explicit SimpleC2Component(
+ const std::shared_ptr<C2ComponentInterface> &intf);
+ virtual ~SimpleC2Component();
+
+ // C2Component
+ // From C2Component
+ virtual c2_status_t setListener_vb(
+ const std::shared_ptr<Listener> &listener, c2_blocking_t mayBlock) override;
+ virtual c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override;
+ virtual c2_status_t announce_nb(const std::vector<C2WorkOutline> &items) override;
+ virtual c2_status_t flush_sm(
+ flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) override;
+ virtual c2_status_t drain_nb(drain_mode_t mode) override;
+ virtual c2_status_t start() override;
+ virtual c2_status_t stop() override;
+ virtual c2_status_t reset() override;
+ virtual c2_status_t release() override;
+ virtual std::shared_ptr<C2ComponentInterface> intf() override;
+
+ // for handler
+ bool processQueue();
+
+protected:
+ /**
+ * Initialize internal states of the component according to the config set
+ * in the interface.
+ *
+ * This method is called during start(), but only at the first invocation or
+ * after reset().
+ */
+ virtual c2_status_t onInit() = 0;
+
+ /**
+ * Stop the component.
+ */
+ virtual c2_status_t onStop() = 0;
+
+ /**
+ * Reset the component.
+ */
+ virtual void onReset() = 0;
+
+ /**
+ * Release the component.
+ */
+ virtual void onRelease() = 0;
+
+ /**
+ * Flush the component.
+ */
+ virtual c2_status_t onFlush_sm() = 0;
+
+ /**
+ * Process the given work and finish pending work using finish().
+ *
+ * \param[in,out] work the work to process
+ * \param[in] pool the pool to use for allocating output blocks.
+ */
+ virtual void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) = 0;
+
+ /**
+ * Drain the component and finish pending work using finish().
+ *
+ * \param[in] drainMode mode of drain.
+ * \param[in] pool the pool to use for allocating output blocks.
+ *
+ * \retval C2_OK The component has drained all pending output
+ * work.
+ * \retval C2_OMITTED Unsupported mode (e.g. DRAIN_CHAIN)
+ */
+ virtual c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) = 0;
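+
+ // Illustrative shape of a minimal process() override (a hedged sketch; the
+ // MyComponent name is hypothetical). The base class calls process() from
+ // processQueue() and returns the work once workletsProcessed is non-zero:
+ //
+ //   void MyComponent::process(const std::unique_ptr<C2Work> &work,
+ //                             const std::shared_ptr<C2BlockPool> &pool) {
+ //       work->result = C2_OK;
+ //       // ... consume work->input.buffers, allocate output blocks from |pool| ...
+ //       work->worklets.front()->output.ordinal = work->input.ordinal;
+ //       work->workletsProcessed = 1u;
+ //   }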
+
+ // for derived classes
+ /**
+ * Finish pending work.
+ *
+ * This method will retrieve the pending work according to |frameIndex| and
+ * feed the work into |fillWork| function. |fillWork| must be
+ * "non-blocking". Once |fillWork| returns the filled work will be returned
+ * to the client.
+ *
+ * \param[in] frameIndex the index of the pending work
+ * \param[in] fillWork the function to fill the retrieved work.
+ */
+ void finish(uint64_t frameIndex, std::function<void(const std::unique_ptr<C2Work> &)> fillWork);
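+
+ // Illustrative usage (a hedged sketch; |outBuffer| is a hypothetical C2Buffer,
+ // e.g. one created with createGraphicBuffer()): a derived decoder that has
+ // produced the output for frame |index| would typically call
+ //
+ //   finish(index, [outBuffer](const std::unique_ptr<C2Work> &work) {
+ //       work->worklets.front()->output.buffers.clear();
+ //       work->worklets.front()->output.buffers.push_back(outBuffer);
+ //       work->worklets.front()->output.ordinal = work->input.ordinal;
+ //       work->workletsProcessed = 1u;
+ //   });
+ //
+ // so the pending entry is filled and handed back to the client via the listener.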
+
+ /**
+ * Clone pending or current work and send the work back to client.
+ *
+ * This method will retrieve and clone the pending or current work according
+ * to |frameIndex| and feed the work into |fillWork| function. |fillWork|
+ * must be "non-blocking". Once |fillWork| returns the filled work will be
+ * returned to the client.
+ *
+ * \param[in] frameIndex the index of the work
+ * \param[in] currentWork the current work under processing
+ * \param[in] fillWork the function to fill the retrieved work.
+ */
+ void cloneAndSend(
+ uint64_t frameIndex,
+ const std::unique_ptr<C2Work> &currentWork,
+ std::function<void(const std::unique_ptr<C2Work> &)> fillWork);
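+
+ // Illustrative usage (a hedged sketch; |extraBuffer| is hypothetical): a component
+ // that emits more than one output for a single input can send the extra output
+ // without consuming the pending entry, e.g.
+ //
+ //   cloneAndSend(index, work, [extraBuffer](const std::unique_ptr<C2Work> &clone) {
+ //       clone->worklets.front()->output.buffers.push_back(extraBuffer);
+ //       clone->worklets.front()->output.ordinal = clone->input.ordinal;
+ //       clone->workletsProcessed = 1u;
+ //   });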
+
+
+ std::shared_ptr<C2Buffer> createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block);
+
+ std::shared_ptr<C2Buffer> createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
+
+ std::shared_ptr<C2Buffer> createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block);
+
+ std::shared_ptr<C2Buffer> createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block,
+ const C2Rect &crop);
+
+ static constexpr uint32_t NO_DRAIN = ~0u;
+
+ C2ReadView mDummyReadView;
+
+private:
+ const std::shared_ptr<C2ComponentInterface> mIntf;
+
+ class WorkHandler : public AHandler {
+ public:
+ enum {
+ kWhatProcess,
+ kWhatInit,
+ kWhatStart,
+ kWhatStop,
+ kWhatReset,
+ kWhatRelease,
+ };
+
+ WorkHandler();
+ ~WorkHandler() override = default;
+
+ void setComponent(const std::shared_ptr<SimpleC2Component> &thiz);
+
+ protected:
+ void onMessageReceived(const sp<AMessage> &msg) override;
+
+ private:
+ std::weak_ptr<SimpleC2Component> mThiz;
+ bool mRunning;
+ };
+
+ enum {
+ UNINITIALIZED,
+ STOPPED,
+ RUNNING,
+ };
+
+ struct ExecState {
+ ExecState() : mState(UNINITIALIZED) {}
+
+ int mState;
+ std::shared_ptr<C2Component::Listener> mListener;
+ };
+ Mutexed<ExecState> mExecState;
+
+ sp<ALooper> mLooper;
+ sp<WorkHandler> mHandler;
+
+ class WorkQueue {
+ public:
+ inline WorkQueue() : mFlush(false), mGeneration(0ul) {}
+
+ inline uint64_t generation() const { return mGeneration; }
+ inline void incGeneration() { ++mGeneration; mFlush = true; }
+
+ std::unique_ptr<C2Work> pop_front();
+ void push_back(std::unique_ptr<C2Work> work);
+ bool empty() const;
+ uint32_t drainMode() const;
+ void markDrain(uint32_t drainMode);
+ inline bool popPendingFlush() {
+ bool flush = mFlush;
+ mFlush = false;
+ return flush;
+ }
+ void clear();
+
+ private:
+ struct Entry {
+ std::unique_ptr<C2Work> work;
+ uint32_t drainMode;
+ };
+
+ bool mFlush;
+ uint64_t mGeneration;
+ std::list<Entry> mQueue;
+ };
+ Mutexed<WorkQueue> mWorkQueue;
+
+ typedef std::unordered_map<uint64_t, std::unique_ptr<C2Work>> PendingWork;
+ Mutexed<PendingWork> mPendingWork;
+
+ std::shared_ptr<C2BlockPool> mOutputBlockPool;
+
+ SimpleC2Component() = delete;
+};
+
+} // namespace android
+
+#endif // SIMPLE_C2_COMPONENT_H_
diff --git a/media/codec2/components/base/include/SimpleC2Interface.h b/media/codec2/components/base/include/SimpleC2Interface.h
new file mode 100644
index 0000000..2051d3d
--- /dev/null
+++ b/media/codec2/components/base/include/SimpleC2Interface.h
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SIMPLE_C2_INTERFACE_H_
+#define ANDROID_SIMPLE_C2_INTERFACE_H_
+
+#include <C2Component.h>
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
+
+namespace android {
+
+/**
+ * Wrap a common interface object (such as Codec2Client::Interface, or C2InterfaceHelper) into
+ * a C2ComponentInterface.
+ *
+ * \param T common interface type
+ */
+template <typename T>
+class SimpleC2Interface : public C2ComponentInterface {
+public:
+ SimpleC2Interface(const char *name, c2_node_id_t id, const std::shared_ptr<T> &impl)
+ : mName(name),
+ mId(id),
+ mImpl(impl) {
+ }
+
+ ~SimpleC2Interface() override = default;
+
+ // From C2ComponentInterface
+ C2String getName() const override { return mName; }
+ c2_node_id_t getId() const override { return mId; }
+ c2_status_t query_vb(
+ const std::vector<C2Param*> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
+ return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams);
+ }
+ c2_status_t config_vb(
+ const std::vector<C2Param*> &params,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
+ return mImpl->config(params, mayBlock, failures);
+ }
+ c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+ c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+ c2_status_t querySupportedParams_nb(
+ std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
+ return mImpl->querySupportedParams(params);
+ }
+ c2_status_t querySupportedValues_vb(
+ std::vector<C2FieldSupportedValuesQuery> &fields,
+ c2_blocking_t mayBlock) const override {
+ return mImpl->querySupportedValues(fields, mayBlock);
+ }
+
+private:
+ C2String mName;
+ const c2_node_id_t mId;
+ const std::shared_ptr<T> mImpl;
+};
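+
+// Illustrative instantiation (a hedged sketch; C2SoftFooDec and its IntfImpl are
+// hypothetical names): a component factory typically wraps its parameter helper as
+//
+//   std::shared_ptr<C2ComponentInterface> intf =
+//       std::make_shared<SimpleC2Interface<C2SoftFooDec::IntfImpl>>(
+//               "c2.android.foo.decoder", id,
+//               std::make_shared<C2SoftFooDec::IntfImpl>(helper));
+//
+// and passes the same IntfImpl instance to the component's constructor.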
+
+/**
+ * Utility classes for common interfaces.
+ */
+template<>
+class SimpleC2Interface<void> {
+public:
+ /**
+ * Base Codec 2.0 parameters required for all components.
+ */
+ struct BaseParams : C2InterfaceHelper {
+ explicit BaseParams(
+ const std::shared_ptr<C2ReflectorHelper> &helper,
+ C2String name,
+ C2Component::kind_t kind,
+ C2Component::domain_t domain,
+ C2String mediaType,
+ std::vector<C2String> aliases = std::vector<C2String>());
+
+ /// Marks that this component has no input latency. Otherwise, component must
+ /// add support for C2PortRequestedDelayTuning::input and C2PortActualDelayTuning::input.
+ void noInputLatency();
+
+ /// Marks that this component has no output latency. Otherwise, component must
+ /// add support for C2PortRequestedDelayTuning::output and C2PortActualDelayTuning::output.
+ void noOutputLatency();
+
+ /// Marks that this component has no pipeline latency. Otherwise, component must
+ /// add support for C2RequestedPipelineDelayTuning and C2ActualPipelineDelayTuning.
+ void noPipelineLatency();
+
+ /// Marks that this component has no need for private buffers. Otherwise, component must
+ /// add support for C2MaxPrivateBufferCountTuning, C2PrivateAllocatorsTuning and
+ /// C2PrivateBlockPoolsTuning.
+ void noPrivateBuffers();
+
+ /// Marks that this component holds no references to input buffers. Otherwise, component
+ /// must add support for C2StreamMaxReferenceAgeTuning::input and
+ /// C2StreamMaxReferenceCountTuning::input.
+ void noInputReferences();
+
+ /// Marks that this component holds no references to output buffers. Otherwise, component
+ /// must add support for C2StreamMaxReferenceAgeTuning::output and
+ /// C2StreamMaxReferenceCountTuning::output.
+ void noOutputReferences();
+
+ /// Marks that this component does not stretch time. Otherwise, component
+ /// must add support for C2ComponentTimeStretchTuning.
+ void noTimeStretch();
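+
+ // Illustrative usage (a hedged sketch; IntfImpl is a hypothetical derived helper,
+ // and NAME, KIND, DOMAIN, MEDIA_TYPE stand for the component's own values): a
+ // codec's parameter helper usually opts out of the features it does not implement
+ // from its constructor, e.g.
+ //
+ //   IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ //       : SimpleInterface<void>::BaseParams(
+ //               helper, NAME, KIND, DOMAIN, MEDIA_TYPE) {
+ //       noPrivateBuffers();
+ //       noInputReferences();
+ //       noOutputReferences();
+ //       noTimeStretch();
+ //   }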
+
+ std::shared_ptr<C2ApiLevelSetting> mApiLevel;
+ std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
+
+ std::shared_ptr<C2PlatformLevelSetting> mPlatformLevel;
+ std::shared_ptr<C2PlatformFeaturesSetting> mPlatformFeatures;
+
+ std::shared_ptr<C2ComponentNameSetting> mName;
+ std::shared_ptr<C2ComponentAliasesSetting> mAliases;
+ std::shared_ptr<C2ComponentKindSetting> mKind;
+ std::shared_ptr<C2ComponentDomainSetting> mDomain;
+ std::shared_ptr<C2ComponentAttributesSetting> mAttrib;
+ std::shared_ptr<C2ComponentTimeStretchTuning> mTimeStretch;
+
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+
+ std::shared_ptr<C2PortRequestedDelayTuning::input> mRequestedInputDelay;
+ std::shared_ptr<C2PortRequestedDelayTuning::output> mRequestedOutputDelay;
+ std::shared_ptr<C2RequestedPipelineDelayTuning> mRequestedPipelineDelay;
+
+ std::shared_ptr<C2PortActualDelayTuning::input> mActualInputDelay;
+ std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelay;
+ std::shared_ptr<C2ActualPipelineDelayTuning> mActualPipelineDelay;
+
+ std::shared_ptr<C2StreamMaxReferenceAgeTuning::input> mMaxInputReferenceAge;
+ std::shared_ptr<C2StreamMaxReferenceCountTuning::input> mMaxInputReferenceCount;
+ std::shared_ptr<C2StreamMaxReferenceAgeTuning::output> mMaxOutputReferenceAge;
+ std::shared_ptr<C2StreamMaxReferenceCountTuning::output> mMaxOutputReferenceCount;
+ std::shared_ptr<C2MaxPrivateBufferCountTuning> mMaxPrivateBufferCount;
+
+ std::shared_ptr<C2PortStreamCountTuning::input> mInputStreamCount;
+ std::shared_ptr<C2PortStreamCountTuning::output> mOutputStreamCount;
+
+ std::shared_ptr<C2SubscribedParamIndicesTuning> mSubscribedParamIndices;
+ std::shared_ptr<C2PortSuggestedBufferCountTuning::input> mSuggestedInputBufferCount;
+ std::shared_ptr<C2PortSuggestedBufferCountTuning::output> mSuggestedOutputBufferCount;
+
+ std::shared_ptr<C2CurrentWorkTuning> mCurrentWorkOrdinal;
+ std::shared_ptr<C2LastWorkQueuedTuning::input> mLastInputQueuedWorkOrdinal;
+ std::shared_ptr<C2LastWorkQueuedTuning::output> mLastOutputQueuedWorkOrdinal;
+
+ std::shared_ptr<C2PortAllocatorsTuning::input> mInputAllocators;
+ std::shared_ptr<C2PortAllocatorsTuning::output> mOutputAllocators;
+ std::shared_ptr<C2PrivateAllocatorsTuning> mPrivateAllocators;
+ std::shared_ptr<C2PortBlockPoolsTuning::output> mOutputPoolIds;
+ std::shared_ptr<C2PrivateBlockPoolsTuning> mPrivatePoolIds;
+
+ std::shared_ptr<C2TrippedTuning> mTripped;
+ std::shared_ptr<C2OutOfMemoryTuning> mOutOfMemory;
+
+ std::shared_ptr<C2PortConfigCounterTuning::input> mInputConfigCounter;
+ std::shared_ptr<C2PortConfigCounterTuning::output> mOutputConfigCounter;
+ std::shared_ptr<C2ConfigCounterTuning> mDirectConfigCounter;
+ };
+};
+
+template<typename T>
+using SimpleInterface = SimpleC2Interface<T>;
+
+template<typename T, typename ...Args>
+std::shared_ptr<T> AllocSharedString(const Args(&... args), const char *str) {
+ size_t len = strlen(str) + 1;
+ std::shared_ptr<T> ret = T::AllocShared(len, args...);
+ strcpy(ret->m.value, str);
+ return ret;
+}
+
+template<typename T, typename ...Args>
+std::shared_ptr<T> AllocSharedString(const Args(&... args), const std::string &str) {
+ std::shared_ptr<T> ret = T::AllocShared(str.length() + 1, args...);
+ strcpy(ret->m.value, str.c_str());
+ return ret;
+}
+
+template <typename T>
+struct Setter {
+ typedef typename std::remove_reference<T>::type type;
+
+ static C2R NonStrictValueWithNoDeps(
+ bool mayBlock, C2InterfaceHelper::C2P<type> &me) {
+ (void)mayBlock;
+ return me.F(me.v.value).validatePossible(me.v.value);
+ }
+
+ static C2R NonStrictValuesWithNoDeps(
+ bool mayBlock, C2InterfaceHelper::C2P<type> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
+ res.plus(me.F(me.v.m.values[ix]).validatePossible(me.v.m.values[ix]));
+ }
+ return res;
+ }
+
+ static C2R StrictValueWithNoDeps(
+ bool mayBlock,
+ const C2InterfaceHelper::C2P<type> &old,
+ C2InterfaceHelper::C2P<type> &me) {
+ (void)mayBlock;
+ if (!me.F(me.v.value).supportsNow(me.v.value)) {
+ me.set().value = old.v.value;
+ }
+ return me.F(me.v.value).validatePossible(me.v.value);
+ }
+};
+
+} // namespace android
+
+#endif // ANDROID_SIMPLE_C2_INTERFACE_H_
diff --git a/media/codec2/components/cmds/Android.bp b/media/codec2/components/cmds/Android.bp
new file mode 100644
index 0000000..8fb9bf9
--- /dev/null
+++ b/media/codec2/components/cmds/Android.bp
@@ -0,0 +1,40 @@
+cc_binary {
+ name: "codec2",
+ defaults: ["libstagefright_codec2-impl-defaults"],
+
+ srcs: [
+ "codec2.cpp",
+ ],
+
+ include_dirs: [
+ ],
+
+ shared_libs: [
+ "libbase",
+ "libbinder",
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libmediaextractor",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libui",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ diag: {
+ cfi: true,
+ },
+ },
+}
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
new file mode 100644
index 0000000..f2cf545
--- /dev/null
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -0,0 +1,483 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+
+#include <thread>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "codec2"
+#include <log/log.h>
+
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <media/DataSource.h>
+#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <gui/GLConsumer.h>
+#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2Buffer.h>
+#include <C2BufferPriv.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <C2Work.h>
+
+using namespace android;
+using namespace std::chrono_literals;
+
+namespace {
+
+class LinearBuffer : public C2Buffer {
+public:
+ explicit LinearBuffer(const std::shared_ptr<C2LinearBlock> &block)
+ : C2Buffer({ block->share(block->offset(), block->size(), ::C2Fence()) }) {}
+};
+
+class Listener;
+
+class SimplePlayer {
+public:
+ SimplePlayer();
+ ~SimplePlayer();
+
+ void onWorkDone(std::weak_ptr<C2Component> component,
+ std::list<std::unique_ptr<C2Work>> workItems);
+ void onTripped(std::weak_ptr<C2Component> component,
+ std::vector<std::shared_ptr<C2SettingResult>> settingResult);
+ void onError(std::weak_ptr<C2Component> component, uint32_t errorCode);
+
+ void play(const sp<IMediaSource> &source);
+
+private:
+ typedef std::unique_lock<std::mutex> ULock;
+
+ std::shared_ptr<Listener> mListener;
+ std::shared_ptr<C2Component> mComponent;
+
+ sp<IProducerListener> mProducerListener;
+
+ std::atomic_int mLinearPoolId;
+
+ std::shared_ptr<C2Allocator> mAllocIon;
+ std::shared_ptr<C2BlockPool> mLinearPool;
+
+ std::mutex mQueueLock;
+ std::condition_variable mQueueCondition;
+ std::list<std::unique_ptr<C2Work>> mWorkQueue;
+
+ std::mutex mProcessedLock;
+ std::condition_variable mProcessedCondition;
+ std::list<std::unique_ptr<C2Work>> mProcessedWork;
+
+ sp<Surface> mSurface;
+ sp<SurfaceComposerClient> mComposerClient;
+ sp<SurfaceControl> mControl;
+};
+
+class Listener : public C2Component::Listener {
+public:
+ explicit Listener(SimplePlayer *thiz) : mThis(thiz) {}
+ virtual ~Listener() = default;
+
+ virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
+ std::list<std::unique_ptr<C2Work>> workItems) override {
+ mThis->onWorkDone(component, std::move(workItems));
+ }
+
+ virtual void onTripped_nb(std::weak_ptr<C2Component> component,
+ std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
+ mThis->onTripped(component, settingResult);
+ }
+
+ virtual void onError_nb(std::weak_ptr<C2Component> component,
+ uint32_t errorCode) override {
+ mThis->onError(component, errorCode);
+ }
+
+private:
+ SimplePlayer * const mThis;
+};
+
+
+SimplePlayer::SimplePlayer()
+ : mListener(new Listener(this)),
+ mProducerListener(new DummyProducerListener),
+ mLinearPoolId(C2BlockPool::PLATFORM_START),
+ mComposerClient(new SurfaceComposerClient) {
+ CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
+
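+ // Fetch the platform's default linear allocator and wrap it in a pooled
+ // block pool; play() draws its compressed input blocks from this pool.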
+ std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
+ CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mAllocIon), C2_OK);
+ mLinearPool = std::make_shared<C2PooledBlockPool>(mAllocIon, mLinearPoolId++);
+
+ mControl = mComposerClient->createSurface(
+ String8("A Surface"),
+ 1280,
+ 800,
+ HAL_PIXEL_FORMAT_YV12);
+ //PIXEL_FORMAT_RGB_565);
+
+ CHECK(mControl != nullptr);
+ CHECK(mControl->isValid());
+
+ SurfaceComposerClient::Transaction{}
+ .setLayer(mControl, INT_MAX)
+ .show(mControl)
+ .apply();
+
+ mSurface = mControl->getSurface();
+ CHECK(mSurface != nullptr);
+ mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener);
+}
+
+SimplePlayer::~SimplePlayer() {
+ mComposerClient->dispose();
+}
+
+void SimplePlayer::onWorkDone(
+ std::weak_ptr<C2Component> component, std::list<std::unique_ptr<C2Work>> workItems) {
+ ALOGV("SimplePlayer::onWorkDone");
+ (void) component;
+ ULock l(mProcessedLock);
+ for (auto & item : workItems) {
+ mProcessedWork.push_back(std::move(item));
+ }
+ mProcessedCondition.notify_all();
+}
+
+void SimplePlayer::onTripped(
+ std::weak_ptr<C2Component> component,
+ std::vector<std::shared_ptr<C2SettingResult>> settingResult) {
+ (void) component;
+ (void) settingResult;
+ // TODO
+}
+
+void SimplePlayer::onError(std::weak_ptr<C2Component> component, uint32_t errorCode) {
+ (void) component;
+ (void) errorCode;
+ // TODO
+}
+
+void SimplePlayer::play(const sp<IMediaSource> &source) {
+ ALOGV("SimplePlayer::play");
+ sp<AMessage> format;
+ (void) convertMetaDataToMessage(source->getFormat(), &format);
+
+ sp<ABuffer> csd0, csd1;
+ format->findBuffer("csd-0", &csd0);
+ format->findBuffer("csd-1", &csd1);
+
+ status_t err = source->start();
+
+ if (err != OK) {
+ fprintf(stderr, "source returned error %d (0x%08x)\n", err, err);
+ return;
+ }
+
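+ // Instantiate the platform AVC decoder and point its output at the basic
+ // graphic block pool so decoded frames land in graphic blocks that the
+ // render thread can hand to the Surface.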
+ std::shared_ptr<C2ComponentStore> store = GetCodec2PlatformComponentStore();
+ std::shared_ptr<C2Component> component;
+ (void)store->createComponent("c2.android.avc.decoder", &component);
+
+ (void)component->setListener_vb(mListener, C2_DONT_BLOCK);
+ std::unique_ptr<C2PortBlockPoolsTuning::output> pools =
+ C2PortBlockPoolsTuning::output::AllocUnique({ (uint64_t)C2BlockPool::BASIC_GRAPHIC });
+ std::vector<std::unique_ptr<C2SettingResult>> result;
+ (void)component->intf()->config_vb({pools.get()}, C2_DONT_BLOCK, &result);
+ component->start();
+
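+ // Seed the work queue with a fixed set of reusable C2Work items; the render
+ // thread returns each one to the queue after its output has been displayed.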
+ for (int i = 0; i < 8; ++i) {
+ mWorkQueue.emplace_back(new C2Work);
+ }
+
+ std::atomic_bool running(true);
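+ // Render thread: waits for completed work, wraps each decoded graphic block
+ // in a GraphicBuffer, attaches and queues it to the Surface, then recycles
+ // the C2Work item back into the input queue.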
+ std::thread surfaceThread([this, &running]() {
+ const sp<IGraphicBufferProducer> &igbp = mSurface->getIGraphicBufferProducer();
+ while (running) {
+ std::unique_ptr<C2Work> work;
+ {
+ ULock l(mProcessedLock);
+ if (mProcessedWork.empty()) {
+ mProcessedCondition.wait_for(l, 100ms);
+ if (mProcessedWork.empty()) {
+ continue;
+ }
+ }
+ work.swap(mProcessedWork.front());
+ mProcessedWork.pop_front();
+ }
+ int slot;
+ sp<Fence> fence;
+ ALOGV("Render: Frame #%lld", work->worklets.front()->output.ordinal.frameIndex.peekll());
+ const std::shared_ptr<C2Buffer> &output = work->worklets.front()->output.buffers[0];
+ if (output) {
+ const C2ConstGraphicBlock block = output->data().graphicBlocks().front();
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(block.handle());
+ sp<GraphicBuffer> buffer(new GraphicBuffer(
+ grallocHandle,
+ GraphicBuffer::CLONE_HANDLE,
+ block.width(),
+ block.height(),
+ HAL_PIXEL_FORMAT_YV12,
+ 1,
+ (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ block.width()));
+ native_handle_delete(grallocHandle);
+
+ status_t err = igbp->attachBuffer(&slot, buffer);
+
+ IGraphicBufferProducer::QueueBufferInput qbi(
+ (work->worklets.front()->output.ordinal.timestamp * 1000ll).peekll(),
+ false,
+ HAL_DATASPACE_UNKNOWN,
+ Rect(block.width(), block.height()),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW,
+ 0,
+ Fence::NO_FENCE,
+ 0);
+ IGraphicBufferProducer::QueueBufferOutput qbo;
+ err = igbp->queueBuffer(slot, qbi, &qbo);
+ }
+
+ work->input.buffers.clear();
+ work->worklets.clear();
+
+ ULock l(mQueueLock);
+ mWorkQueue.push_back(std::move(work));
+ mQueueCondition.notify_all();
+ }
+ ALOGV("render loop finished");
+ });
+
+ long numFrames = 0;
+ mLinearPool.reset(new C2PooledBlockPool(mAllocIon, mLinearPoolId++));
+
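+ // Feed loop: send csd-0/csd-1 first, then compressed frames read from the
+ // extractor; each payload is copied into a linear block and queued to the
+ // decoder as a C2Work item.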
+ for (;;) {
+ size_t size = 0u;
+ void *data = nullptr;
+ int64_t timestamp = 0u;
+ MediaBufferBase *buffer = nullptr;
+ sp<ABuffer> csd;
+ if (csd0 != nullptr) {
+ csd = csd0;
+ csd0 = nullptr;
+ } else if (csd1 != nullptr) {
+ csd = csd1;
+ csd1 = nullptr;
+ } else {
+ status_t err = source->read(&buffer);
+ if (err != OK) {
+ CHECK(buffer == nullptr);
+
+ if (err == INFO_FORMAT_CHANGED) {
+ continue;
+ }
+
+ break;
+ }
+ MetaDataBase &meta = buffer->meta_data();
+ CHECK(meta.findInt64(kKeyTime, &timestamp));
+
+ size = buffer->size();
+ data = buffer->data();
+ }
+
+ if (csd != nullptr) {
+ size = csd->size();
+ data = csd->data();
+ }
+
+ // Prepare C2Work
+
+ std::unique_ptr<C2Work> work;
+ while (!work) {
+ ULock l(mQueueLock);
+ if (!mWorkQueue.empty()) {
+ work.swap(mWorkQueue.front());
+ mWorkQueue.pop_front();
+ } else {
+ mQueueCondition.wait_for(l, 100ms);
+ }
+ }
+ work->input.flags = (C2FrameData::flags_t)0;
+ work->input.ordinal.timestamp = timestamp;
+ work->input.ordinal.frameIndex = numFrames;
+
+ std::shared_ptr<C2LinearBlock> block;
+ mLinearPool->fetchLinearBlock(
+ size,
+ { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+ &block);
+ C2WriteView view = block->map().get();
+ if (view.error() != C2_OK) {
+ fprintf(stderr, "C2LinearBlock::map() failed : %d", view.error());
+ break;
+ }
+ memcpy(view.base(), data, size);
+
+ work->input.buffers.clear();
+ work->input.buffers.emplace_back(new LinearBuffer(block));
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+
+ std::list<std::unique_ptr<C2Work>> items;
+ items.push_back(std::move(work));
+
+ ALOGV("Frame #%ld size = %zu", numFrames, size);
+ // DO THE DECODING
+ component->queue_nb(&items);
+
+ if (buffer) {
+ buffer->release();
+ buffer = nullptr;
+ }
+
+ ++numFrames;
+ }
+ ALOGV("main loop finished");
+ source->stop();
+ running.store(false);
+ surfaceThread.join();
+
+ component->release();
+ printf("\n");
+}
+
+} // namespace
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s [options] [input_filename]\n", me);
+ fprintf(stderr, " -h(elp)\n");
+}
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ int res;
+ while ((res = getopt(argc, argv, "h")) >= 0) {
+ switch (res) {
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ exit(1);
+ break;
+ }
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (argc < 1) {
+ fprintf(stderr, "No input file specified\n");
+ return 1;
+ }
+
+ status_t err = OK;
+ SimplePlayer player;
+
+ for (int k = 0; k < argc && err == OK; ++k) {
+ const char *filename = argv[k];
+
+ sp<DataSource> dataSource =
+ DataSourceFactory::CreateFromURI(nullptr /* httpService */, filename);
+
+ if (strncasecmp(filename, "sine:", 5) && dataSource == nullptr) {
+ fprintf(stderr, "Unable to create data source.\n");
+ return 1;
+ }
+
+ Vector<sp<IMediaSource> > mediaSources;
+ sp<IMediaSource> mediaSource;
+
+ sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
+
+ if (extractor == nullptr) {
+ fprintf(stderr, "could not create extractor.\n");
+ return -1;
+ }
+
+ sp<MetaData> meta = extractor->getMetaData();
+
+ if (meta != nullptr) {
+ const char *mime;
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ fprintf(stderr, "extractor did not provide MIME type.\n");
+ return -1;
+ }
+ }
+
+ size_t numTracks = extractor->countTracks();
+
+ size_t i;
+ for (i = 0; i < numTracks; ++i) {
+ meta = extractor->getTrackMetaData(i, 0);
+
+ if (meta == nullptr) {
+ break;
+ }
+ const char *mime = nullptr;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ // TODO: allowing AVC only for the time being
+ if (!strncasecmp(mime, "video/avc", 9)) {
+ break;
+ }
+
+ meta = nullptr;
+ }
+
+ if (meta == nullptr) {
+ fprintf(stderr, "No AVC track found.\n");
+ return -1;
+ }
+
+ mediaSource = extractor->getTrack(i);
+ if (mediaSource == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
+ return -1;
+ }
+
+ player.play(mediaSource);
+ }
+
+ return 0;
+}
diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp
new file mode 100644
index 0000000..32c3b36
--- /dev/null
+++ b/media/codec2/components/flac/Android.bp
@@ -0,0 +1,27 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2flacdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ header_libs: ["libFLAC-headers"],
+
+ srcs: ["C2SoftFlacDec.cpp"],
+
+ shared_libs: [
+ "libstagefright_flacdec",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2flacenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftFlacEnc.cpp"],
+
+ static_libs: ["libFLAC"],
+}
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
new file mode 100644
index 0000000..f1e2f51
--- /dev/null
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftFlacDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftFlacDec.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.flac.decoder";
+
+class C2SoftFlacDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_FLAC))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
+ .withFields({C2F(mSampleRate, value).inRange(1, 655350)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 768000))
+ .withFields({C2F(mBitrate, value).inRange(1, 21000000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 32768))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftFlacDec::C2SoftFlacDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mFLACDecoder(nullptr) {
+}
+
+C2SoftFlacDec::~C2SoftFlacDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftFlacDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+c2_status_t C2SoftFlacDec::onStop() {
+ if (mFLACDecoder) mFLACDecoder->flush();
+ memset(&mStreamInfo, 0, sizeof(mStreamInfo));
+ mHasStreamInfo = false;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+void C2SoftFlacDec::onReset() {
+ mInputBufferCount = 0;
+ (void)onStop();
+}
+
+void C2SoftFlacDec::onRelease() {
+ mInputBufferCount = 0;
+ if (mFLACDecoder) delete mFLACDecoder;
+ mFLACDecoder = nullptr;
+}
+
+c2_status_t C2SoftFlacDec::onFlush_sm() {
+ return onStop();
+}
+
+status_t C2SoftFlacDec::initDecoder() {
+ if (mFLACDecoder) {
+ delete mFLACDecoder;
+ }
+ mFLACDecoder = FLACDecoder::Create();
+ if (!mFLACDecoder) {
+ ALOGE("initDecoder: failed to create FLACDecoder");
+ mSignalledError = true;
+ return NO_MEMORY;
+ }
+
+ memset(&mStreamInfo, 0, sizeof(mStreamInfo));
+ mHasStreamInfo = false;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mInputBufferCount = 0;
+
+ return OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+// (TODO) add multiframe support, in plugin and FLACDecoder.cpp
+void C2SoftFlacDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
+ if (inSize == 0) {
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ if (mInputBufferCount == 0 && !codecConfig) {
+ ALOGV("First frame has to include configuration, forcing config");
+ codecConfig = true;
+ }
+
+ uint8_t *input = const_cast<uint8_t *>(rView.data() + inOffset);
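+ // Codec-config input carries the FLAC metadata. Parse it and, once
+ // STREAMINFO is available, push the discovered sample rate and channel
+ // count to the interface and into the work item's config update.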
+ if (codecConfig) {
+ status_t decoderErr = mFLACDecoder->parseMetadata(input, inSize);
+ if (decoderErr != OK && decoderErr != WOULD_BLOCK) {
+ ALOGE("process: FLACDecoder parseMetadata returns error %d", decoderErr);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ mInputBufferCount++;
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+
+ if (decoderErr == WOULD_BLOCK) {
+ ALOGV("process: parseMetadata is Blocking, Continue %d", decoderErr);
+ } else {
+ mStreamInfo = mFLACDecoder->getStreamInfo();
+ if (mStreamInfo.sample_rate && mStreamInfo.max_blocksize &&
+ mStreamInfo.channels) {
+ mHasStreamInfo = true;
+ C2StreamSampleRateInfo::output sampleRateInfo(
+ 0u, mStreamInfo.sample_rate);
+ C2StreamChannelCountInfo::output channelCountInfo(
+ 0u, mStreamInfo.channels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err =
+ mIntf->config({&sampleRateInfo, &channelCountInfo},
+ C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ ALOGD("process: decoder configuration : %d Hz, %d channels, %d samples,"
+ " %d block size", mStreamInfo.sample_rate, mStreamInfo.channels,
+ (int)mStreamInfo.total_samples, mStreamInfo.max_blocksize);
+ }
+ return;
+ }
+
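+ // Size the output for the worst case: one FLAC block of 16-bit samples
+ // across all channels, using conservative maxima until STREAMINFO is known.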
+ size_t outSize;
+ if (mHasStreamInfo)
+ outSize = mStreamInfo.max_blocksize * mStreamInfo.channels * sizeof(short);
+ else
+ outSize = kMaxBlockSize * FLACDecoder::kMaxChannels * sizeof(short);
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ short *output = reinterpret_cast<short *>(wView.data());
+ status_t decoderErr = mFLACDecoder->decodeOneFrame(
+ input, inSize, output, &outSize);
+ if (decoderErr != OK) {
+ ALOGE("process: FLACDecoder decodeOneFrame returns error %d", decoderErr);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ mInputBufferCount++;
+ ALOGV("out buffer attr. size %zu", outSize);
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+c2_status_t C2SoftFlacDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (mFLACDecoder) mFLACDecoder->flush();
+
+ return C2_OK;
+}
+
+class C2SoftFlacDecFactory : public C2ComponentFactory {
+public:
+ C2SoftFlacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftFlacDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftFlacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftFlacDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftFlacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftFlacDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftFlacDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/flac/C2SoftFlacDec.h b/media/codec2/components/flac/C2SoftFlacDec.h
new file mode 100644
index 0000000..b491bfd
--- /dev/null
+++ b/media/codec2/components/flac/C2SoftFlacDec.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_FLAC_DEC_H_
+#define ANDROID_C2_SOFT_FLAC_DEC_H_
+
+#include <SimpleC2Component.h>
+
+#include "FLACDecoder.h"
+
+namespace android {
+
+struct C2SoftFlacDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftFlacDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftFlacDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ enum {
+ kMaxBlockSize = 4096
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ FLACDecoder *mFLACDecoder;
+ FLAC__StreamMetadata_StreamInfo mStreamInfo;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ bool mHasStreamInfo;
+ size_t mInputBufferCount;
+
+ status_t initDecoder();
+
+ C2_DO_NOT_COPY(C2SoftFlacDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_FLAC_DEC_H_
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
new file mode 100644
index 0000000..e4192c7
--- /dev/null
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -0,0 +1,460 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftFlacEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftFlacEnc.h"
+
+namespace android {
+
+class C2SoftFlacEnc::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ .build());
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_FLAC))
+ .build());
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::input(0u, 44100))
+ .withFields({C2F(mSampleRate, value).inRange(1, 655350)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 2)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 768000))
+ .withFields({C2F(mBitrate, value).inRange(1, 21000000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
+ .build());
+ }
+
+ uint32_t getSampleRate() const { return mSampleRate->value; }
+ uint32_t getChannelCount() const { return mChannelCount->value; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+constexpr char COMPONENT_NAME[] = "c2.android.flac.encoder";
+
+C2SoftFlacEnc::C2SoftFlacEnc(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mFlacStreamEncoder(nullptr),
+ mInputBufferPcm32(nullptr) {
+}
+
+C2SoftFlacEnc::~C2SoftFlacEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftFlacEnc::onInit() {
+ mFlacStreamEncoder = FLAC__stream_encoder_new();
+ if (!mFlacStreamEncoder) return C2_CORRUPTED;
+
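+ // Scratch buffer used to widen 16-bit PCM input into the 32-bit samples
+ // libFLAC consumes.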
+ mInputBufferPcm32 = (FLAC__int32*) malloc(
+ kInBlockSize * kMaxNumChannels * sizeof(FLAC__int32));
+ if (!mInputBufferPcm32) return C2_NO_MEMORY;
+
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT;
+ mIsFirstFrame = true;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0u;
+ mEncoderWriteData = false;
+ mEncoderReturnedNbBytes = 0;
+ mHeaderOffset = 0;
+ mWroteHeader = false;
+
+ status_t err = configureEncoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+void C2SoftFlacEnc::onRelease() {
+ if (mFlacStreamEncoder) {
+ FLAC__stream_encoder_delete(mFlacStreamEncoder);
+ mFlacStreamEncoder = nullptr;
+ }
+
+ if (mInputBufferPcm32) {
+ free(mInputBufferPcm32);
+ mInputBufferPcm32 = nullptr;
+ }
+}
+
+void C2SoftFlacEnc::onReset() {
+ mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT;
+ (void) onStop();
+}
+
+c2_status_t C2SoftFlacEnc::onStop() {
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mIsFirstFrame = true;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0u;
+ mEncoderWriteData = false;
+ mEncoderReturnedNbBytes = 0;
+ mHeaderOffset = 0;
+ mWroteHeader = false;
+
+ c2_status_t status = drain(DRAIN_COMPONENT_NO_EOS, nullptr);
+ if (C2_OK != status) return status;
+
+ status_t err = configureEncoder();
+ if (err != OK) mSignalledError = true;
+ return C2_OK;
+}
+
+c2_status_t C2SoftFlacEnc::onFlush_sm() {
+ return onStop();
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+}
+
+void C2SoftFlacEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+ if (mIsFirstFrame && inSize) {
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ mIsFirstFrame = false;
+ }
+
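+ // Before the first encoded output, emit the FLAC header captured by
+ // onEncodedFlacAvailable() as codec-specific data via a config update.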
+ if (!mWroteHeader) {
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(mHeaderOffset, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, mHeader, mHeaderOffset);
+ ALOGV("put csd, %d bytes", mHeaderOffset);
+
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+ mWroteHeader = true;
+ }
+
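+ // The output timestamp is derived from the running count of samples the
+ // encoder has emitted so far, offset from the first input's timestamp.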
+ uint32_t sampleRate = mIntf->getSampleRate();
+ uint32_t channelCount = mIntf->getChannelCount();
+ uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+
+ size_t outCapacity = inSize;
+ outCapacity += mBlockSize * channelCount * sizeof(int16_t);
+
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = mOutputBlock->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ mEncoderWriteData = true;
+ mEncoderReturnedNbBytes = 0;
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ const uint8_t *inPtr = rView.data() + inOffset;
+ size_t processSize = MIN(kInBlockSize * channelCount * sizeof(int16_t), (inSize - inPos));
+ const unsigned nbInputFrames = processSize / (channelCount * sizeof(int16_t));
+ const unsigned nbInputSamples = processSize / sizeof(int16_t);
+ const int16_t *pcm16 = reinterpret_cast<const int16_t *>(inPtr + inPos);
+ ALOGV("about to encode %zu bytes", processSize);
+
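+ // libFLAC takes interleaved samples in 32-bit containers, so widen the
+ // 16-bit input before handing it to the encoder.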
+ for (unsigned i = 0; i < nbInputSamples; i++) {
+ mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ }
+
+ FLAC__bool ok = FLAC__stream_encoder_process_interleaved(
+ mFlacStreamEncoder, mInputBufferPcm32, nbInputFrames);
+ if (!ok) {
+ ALOGE("error encountered during encoding");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ mOutputBlock.reset();
+ return;
+ }
+ inPos += processSize;
+ }
+ if (eos && (C2_OK != drain(DRAIN_COMPONENT_WITH_EOS, pool))) {
+ ALOGE("error encountered during encoding");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ mOutputBlock.reset();
+ return;
+ }
+ fillEmptyWork(work);
+ if (mEncoderReturnedNbBytes != 0) {
+ std::shared_ptr<C2Buffer> buffer = createLinearBuffer(std::move(mOutputBlock), 0, mEncoderReturnedNbBytes);
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ } else {
+ ALOGV("encoder process_interleaved returned without data to write");
+ }
+ mOutputBlock = nullptr;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ mEncoderWriteData = false;
+ mEncoderReturnedNbBytes = 0;
+}
+
+FLAC__StreamEncoderWriteStatus C2SoftFlacEnc::onEncodedFlacAvailable(
+ const FLAC__byte buffer[], size_t bytes, unsigned samples,
+ unsigned current_frame) {
+ (void) current_frame;
+ ALOGV("%s (bytes=%zu, samples=%u, curr_frame=%u)", __func__, bytes, samples,
+ current_frame);
+
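+ // samples == 0 marks metadata written while the encoder stream is being
+ // initialized; accumulate it in mHeader so process() can emit it as codec
+ // config before the first encoded frame.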
+ if (samples == 0) {
+ ALOGI("saving %zu bytes of header", bytes);
+ memcpy(mHeader + mHeaderOffset, buffer, bytes);
+ mHeaderOffset += bytes; // will contain header size when finished receiving header
+ return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
+ }
+
+ if ((samples == 0) || !mEncoderWriteData) {
+ // the header case (samples == 0) has already returned above; this only
+ // skips encoded data delivered while the component is not expecting output
+ ALOGV("ignoring %zu bytes of data (samples=%d)", bytes, samples);
+ return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
+ }
+
+ // write encoded data
+ C2WriteView wView = mOutputBlock->map().get();
+ uint8_t* outData = wView.data();
+ ALOGV("writing %zu bytes of encoded data on output", bytes);
+ // increment mProcessedSamples to maintain audio synchronization during
+ // play back
+ mProcessedSamples += samples;
+ if (bytes + mEncoderReturnedNbBytes > mOutputBlock->capacity()) {
+ ALOGE("not enough space left to write encoded data, dropping %zu bytes", bytes);
+ // a fatal error would stop the encoding
+ return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
+ }
+ memcpy(outData + mEncoderReturnedNbBytes, buffer, bytes);
+ mEncoderReturnedNbBytes += bytes;
+ return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
+}
+
+
+status_t C2SoftFlacEnc::configureEncoder() {
+ ALOGV("%s numChannel=%d, sampleRate=%d", __func__, mIntf->getChannelCount(), mIntf->getSampleRate());
+
+ if (mSignalledError || !mFlacStreamEncoder) {
+ ALOGE("can't configure encoder: no encoder or invalid state");
+ return UNKNOWN_ERROR;
+ }
+
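+ // Configure libFLAC for 16-bit input at the currently configured channel
+ // count and sample rate; verification is disabled and the stored
+ // compression level is applied.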
+ FLAC__bool ok = true;
+ ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mIntf->getChannelCount());
+ ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mIntf->getSampleRate());
+ ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, 16);
+ ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder, mCompressionLevel);
+ ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false);
+ if (!ok) {
+ ALOGE("unknown error when configuring encoder");
+ return UNKNOWN_ERROR;
+ }
+
+ ok &= FLAC__STREAM_ENCODER_INIT_STATUS_OK ==
+ FLAC__stream_encoder_init_stream(mFlacStreamEncoder,
+ flacEncoderWriteCallback /*write_callback*/,
+ nullptr /*seek_callback*/,
+ nullptr /*tell_callback*/,
+ nullptr /*metadata_callback*/,
+ (void *) this /*client_data*/);
+
+ if (!ok) {
+ ALOGE("unknown error when configuring encoder");
+ return UNKNOWN_ERROR;
+ }
+
+ mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
+
+ ALOGV("encoder successfully configured");
+ return OK;
+}
+
+FLAC__StreamEncoderWriteStatus C2SoftFlacEnc::flacEncoderWriteCallback(
+ const FLAC__StreamEncoder *,
+ const FLAC__byte buffer[],
+ size_t bytes,
+ unsigned samples,
+ unsigned current_frame,
+ void *client_data) {
+ return ((C2SoftFlacEnc*) client_data)->onEncodedFlacAvailable(
+ buffer, bytes, samples, current_frame);
+}
+
+c2_status_t C2SoftFlacEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ switch (drainMode) {
+ case NO_DRAIN:
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ case DRAIN_CHAIN:
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ case DRAIN_COMPONENT_WITH_EOS:
+ // TODO: This flag is not being sent back to the client
+ // because there are no items in the pending work queue, as all the
+ // inputs are returned with either empty work or valid encoded data
+ // mSignalledOutputEos = true;
+ case DRAIN_COMPONENT_NO_EOS:
+ break;
+ default:
+ return C2_BAD_VALUE;
+ }
+ FLAC__bool ok = FLAC__stream_encoder_finish(mFlacStreamEncoder);
+ if (!ok) return C2_CORRUPTED;
+ mIsFirstFrame = true;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0u;
+
+ return C2_OK;
+}
+
+class C2SoftFlacEncFactory : public C2ComponentFactory {
+public:
+ C2SoftFlacEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftFlacEnc(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftFlacEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftFlacEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftFlacEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftFlacEncFactory() override = default;
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftFlacEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
new file mode 100644
index 0000000..cdf305e
--- /dev/null
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_FLAC_ENC_H_
+#define ANDROID_C2_SOFT_FLAC_ENC_H_
+
+#include <SimpleC2Component.h>
+
+#include "FLAC/stream_encoder.h"
+
+#define FLAC_COMPRESSION_LEVEL_MIN 0
+#define FLAC_COMPRESSION_LEVEL_DEFAULT 5
+#define FLAC_COMPRESSION_LEVEL_MAX 8
+
+#define FLAC_HEADER_SIZE 128
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+namespace android {
+
+class C2SoftFlacEnc : public SimpleC2Component {
+public:
+ class IntfImpl;
+
+ C2SoftFlacEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftFlacEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ status_t configureEncoder();
+ static FLAC__StreamEncoderWriteStatus flacEncoderWriteCallback(
+ const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[],
+ size_t bytes, unsigned samples, unsigned current_frame,
+ void *client_data);
+ FLAC__StreamEncoderWriteStatus onEncodedFlacAvailable(
+ const FLAC__byte buffer[], size_t bytes, unsigned samples,
+ unsigned current_frame);
+
+ std::shared_ptr<IntfImpl> mIntf;
+ const unsigned int kInBlockSize = 1152;
+ const unsigned int kMaxNumChannels = 2;
+ FLAC__StreamEncoder* mFlacStreamEncoder;
+ FLAC__int32* mInputBufferPcm32;
+ std::shared_ptr<C2LinearBlock> mOutputBlock;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ uint32_t mCompressionLevel;
+ uint32_t mBlockSize;
+ bool mIsFirstFrame;
+ uint64_t mAnchorTimeStamp;
+ uint64_t mProcessedSamples;
+ // should the data received by the callback be written to the output port
+ bool mEncoderWriteData;
+ size_t mEncoderReturnedNbBytes;
+ unsigned mHeaderOffset;
+ bool mWroteHeader;
+ char mHeader[FLAC_HEADER_SIZE];
+
+ C2_DO_NOT_COPY(C2SoftFlacEnc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_FLAC_ENC_H_
diff --git a/media/codec2/components/flac/MODULE_LICENSE_APACHE2 b/media/codec2/components/flac/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/flac/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/flac/NOTICE b/media/codec2/components/flac/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/flac/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/g711/Android.bp b/media/codec2/components/g711/Android.bp
new file mode 100644
index 0000000..56cbc20
--- /dev/null
+++ b/media/codec2/components/g711/Android.bp
@@ -0,0 +1,23 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2g711alawdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftG711Dec.cpp"],
+
+ cflags: [
+ "-DALAW",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2g711mlawdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftG711Dec.cpp"],
+}
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
new file mode 100644
index 0000000..1c71d45
--- /dev/null
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -0,0 +1,323 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftG711Dec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftG711Dec.h"
+
+namespace android {
+
+#ifdef ALAW
+constexpr char COMPONENT_NAME[] = "c2.android.g711.alaw.decoder";
+#else
+constexpr char COMPONENT_NAME[] = "c2.android.g711.mlaw.decoder";
+#endif
+
+class C2SoftG711Dec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+#ifdef ALAW
+ MEDIA_MIMETYPE_AUDIO_G711_ALAW
+#else
+ MEDIA_MIMETYPE_AUDIO_G711_MLAW
+#endif
+ )).build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
+ .withFields({C2F(mSampleRate, value).inRange(8000, 48000)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).equalTo(1)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).equalTo(64000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftG711Dec::C2SoftG711Dec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl) {
+}
+
+C2SoftG711Dec::~C2SoftG711Dec() {
+ onRelease();
+}
+
+c2_status_t C2SoftG711Dec::onInit() {
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+c2_status_t C2SoftG711Dec::onStop() {
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+void C2SoftG711Dec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftG711Dec::onRelease() {
+}
+
+c2_status_t C2SoftG711Dec::onFlush_sm() {
+ return onStop();
+}
+
+void C2SoftG711Dec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
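+ // Every 8-bit companded G.711 sample expands to one signed 16-bit PCM sample.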
+ int outSize = inSize * sizeof(int16_t);
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
+ if (inSize == 0) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ uint8_t *inputptr = const_cast<uint8_t *>(rView.data() + inOffset);
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ int16_t *outputptr = reinterpret_cast<int16_t *>(wView.data());
+
+#ifdef ALAW
+ DecodeALaw(outputptr, inputptr, inSize);
+#else
+ DecodeMLaw(outputptr, inputptr, inSize);
+#endif
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+c2_status_t C2SoftG711Dec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+#ifdef ALAW
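+// Expands 8-bit A-law samples to 16-bit linear PCM (ITU-T G.711): the code is
+// XORed with 0x55, split into a 3-bit exponent and a 4-bit mantissa, and the
+// sign comes from bit 7. For example, 0xD5 decodes to +8 and 0x55 to -8.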
+void C2SoftG711Dec::DecodeALaw(
+ int16_t *out, const uint8_t *in, size_t inSize) {
+ while (inSize > 0) {
+ inSize--;
+ int32_t x = *in++;
+
+ int32_t ix = x ^ 0x55;
+ ix &= 0x7f;
+
+ int32_t iexp = ix >> 4;
+ int32_t mant = ix & 0x0f;
+
+ if (iexp > 0) {
+ mant += 16;
+ }
+
+ mant = (mant << 4) + 8;
+
+ if (iexp > 1) {
+ mant = mant << (iexp - 1);
+ }
+
+ *out++ = (x > 127) ? mant : -mant;
+ }
+}
+#else
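+// Expands 8-bit mu-law samples to 16-bit linear PCM (ITU-T G.711): the code is
+// complemented, split into an exponent and a mantissa, and the 132 (= 4 * 33)
+// term removes the mu-law bias. For example, 0xFF and 0x7F both decode to 0.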
+void C2SoftG711Dec::DecodeMLaw(
+ int16_t *out, const uint8_t *in, size_t inSize) {
+ while (inSize > 0) {
+ inSize--;
+ int32_t x = *in++;
+
+ int32_t mantissa = ~x;
+ int32_t exponent = (mantissa >> 4) & 7;
+ int32_t segment = exponent + 1;
+ mantissa &= 0x0f;
+
+ int32_t step = 4 << segment;
+
+ int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
+
+ *out++ = (x < 0x80) ? -abs : abs;
+ }
+}
+#endif
+
+class C2SoftG711DecFactory : public C2ComponentFactory {
+public:
+ C2SoftG711DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftG711Dec(COMPONENT_NAME, id,
+ std::make_shared<C2SoftG711Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftG711Dec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftG711Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftG711DecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftG711DecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/g711/C2SoftG711Dec.h b/media/codec2/components/g711/C2SoftG711Dec.h
new file mode 100644
index 0000000..23e8ffc
--- /dev/null
+++ b/media/codec2/components/g711/C2SoftG711Dec.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_G711_DEC_H_
+#define ANDROID_C2_SOFT_G711_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+namespace android {
+
+struct C2SoftG711Dec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftG711Dec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftG711Dec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+private:
+ std::shared_ptr<IntfImpl> mIntf;
+ bool mSignalledOutputEos;
+
+#ifdef ALAW
+ void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
+#else
+ void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
+#endif
+
+ C2_DO_NOT_COPY(C2SoftG711Dec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_G711_DEC_H_
diff --git a/media/codec2/components/g711/MODULE_LICENSE_APACHE2 b/media/codec2/components/g711/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/g711/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/g711/NOTICE b/media/codec2/components/g711/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/g711/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/gsm/Android.bp b/media/codec2/components/gsm/Android.bp
new file mode 100644
index 0000000..8075747
--- /dev/null
+++ b/media/codec2/components/gsm/Android.bp
@@ -0,0 +1,11 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2gsmdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftGsmDec.cpp"],
+
+ static_libs: ["libgsm"],
+}
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp
new file mode 100644
index 0000000..7101c79
--- /dev/null
+++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftGsmDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftGsmDec.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.gsm.decoder";
+
+class C2SoftGsmDec::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_MSGSM))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
+ .withFields({C2F(mSampleRate, value).equalTo(8000)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).equalTo(1)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 13200))
+ .withFields({C2F(mBitrate, value).equalTo(13200)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 1024 / MSGSM_IN_FRM_SZ * MSGSM_IN_FRM_SZ))
+ .build());
+ }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftGsmDec::C2SoftGsmDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mGsm(nullptr) {
+}
+
+C2SoftGsmDec::~C2SoftGsmDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftGsmDec::onInit() {
+ if (!mGsm) mGsm = gsm_create();
+ if (!mGsm) return C2_NO_MEMORY;
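+ // Enable WAV49 packing so gsm_decode() consumes the alternating 33/32-byte
+ // halves of each 65-byte MSGSM frame (see decodeGSM() below).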
+ int msopt = 1;
+ (void)gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+ mSignalledError = false;
+ mSignalledEos = false;
+ return C2_OK;
+}
+
+c2_status_t C2SoftGsmDec::onStop() {
+ if (mGsm) {
+ gsm_destroy(mGsm);
+ mGsm = nullptr;
+ }
+ if (!mGsm) mGsm = gsm_create();
+ if (!mGsm) return C2_NO_MEMORY;
+ int msopt = 1;
+ (void)gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+ mSignalledError = false;
+ mSignalledEos = false;
+ return C2_OK;
+}
+
+void C2SoftGsmDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftGsmDec::onRelease() {
+ if (mGsm) {
+ gsm_destroy(mGsm);
+ mGsm = nullptr;
+ }
+}
+
+c2_status_t C2SoftGsmDec::onFlush_sm() {
+ return onStop();
+}
+
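+// Decodes whole MSGSM (WAV49) frames: each 65-byte frame carries two GSM 06.10
+// frames of 33 and 32 bytes and produces 2 * 160 samples of 16-bit PCM. Returns
+// the number of output bytes, or 0 if the input is not frame-aligned or the
+// output buffer is too small.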
+static size_t decodeGSM(gsm handle, int16_t *out, size_t outCapacity,
+ uint8_t *in, size_t inSize) {
+ size_t outSize = 0;
+
+ if (inSize % MSGSM_IN_FRM_SZ == 0
+ && (inSize / MSGSM_IN_FRM_SZ * MSGSM_OUT_FRM_SZ * sizeof(*out)
+ <= outCapacity)) {
+ while (inSize > 0) {
+ gsm_decode(handle, in, out);
+ in += FRGSM_IN_FRM_SZ;
+ inSize -= FRGSM_IN_FRM_SZ;
+ out += FRGSM_OUT_FRM_SZ;
+ outSize += FRGSM_OUT_FRM_SZ;
+
+ gsm_decode(handle, in, out);
+ in += FRGSM_IN_FRM_SZ_MINUS_1;
+ inSize -= FRGSM_IN_FRM_SZ_MINUS_1;
+ out += FRGSM_OUT_FRM_SZ;
+ outSize += FRGSM_OUT_FRM_SZ;
+ }
+ }
+
+ return outSize * sizeof(int16_t);
+}
+
+void C2SoftGsmDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+
+ if (inSize == 0) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
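+ // Each 65-byte MSGSM frame yields 320 samples, i.e. 640 bytes of 16-bit PCM.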
+ size_t outCapacity = (inSize / MSGSM_IN_FRM_SZ ) * MSGSM_OUT_FRM_SZ * sizeof(int16_t);
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ int16_t *output = reinterpret_cast<int16_t *>(wView.data());
+ uint8_t *input = const_cast<uint8_t *>(rView.data() + inOffset);
+ size_t outSize = decodeGSM(mGsm, output, outCapacity, input, inSize);
+ if (!outSize) {
+ ALOGE("encountered improper insize or outsize");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ ALOGV("out buffer attr. size %zu", outSize);
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (eos) {
+ mSignalledEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+c2_status_t C2SoftGsmDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+class C2SoftGSMDecFactory : public C2ComponentFactory {
+public:
+ C2SoftGSMDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftGsmDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftGsmDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftGsmDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftGsmDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftGSMDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftGSMDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.h b/media/codec2/components/gsm/C2SoftGsmDec.h
new file mode 100644
index 0000000..2b209fe
--- /dev/null
+++ b/media/codec2/components/gsm/C2SoftGsmDec.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_GSM_DEC_H_
+#define ANDROID_C2_SOFT_GSM_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+extern "C" {
+ #include "gsm.h"
+}
+
+namespace android {
+
+#define FRGSM_IN_FRM_SZ 33
+#define FRGSM_IN_FRM_SZ_MINUS_1 32
+#define FRGSM_OUT_FRM_SZ 160
+#define MSGSM_IN_FRM_SZ (FRGSM_IN_FRM_SZ + FRGSM_IN_FRM_SZ_MINUS_1)
+#define MSGSM_OUT_FRM_SZ (FRGSM_OUT_FRM_SZ * 2)
+
+struct C2SoftGsmDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftGsmDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftGsmDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ private:
+ std::shared_ptr<IntfImpl> mIntf;
+ gsm mGsm;
+ bool mSignalledError;
+ bool mSignalledEos;
+
+ C2_DO_NOT_COPY(C2SoftGsmDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_GSM_DEC_H_
diff --git a/media/codec2/components/gsm/MODULE_LICENSE_APACHE2 b/media/codec2/components/gsm/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/gsm/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/gsm/NOTICE b/media/codec2/components/gsm/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/gsm/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
new file mode 100644
index 0000000..519de68
--- /dev/null
+++ b/media/codec2/components/hevc/Android.bp
@@ -0,0 +1,16 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2hevcdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ srcs: ["C2SoftHevcDec.cpp"],
+
+ static_libs: ["libhevcdec"],
+
+ include_dirs: [
+ "external/libhevc/decoder",
+ "external/libhevc/common",
+ ],
+}
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
new file mode 100644
index 0000000..99892ce
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -0,0 +1,976 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftHevcDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftHevcDec.h"
+#include "ihevcd_cxa.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.hevc.decoder";
+
+} // namespace
+
+class C2SoftHevcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+ MEDIA_MIMETYPE_VIDEO_HEVC) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_HEVC_MAIN, C2Config::LEVEL_HEVC_MAIN_5_1))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_HEVC_MAIN,
+ C2Config::PROFILE_HEVC_MAIN_STILL}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_HEVC_MAIN_1,
+ C2Config::LEVEL_HEVC_MAIN_2, C2Config::LEVEL_HEVC_MAIN_2_1,
+ C2Config::LEVEL_HEVC_MAIN_3, C2Config::LEVEL_HEVC_MAIN_3_1,
+ C2Config::LEVEL_HEVC_MAIN_4, C2Config::LEVEL_HEVC_MAIN_4_1,
+ C2Config::LEVEL_HEVC_MAIN_5, C2Config::LEVEL_HEVC_MAIN_5_1,
+ C2Config::LEVEL_HEVC_MAIN_5_2, C2Config::LEVEL_HEVC_HIGH_4,
+ C2Config::LEVEL_HEVC_HIGH_4_1, C2Config::LEVEL_HEVC_HIGH_5,
+ C2Config::LEVEL_HEVC_HIGH_5_1, C2Config::LEVEL_HEVC_HIGH_5_2
+ })
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+ {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
+ C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 2
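+ // (3072 = 64 * 64 * 3 / 2 / 2, i.e. half the raw YUV 4:2:0 size of a 64x64 block)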
+ me.set().value = (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
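+ // Clamp any value beyond the known enum range to the corresponding _OTHER constant.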
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+};
+
+static size_t getCpuCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+ (void) ctxt;
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *mem) {
+ (void) ctxt;
+ free(mem);
+}
+
+C2SoftHevcDec::C2SoftHevcDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mOutBufferFlush(nullptr),
+ mIvColorformat(IV_YUV_420P),
+ mWidth(320),
+ mHeight(240),
+ mHeaderDecoded(false) {
+}
+
+C2SoftHevcDec::~C2SoftHevcDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftHevcDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftHevcDec::onStop() {
+ if (OK != resetDecoder()) return C2_CORRUPTED;
+ resetPlugin();
+ return C2_OK;
+}
+
+void C2SoftHevcDec::onReset() {
+ (void) onStop();
+}
+
+void C2SoftHevcDec::onRelease() {
+ (void) deleteDecoder();
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+}
+
+c2_status_t C2SoftHevcDec::onFlush_sm() {
+ if (OK != setFlushMode()) return C2_CORRUPTED;
+
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
+ uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
+ mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize);
+ if (!mOutBufferFlush) {
+ ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize);
+ return C2_NO_MEMORY;
+ }
+
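+ // Run the decoder with no input, discarding frames into the temporary flush
+ // buffer, until it reports that no more output is pending.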
+ while (true) {
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+
+ setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (0 == s_decode_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+
+ return C2_OK;
+}
+
+status_t C2SoftHevcDec::createDecoder() {
+ ivdext_create_ip_t s_create_ip;
+ ivdext_create_op_t s_create_op;
+
+ s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+ s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+ s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+ s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr;
+ s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_create_ip,
+ &s_create_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_create_op.s_ivd_create_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
+ mDecHandle->pv_fxns = (void *)ivdec_api_function;
+ mDecHandle->u4_size = sizeof(iv_obj_t);
+
+ return OK;
+}
+
+status_t C2SoftHevcDec::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+
+ s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_num_cores_ip.u4_num_cores = mNumCores;
+ s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_num_cores_ip,
+ &s_set_num_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
+ ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
+ ivd_ctl_set_config_op_t s_set_dyn_params_op;
+
+ s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
+ s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+ s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
+ s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_dyn_params_ip,
+ &s_set_dyn_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftHevcDec::getVersion() {
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ UWORD8 au1_buf[512];
+
+ s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_get_versioninfo_ip.pv_version_buffer = au1_buf;
+ s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf);
+ s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_versioninfo_ip,
+ &s_get_versioninfo_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__,
+ s_get_versioninfo_op.u4_error_code);
+ } else {
+ ALOGV("ittiam decoder version number: %s",
+ (char *) s_get_versioninfo_ip.pv_version_buffer);
+ }
+
+ return OK;
+}
+
+status_t C2SoftHevcDec::initDecoder() {
+ if (OK != createDecoder()) return UNKNOWN_ERROR;
+ mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
+ mStride = ALIGN64(mWidth);
+ mSignalledError = false;
+ resetPlugin();
+ (void) setNumCores();
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return UNKNOWN_ERROR;
+ (void) getVersion();
+
+ return OK;
+}
+
+bool C2SoftHevcDec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker) {
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
+ size_t lumaSize = displayStride * displayHeight;
+ size_t chromaSize = lumaSize >> 2;
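+ // For illustration only: a 1920x1088 aligned frame gives a 2088960-byte luma
+ // plane and 522240 bytes for each chroma plane in this YV12 (4:2:0) layout.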
+
+ ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+ if (inBuffer) {
+ ps_decode_ip->u4_ts = tsMarker;
+ ps_decode_ip->pv_stream_buffer = const_cast<uint8_t *>(inBuffer->data() + inOffset);
+ ps_decode_ip->u4_num_Bytes = inSize;
+ } else {
+ ps_decode_ip->u4_ts = 0;
+ ps_decode_ip->pv_stream_buffer = nullptr;
+ ps_decode_ip->u4_num_Bytes = 0;
+ }
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
+ if (outBuffer) {
+ if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) {
+ ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
+ outBuffer->width(), outBuffer->height(), displayStride, displayHeight);
+ return false;
+ }
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ } else {
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush;
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize;
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
+ }
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+ ps_decode_op->u4_output_present = 0;
+
+ return true;
+}
+
+bool C2SoftHevcDec::getVuiParams() {
+ ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
+ ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+
+ s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
+ s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_vui_params_ip.e_sub_cmd =
+ (IVD_CONTROL_API_COMMAND_TYPE_T) IHEVCD_CXA_CMD_CTL_GET_VUI_PARAMS;
+ s_get_vui_params_op.u4_size = sizeof(ivdext_ctl_get_vui_params_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_vui_params_ip,
+ &s_get_vui_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__, s_get_vui_params_op.u4_error_code);
+ return false;
+ }
+
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = s_get_vui_params_op.u1_colour_primaries;
+ vuiColorAspects.transfer = s_get_vui_params_op.u1_transfer_characteristics;
+ vuiColorAspects.coeffs = s_get_vui_params_op.u1_matrix_coefficients;
+ vuiColorAspects.fullRange = s_get_vui_params_op.u1_video_full_range_flag;
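+ // (For illustration: BT.709 content typically signals primaries=1, transfer=1,
+ // coeffs=1 and fullRange=0 in the VUI.)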
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+ return true;
+}
+
+status_t C2SoftHevcDec::setFlushMode() {
+ ivd_ctl_flush_ip_t s_set_flush_ip;
+ ivd_ctl_flush_op_t s_set_flush_op;
+
+ s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_flush_ip,
+ &s_set_flush_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftHevcDec::resetDecoder() {
+ ivd_ctl_reset_ip_t s_reset_ip;
+ ivd_ctl_reset_op_t s_reset_op;
+
+ s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_reset_ip,
+ &s_reset_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mStride = 0;
+ (void) setNumCores();
+ mSignalledError = false;
+ mHeaderDecoded = false;
+ return OK;
+}
+
+void C2SoftHevcDec::resetPlugin() {
+ mSignalledOutputEos = false;
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+status_t C2SoftHevcDec::deleteDecoder() {
+ if (mDecHandle) {
+ ivdext_delete_ip_t s_delete_ip;
+ ivdext_delete_op_t s_delete_op;
+
+ s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+ s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+ s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_delete_ip,
+ &s_delete_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_delete_op.s_ivd_delete_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = nullptr;
+ }
+
+ return OK;
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftHevcDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mOutBlock),
+ C2Rect(mWidth, mHeight));
+ mOutBlock = nullptr;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftHevcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (!mDecHandle) {
+ ALOGE("not supposed to be here, invalid decoder context");
+ return C2_CORRUPTED;
+ }
+ if (mStride != ALIGN64(mWidth)) {
+ mStride = ALIGN64(mWidth);
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED;
+ }
+ if (mOutBlock &&
+ (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) {
+ mOutBlock.reset();
+ }
+ if (!mOutBlock) {
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ return err;
+ }
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mOutBlock->width(), mOutBlock->height(), mStride, mHeight);
+ }
+
+ return C2_OK;
+}
+
+// TODO: can overall error checking be improved?
+// TODO: allow configuration of color format and usage for graphic buffers instead
+// of hard-coding them to HAL_PIXEL_FORMAT_YV12
+// TODO: pass color aspects information to the surface
+// TODO: test support for dynamic changes in resolution
+// TODO: verify that the decoder sent back all frames
+void C2SoftHevcDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ bool hasPicture = false;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ inOffset + inPos, inSize - inPos, workIndex)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (false == mHeaderDecoded) {
+ /* Decode header and get dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ }
+ WORD32 delay;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ WORD32 decodeTime;
+ GETTIME(&mTimeEnd, nullptr);
+ TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+ ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+ s_decode_op.u4_num_bytes_consumed);
+ if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("allocation failure in decoder");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGV("resolution changed");
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ resetDecoder();
+ resetPlugin();
+ work->workletsProcessed = 0u;
+
+ /* Decode header and get new dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ }
+ if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (mHeaderDecoded == false) {
+ mHeaderDecoded = true;
+ setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ }
+ if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
+ mWidth = s_decode_op.u4_pic_wd;
+ mHeight = s_decode_op.u4_pic_ht;
+ CHECK_EQ(0u, s_decode_op.u4_output_present);
+
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err =
+ mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Cannot set width and height");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ continue;
+ }
+ }
+ (void) getVuiParams();
+ hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ }
+ if (0 == s_decode_op.u4_num_bytes_consumed) {
+ ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
+ break;
+ }
+ inPos += s_decode_op.u4_num_bytes_consumed;
+ if (hasPicture && (inSize - inPos)) {
+ ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
+ (int)inSize - (int)inPos);
+ break;
+ }
+ }
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!hasPicture) {
+ fillEmptyWork(work);
+ }
+}
+
+c2_status_t C2SoftHevcDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (OK != setFlushMode()) return C2_CORRUPTED;
+ while (true) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return C2_CORRUPTED;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ return C2_CORRUPTED;
+ }
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ return C2_CORRUPTED;
+ }
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ } else {
+ fillEmptyWork(work);
+ break;
+ }
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftHevcDecFactory : public C2ComponentFactory {
+public:
+ C2SoftHevcDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftHevcDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftHevcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftHevcDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftHevcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftHevcDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftHevcDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
new file mode 100644
index 0000000..75111fc
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_HEVC_DEC_H_
+#define ANDROID_C2_SOFT_HEVC_DEC_H_
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+
+#include "ihevc_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+
+namespace android {
+
+#define ivdec_api_function ihevcd_cxa_api_function
+#define ivdext_create_ip_t ihevcd_cxa_create_ip_t
+#define ivdext_create_op_t ihevcd_cxa_create_op_t
+#define ivdext_delete_ip_t ihevcd_cxa_delete_ip_t
+#define ivdext_delete_op_t ihevcd_cxa_delete_op_t
+#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
+#define ivdext_ctl_get_vui_params_ip_t ihevcd_cxa_ctl_get_vui_params_ip_t
+#define ivdext_ctl_get_vui_params_op_t ihevcd_cxa_ctl_get_vui_params_op_t
+#define ALIGN64(x) ((((x) + 63) >> 6) << 6)
+#define MAX_NUM_CORES 4
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define GETTIME(a, b) gettimeofday(a, b);
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
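+// For illustration only: ALIGN64 rounds up to the next multiple of 64,
+// e.g. ALIGN64(1080) = ((1080 + 63) >> 6) << 6 = 1088.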
+
+struct C2SoftHevcDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftHevcDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftHevcDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ private:
+ status_t createDecoder();
+ status_t setNumCores();
+ status_t setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode);
+ status_t getVersion();
+ status_t initDecoder();
+ bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker);
+ bool getVuiParams();
+ // TODO: This is not the right place for the colorAspects functions. They should
+ // be part of c2-vndk so that they can be accessed by all video plugins;
+ // until then, make them feel at home.
+ bool colorAspectsDiffer(const ColorAspects &a, const ColorAspects &b);
+ void updateFinalColorAspects(
+ const ColorAspects &otherAspects, const ColorAspects &preferredAspects);
+ status_t handleColorAspectsChange();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ status_t setFlushMode();
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+ status_t resetDecoder();
+ void resetPlugin();
+ status_t deleteDecoder();
+
+ // TODO: This is not the right place for this enum. It should be part of
+ // c2-vndk so that it can be accessed by all video plugins;
+ // until then, make it feel at home.
+ enum {
+ kNotSupported,
+ kPreferBitstream,
+ kPreferContainer,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ iv_obj_t *mDecHandle;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+ uint8_t *mOutBufferFlush;
+
+ size_t mNumCores;
+ IV_COLOR_FORMAT_T mIvColorformat;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mStride;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+ bool mHeaderDecoded;
+
+ // Color aspects. These are stored as ISO values so that changes can be detected
+ // without converting them to C2 values for every frame.
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects (ISO value 2 means "unspecified")
+ VuiColorAspects()
+ : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
+ // profile
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+
+ C2_DO_NOT_COPY(C2SoftHevcDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_HEVC_DEC_H_
diff --git a/media/codec2/components/mp3/Android.bp b/media/codec2/components/mp3/Android.bp
new file mode 100644
index 0000000..6e013b8
--- /dev/null
+++ b/media/codec2/components/mp3/Android.bp
@@ -0,0 +1,11 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2mp3dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftMp3Dec.cpp"],
+
+ static_libs: ["libstagefright_mp3dec"],
+}
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
new file mode 100644
index 0000000..c8b8397
--- /dev/null
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -0,0 +1,558 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftMp3Dec"
+#include <log/log.h>
+
+#include <numeric>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftMp3Dec.h"
+#include "pvmp3decoder_api.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.mp3.decoder";
+
+class C2SoftMP3::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_MPEG))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
+ .withFields({C2F(mSampleRate, value).oneOf({8000, 11025, 12000, 16000,
+ 22050, 24000, 32000, 44100, 48000})})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 2))
+ .withFields({C2F(mChannelCount, value).inRange(1, 2)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(8000, 320000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftMP3::C2SoftMP3(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mConfig(nullptr),
+ mDecoderBuf(nullptr) {
+}
+
+C2SoftMP3::~C2SoftMP3() {
+ onRelease();
+}
+
+c2_status_t C2SoftMP3::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+c2_status_t C2SoftMP3::onStop() {
+ // Make sure that the next output buffer does not still
+ // depend on fragments from the last one decoded.
+ pvmp3_InitDecoder(mConfig, mDecoderBuf);
+ mSignalledError = false;
+ mIsFirst = true;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+
+ return C2_OK;
+}
+
+void C2SoftMP3::onReset() {
+ (void)onStop();
+}
+
+void C2SoftMP3::onRelease() {
+ mGaplessBytes = false;
+ if (mDecoderBuf) {
+ free(mDecoderBuf);
+ mDecoderBuf = nullptr;
+ }
+
+ if (mConfig) {
+ delete mConfig;
+ mConfig = nullptr;
+ }
+}
+
+status_t C2SoftMP3::initDecoder() {
+ mConfig = new tPVMP3DecoderExternal{};
+ if (!mConfig) return NO_MEMORY;
+ mConfig->equalizerType = flat;
+ mConfig->crcEnabled = false;
+
+ size_t memRequirements = pvmp3_decoderMemRequirements();
+ mDecoderBuf = malloc(memRequirements);
+ if (!mDecoderBuf) return NO_MEMORY;
+
+ pvmp3_InitDecoder(mConfig, mDecoderBuf);
+
+ mIsFirst = true;
+ mGaplessBytes = false;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mAnchorTimeStamp = 0;
+ mProcessedSamples = 0;
+
+ return OK;
+}
+
+/* The code below is borrowed from ./test/mp3reader.cpp */
+static bool parseMp3Header(uint32_t header, size_t *frame_size,
+ uint32_t *out_sampling_rate = nullptr,
+ uint32_t *out_channels = nullptr,
+ uint32_t *out_bitrate = nullptr,
+ uint32_t *out_num_samples = nullptr) {
+ *frame_size = 0;
+ if (out_sampling_rate) *out_sampling_rate = 0;
+ if (out_channels) *out_channels = 0;
+ if (out_bitrate) *out_bitrate = 0;
+ if (out_num_samples) *out_num_samples = 1152;
+
+ if ((header & 0xffe00000) != 0xffe00000) return false;
+
+ unsigned version = (header >> 19) & 3;
+ if (version == 0x01) return false;
+
+ unsigned layer = (header >> 17) & 3;
+ if (layer == 0x00) return false;
+
+ unsigned bitrate_index = (header >> 12) & 0x0f;
+ if (bitrate_index == 0 || bitrate_index == 0x0f) return false;
+
+ unsigned sampling_rate_index = (header >> 10) & 3;
+ if (sampling_rate_index == 3) return false;
+
+ static const int kSamplingRateV1[] = { 44100, 48000, 32000 };
+ int sampling_rate = kSamplingRateV1[sampling_rate_index];
+ if (version == 2 /* V2 */) {
+ sampling_rate /= 2;
+ } else if (version == 0 /* V2.5 */) {
+ sampling_rate /= 4;
+ }
+
+ unsigned padding = (header >> 9) & 1;
+
+ if (layer == 3) { // layer I
+ static const int kBitrateV1[] =
+ {
+ 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448
+ };
+ static const int kBitrateV2[] =
+ {
+ 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256
+ };
+
+ int bitrate = (version == 3 /* V1 */) ? kBitrateV1[bitrate_index - 1] :
+ kBitrateV2[bitrate_index - 1];
+
+ if (out_bitrate) {
+ *out_bitrate = bitrate;
+ }
+ *frame_size = (12000 * bitrate / sampling_rate + padding) * 4;
+ if (out_num_samples) {
+ *out_num_samples = 384;
+ }
+ } else { // layer II or III
+ static const int kBitrateV1L2[] =
+ {
+ 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384
+ };
+
+ static const int kBitrateV1L3[] =
+ {
+ 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320
+ };
+
+ static const int kBitrateV2[] =
+ {
+ 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160
+ };
+
+ int bitrate;
+ if (version == 3 /* V1 */) {
+ bitrate = (layer == 2 /* L2 */) ? kBitrateV1L2[bitrate_index - 1] :
+ kBitrateV1L3[bitrate_index - 1];
+
+ if (out_num_samples) {
+ *out_num_samples = 1152;
+ }
+ } else { // V2 (or 2.5)
+ bitrate = kBitrateV2[bitrate_index - 1];
+ if (out_num_samples) {
+ *out_num_samples = (layer == 1 /* L3 */) ? 576 : 1152;
+ }
+ }
+
+ if (out_bitrate) {
+ *out_bitrate = bitrate;
+ }
+
+ if (version == 3 /* V1 */) {
+ *frame_size = 144000 * bitrate / sampling_rate + padding;
+ } else { // V2 or V2.5
+ size_t tmp = (layer == 1 /* L3 */) ? 72000 : 144000;
+ *frame_size = tmp * bitrate / sampling_rate + padding;
+ }
+ }
+
+ if (out_sampling_rate) {
+ *out_sampling_rate = sampling_rate;
+ }
+
+ if (out_channels) {
+ int channel_mode = (header >> 6) & 3;
+
+ *out_channels = (channel_mode == 3) ? 1 : 2;
+ }
+
+ return true;
+}
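+
+// Illustrative example (values chosen for this comment only): a 44.1 kHz,
+// 128 kbps, MPEG-1 Layer III header with no padding gives
+// *frame_size = 144000 * 128 / 44100 + 0 = 417 bytes and 1152 samples per frame.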
+
+static uint32_t U32_AT(const uint8_t *ptr) {
+ return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
+}
+
+static status_t calculateOutSize(uint8 *header, size_t inSize,
+ std::vector<size_t> *decodedSizes) {
+ uint32_t channels;
+ uint32_t numSamples;
+ size_t frameSize;
+ size_t totalInSize = 0;
+
+ while (totalInSize + 4 < inSize) {
+ if (!parseMp3Header(U32_AT(header + totalInSize), &frameSize,
+ nullptr, &channels, nullptr, &numSamples)) {
+ ALOGE("Error in parse mp3 header during outSize estimation");
+ return UNKNOWN_ERROR;
+ }
+ totalInSize += frameSize;
+ decodedSizes->push_back(numSamples * channels * sizeof(int16_t));
+ }
+
+ if (decodedSizes->empty()) return UNKNOWN_ERROR;
+
+ return OK;
+}
+
+c2_status_t C2SoftMP3::onFlush_sm() {
+ return onStop();
+}
+
+c2_status_t C2SoftMP3::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+// TODO: Can overall error checking be improved? For example, checking the validity
+// of work, the pool pointer, work->input.buffers.size() == 1, ...
+// TODO: Blind removal of 529 samples from the output may not work, because an
+// MPEG Layer I frame holds only 384 samples; the trim could produce a negative
+// size and cause segfaults. The soft OMX MP3 plugin may have the same problem (CHECK!)
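+// Rough illustration of the concern above: a Layer I frame decodes to only
+// 384 samples, so trimming 529 samples would leave 384 - 529 = -145 samples,
+// i.e. a negative (invalid) size.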
+void C2SoftMP3::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+
+ if (inSize == 0 && (!mGaplessBytes || !eos)) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ return;
+ }
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
+ int32_t numChannels = mConfig->num_channels;
+ size_t calOutSize;
+ std::vector<size_t> decodedSizes;
+ if (inSize && OK != calculateOutSize(const_cast<uint8 *>(rView.data()),
+ inSize, &decodedSizes)) {
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ calOutSize = std::accumulate(decodedSizes.begin(), decodedSizes.end(), 0);
+ if (eos) {
+ calOutSize += kPVMP3DecoderDelay * numChannels * sizeof(int16_t);
+ }
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(calOutSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ int outSize = 0;
+ int outOffset = 0;
+ auto it = decodedSizes.begin();
+ size_t inPos = 0;
+ int32_t samplingRate = mConfig->samplingRate;
+ while (inPos < inSize) {
+ if (it == decodedSizes.end()) {
+ ALOGE("unexpected trailing bytes, ignoring them");
+ break;
+ }
+
+ mConfig->pInputBuffer = const_cast<uint8 *>(rView.data() + inPos);
+ mConfig->inputBufferCurrentLength = (inSize - inPos);
+ mConfig->inputBufferMaxLength = 0;
+ mConfig->inputBufferUsedLength = 0;
+ mConfig->outputFrameSize = (calOutSize - outSize);
+ mConfig->pOutputBuffer = reinterpret_cast<int16_t *> (wView.data() + outSize);
+
+ ERROR_CODE decoderErr;
+ if ((decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf))
+ != NO_DECODING_ERROR) {
+ ALOGE("mp3 decoder returned error %d", decoderErr);
+ if (decoderErr != NO_ENOUGH_MAIN_DATA_ERROR
+ && decoderErr != SIDE_INFO_ERROR) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ // This is recoverable, just ignore the current frame and
+ // play silence instead.
+ ALOGV("ignoring error and sending silence");
+ if (mConfig->outputFrameSize == 0) {
+ mConfig->outputFrameSize = *it / sizeof(int16_t);
+ }
+ memset(mConfig->pOutputBuffer, 0, mConfig->outputFrameSize * sizeof(int16_t));
+ } else if (mConfig->samplingRate != samplingRate
+ || mConfig->num_channels != numChannels) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ samplingRate, mConfig->samplingRate,
+ numChannels, mConfig->num_channels);
+ samplingRate = mConfig->samplingRate;
+ numChannels = mConfig->num_channels;
+
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, samplingRate);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, numChannels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ if (*it != mConfig->outputFrameSize * sizeof(int16_t)) {
+ ALOGE("panic, parsed size does not match decoded size");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ outSize += mConfig->outputFrameSize * sizeof(int16_t);
+ inPos += mConfig->inputBufferUsedLength;
+ it++;
+ }
+ if (mIsFirst) {
+ mIsFirst = false;
+ mGaplessBytes = true;
+ // The decoder delay is 529 samples, so trim that many samples off
+ // the start of the first output buffer. This essentially makes this
+ // decoder have zero delay, which the rest of the pipeline assumes.
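+ // For illustration: with stereo 16-bit output this skips
+ // 529 * 2 * 2 = 2116 bytes at the start of the block.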
+ outOffset = kPVMP3DecoderDelay * numChannels * sizeof(int16_t);
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ }
+ if (eos) {
+ if (calOutSize >=
+ outSize + kPVMP3DecoderDelay * numChannels * sizeof(int16_t)) {
+ if (!memset(reinterpret_cast<int16_t*>(wView.data() + outSize), 0,
+ kPVMP3DecoderDelay * numChannels * sizeof(int16_t))) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ ALOGV("Adding 529 samples at end");
+ mGaplessBytes = false;
+ outSize += kPVMP3DecoderDelay * numChannels * sizeof(int16_t);
+ }
+ }
+
+ uint64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+ mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
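+ // For illustration: one 1152-sample frame at 44100 Hz advances the next
+ // output timestamp by 1152 * 1000000 / 44100, i.e. about 26122 us.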
+ ALOGV("out buffer attr. offset %d size %d timestamp %u", outOffset, outSize - outOffset,
+ (uint32_t)(mAnchorTimeStamp + outTimeStamp));
+ decodedSizes.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(block, outOffset, outSize - outOffset));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+class C2SoftMp3DecFactory : public C2ComponentFactory {
+public:
+ C2SoftMp3DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftMP3(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftMP3::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftMP3::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftMP3::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftMp3DecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftMp3DecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
+
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h
new file mode 100644
index 0000000..402bdc4
--- /dev/null
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_MP3_DEC_H_
+#define ANDROID_C2_SOFT_MP3_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+struct tPVMP3DecoderExternal;
+
+bool parseMp3Header(uint32_t header, size_t *frame_size,
+ uint32_t *out_sampling_rate = nullptr,
+ uint32_t *out_channels = nullptr,
+ uint32_t *out_bitrate = nullptr,
+ uint32_t *out_num_samples = nullptr);
+
+namespace android {
+
+struct C2SoftMP3 : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftMP3(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftMP3();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ enum {
+ kPVMP3DecoderDelay = 529 // samples
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ tPVMP3DecoderExternal *mConfig;
+ void *mDecoderBuf;
+
+ bool mIsFirst;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ bool mGaplessBytes;
+ uint64_t mAnchorTimeStamp;
+ uint64_t mProcessedSamples;
+
+ status_t initDecoder();
+
+ C2_DO_NOT_COPY(C2SoftMP3);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_MP3_DEC_H_
diff --git a/media/codec2/components/mp3/MODULE_LICENSE_APACHE2 b/media/codec2/components/mp3/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/mp3/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/mp3/NOTICE b/media/codec2/components/mp3/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/mp3/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/mp3/patent_disclaimer.txt b/media/codec2/components/mp3/patent_disclaimer.txt
new file mode 100644
index 0000000..b4bf11d
--- /dev/null
+++ b/media/codec2/components/mp3/patent_disclaimer.txt
@@ -0,0 +1,9 @@
+
+THIS IS NOT A GRANT OF PATENT RIGHTS.
+
+Google makes no representation or warranty that the codecs for which
+source code is made available hereunder are unencumbered by
+third-party patents. Those intending to use this source code in
+hardware or software products are advised that implementations of
+these codecs, including in open source software or shareware, may
+require patent licenses from the relevant patent holders.
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
new file mode 100644
index 0000000..85d867e
--- /dev/null
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -0,0 +1,16 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2mpeg2dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ srcs: ["C2SoftMpeg2Dec.cpp"],
+
+ static_libs: ["libmpeg2dec"],
+
+ include_dirs: [
+ "external/libmpeg2/decoder",
+ "external/libmpeg2/common",
+ ],
+}
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
new file mode 100644
index 0000000..da32ec0
--- /dev/null
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -0,0 +1,1069 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftMpeg2Dec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftMpeg2Dec.h"
+#include "impeg2d.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.mpeg2.decoder";
+
+class C2SoftMpeg2Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+ MEDIA_MIMETYPE_VIDEO_MPEG2) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(16, 1920, 4),
+ C2F(mSize, height).inRange(16, 1088, 4),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_MP2V_SIMPLE, C2Config::LEVEL_MP2V_HIGH))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_MP2V_SIMPLE,
+ C2Config::PROFILE_MP2V_MAIN}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_MP2V_LOW,
+ C2Config::LEVEL_MP2V_MAIN,
+ C2Config::LEVEL_MP2V_HIGH_1440,
+ C2Config::LEVEL_MP2V_HIGH})
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 1920, 2),
+ C2F(mSize, height).inRange(2, 1088, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 2))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ { C2ChromaOffsetStruct::ITU_YUV_420_0() },
+ 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 1
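+ // 384 bytes per 16x16 macroblock: 256 luma + 2 * 64 chroma samples (YUV 4:2:0)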
+ me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 384);
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+};
+
+static size_t getCpuCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+static void *ivd_aligned_malloc(WORD32 alignment, WORD32 size) {
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *mem) {
+ free(mem);
+}
+
+C2SoftMpeg2Dec::C2SoftMpeg2Dec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mMemRecords(nullptr),
+ mOutBufferDrain(nullptr),
+ mIvColorformat(IV_YUV_420P),
+ mWidth(320),
+ mHeight(240) {
+ // If input dump is enabled, create an empty dump file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+}
+
+C2SoftMpeg2Dec::~C2SoftMpeg2Dec() {
+ onRelease();
+}
+
+c2_status_t C2SoftMpeg2Dec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftMpeg2Dec::onStop() {
+ if (OK != resetDecoder()) return C2_CORRUPTED;
+ resetPlugin();
+ return C2_OK;
+}
+
+void C2SoftMpeg2Dec::onReset() {
+ (void) onStop();
+}
+
+void C2SoftMpeg2Dec::onRelease() {
+ (void) deleteDecoder();
+ if (mOutBufferDrain) {
+ ivd_aligned_free(mOutBufferDrain);
+ mOutBufferDrain = nullptr;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+ if (mMemRecords) {
+ ivd_aligned_free(mMemRecords);
+ mMemRecords = nullptr;
+ }
+}
+
+c2_status_t C2SoftMpeg2Dec::onFlush_sm() {
+ if (OK != setFlushMode()) return C2_CORRUPTED;
+
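+ // Drain any pending pictures into a temporary buffer; the flushed output is discarded.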
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
+ uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
+ mOutBufferDrain = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
+ if (!mOutBufferDrain) {
+ ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize);
+ return C2_NO_MEMORY;
+ }
+
+ while (true) {
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+
+ setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (0 == s_decode_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+
+ if (mOutBufferDrain) {
+ ivd_aligned_free(mOutBufferDrain);
+ mOutBufferDrain = nullptr;
+ }
+
+ return C2_OK;
+}
+
+status_t C2SoftMpeg2Dec::getNumMemRecords() {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_num_mem_rec_ip,
+ &s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x", s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::fillMemRecords() {
+ iv_mem_rec_t *ps_mem_rec = (iv_mem_rec_t *) ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (!ps_mem_rec) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+ memset(ps_mem_rec, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
+ for (size_t i = 0; i < mNumMemRecords; i++)
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+ mMemRecords = ps_mem_rec;
+
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
+ s_fill_mem_ip.u4_share_disp_buf = 0;
+ s_fill_mem_ip.e_output_format = mIvColorformat;
+ s_fill_mem_ip.u4_deinterlace = 1;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = mWidth;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = mHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size = sizeof(ivdext_fill_mem_rec_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_fill_mem_ip,
+ &s_fill_mem_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ CHECK_EQ(mNumMemRecords, s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled);
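+ // Allocate each memory record with the alignment and size requested by the decoder.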
+ for (size_t i = 0; i < mNumMemRecords; i++, ps_mem_rec++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (!ps_mem_rec->pv_base) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ return NO_MEMORY;
+ }
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::createDecoder() {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = mWidth;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
+ s_init_ip.u4_share_disp_buf = 0;
+ s_init_ip.u4_deinterlace = 1;
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
+
+ mDecHandle = (iv_obj_t *)mMemRecords[0].pv_base;
+ mDecHandle->pv_fxns = (void *)ivdec_api_function;
+ mDecHandle->u4_size = sizeof(iv_obj_t);
+
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_init_ip,
+ &s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+
+ s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_num_cores_ip.u4_num_cores = mNumCores;
+ s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_num_cores_ip,
+ &s_set_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::setParams(size_t stride) {
+ ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
+ ivd_ctl_set_config_op_t s_set_dyn_params_op;
+
+ s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
+ s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+ s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_set_dyn_params_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+ s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_dyn_params_ip,
+ &s_set_dyn_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::getVersion() {
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ UWORD8 au1_buf[512];
+
+ s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_get_versioninfo_ip.pv_version_buffer = au1_buf;
+ s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf);
+ s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_versioninfo_ip,
+ &s_get_versioninfo_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__,
+ s_get_versioninfo_op.u4_error_code);
+ } else {
+ ALOGV("ittiam decoder version number: %s",
+ (char *) s_get_versioninfo_ip.pv_version_buffer);
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::initDecoder() {
+ status_t ret = getNumMemRecords();
+ if (OK != ret) return ret;
+
+ ret = fillMemRecords();
+ if (OK != ret) return ret;
+
+ if (OK != createDecoder()) return UNKNOWN_ERROR;
+
+ mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
+ mStride = ALIGN64(mWidth);
+ mSignalledError = false;
+ resetPlugin();
+ (void) setNumCores();
+ if (OK != setParams(mStride)) return UNKNOWN_ERROR;
+ (void) getVersion();
+
+ return OK;
+}
+
+bool C2SoftMpeg2Dec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker) {
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
+ size_t lumaSize = displayStride * displayHeight;
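+ // YUV 4:2:0: each chroma plane is a quarter of the luma plane.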
+ size_t chromaSize = lumaSize >> 2;
+
+ ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+ if (inBuffer) {
+ ps_decode_ip->u4_ts = tsMarker;
+ ps_decode_ip->pv_stream_buffer = const_cast<uint8_t *>(inBuffer->data() + inOffset);
+ ps_decode_ip->u4_num_Bytes = inSize;
+ } else {
+ ps_decode_ip->u4_ts = 0;
+ ps_decode_ip->pv_stream_buffer = nullptr;
+ ps_decode_ip->u4_num_Bytes = 0;
+ }
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
+ if (outBuffer) {
+ if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) {
+ ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
+ outBuffer->width(), outBuffer->height(), displayStride, displayHeight);
+ return false;
+ }
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ } else {
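+ // No output block available (flush path): decode into the temporary drain buffer.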
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferDrain;
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferDrain + lumaSize;
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferDrain + lumaSize + chromaSize;
+ }
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ return true;
+}
+
+
+bool C2SoftMpeg2Dec::getSeqInfo() {
+ ivdext_ctl_get_seq_info_ip_t s_ctl_get_seq_info_ip;
+ ivdext_ctl_get_seq_info_op_t s_ctl_get_seq_info_op;
+
+ s_ctl_get_seq_info_ip.u4_size = sizeof(ivdext_ctl_get_seq_info_ip_t);
+ s_ctl_get_seq_info_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_get_seq_info_ip.e_sub_cmd =
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_GET_SEQ_INFO;
+ s_ctl_get_seq_info_op.u4_size = sizeof(ivdext_ctl_get_seq_info_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_ctl_get_seq_info_ip,
+ &s_ctl_get_seq_info_op);
+ if (status != IV_SUCCESS) {
+ ALOGW("Error in getting Sequence info: 0x%x", s_ctl_get_seq_info_op.u4_error_code);
+ return false;
+ }
+
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = s_ctl_get_seq_info_op.u1_colour_primaries;
+ vuiColorAspects.transfer = s_ctl_get_seq_info_op.u1_transfer_characteristics;
+ vuiColorAspects.coeffs = s_ctl_get_seq_info_op.u1_matrix_coefficients;
+ vuiColorAspects.fullRange = false; // mpeg2 video has limited range.
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+ return true;
+}
+
+status_t C2SoftMpeg2Dec::setFlushMode() {
+ ivd_ctl_flush_ip_t s_set_flush_ip;
+ ivd_ctl_flush_op_t s_set_flush_op;
+
+ s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_flush_ip,
+ &s_set_flush_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::resetDecoder() {
+ ivd_ctl_reset_ip_t s_reset_ip;
+ ivd_ctl_reset_op_t s_reset_op;
+
+ s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_reset_ip,
+ &s_reset_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ (void) setNumCores();
+ mStride = 0;
+ mSignalledError = false;
+
+ return OK;
+}
+
+void C2SoftMpeg2Dec::resetPlugin() {
+ mSignalledOutputEos = false;
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+status_t C2SoftMpeg2Dec::deleteDecoder() {
+ if (mMemRecords) {
+ iv_mem_rec_t *ps_mem_rec = mMemRecords;
+
+ for (size_t i = 0; i < mNumMemRecords; i++, ps_mem_rec++) {
+ if (ps_mem_rec->pv_base) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ }
+ }
+ ivd_aligned_free(mMemRecords);
+ mMemRecords = nullptr;
+ }
+ mDecHandle = nullptr;
+
+ return OK;
+}
+
+status_t C2SoftMpeg2Dec::reInitDecoder() {
+ deleteDecoder();
+
+ status_t ret = initDecoder();
+ if (OK != ret) {
+ ALOGE("Failed to initialize decoder");
+ deleteDecoder();
+ return ret;
+ }
+ return OK;
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftMpeg2Dec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mOutBlock),
+ C2Rect(mWidth, mHeight));
+ mOutBlock = nullptr;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftMpeg2Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (!mDecHandle) {
+ ALOGE("not supposed to be here, invalid decoder context");
+ return C2_CORRUPTED;
+ }
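+ // Keep the decoder's output stride in sync with the (64-aligned) picture width.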
+ if (mStride != ALIGN64(mWidth)) {
+ mStride = ALIGN64(mWidth);
+ if (OK != setParams(mStride)) return C2_CORRUPTED;
+ }
+ if (mOutBlock &&
+ (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) {
+ mOutBlock.reset();
+ }
+ if (!mOutBlock) {
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ return err;
+ }
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mOutBlock->width(), mOutBlock->height(), mStride, mHeight);
+ }
+
+ return C2_OK;
+}
+
+// TODO: can overall error checking be improved?
+// TODO: allow configuration of color format and usage for graphic buffers instead
+// of hard coding them to HAL_PIXEL_FORMAT_YV12
+// TODO: pass coloraspects information to surface
+// TODO: test support for dynamic change in resolution
+// TODO: verify if the decoder sent back all frames
+void C2SoftMpeg2Dec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ bool hasPicture = false;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ inOffset + inPos, inSize - inPos, workIndex)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ // If input dump is enabled, then write to file
+ DUMP_TO_FILE(mInFile, s_decode_ip.pv_stream_buffer, s_decode_ip.u4_num_Bytes);
+ WORD32 delay;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ WORD32 decodeTime;
+ GETTIME(&mTimeEnd, nullptr);
+ TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+ ALOGV("decodeTime=%6d delay=%6d numBytes=%6d ", decodeTime, delay,
+ s_decode_op.u4_num_bytes_consumed);
+ if (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_decode_op.u4_error_code) {
+ ALOGV("unsupported resolution : %dx%d", s_decode_op.u4_pic_wd, s_decode_op.u4_pic_ht);
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ resetPlugin();
+ work->workletsProcessed = 0u;
+ mWidth = s_decode_op.u4_pic_wd;
+ mHeight = s_decode_op.u4_pic_ht;
+
+ ALOGI("Configuring decoder: mWidth %d , mHeight %d ",
+ mWidth, mHeight);
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err =
+ mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Cannot set width and height");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (OK != reInitDecoder()) {
+ ALOGE("Failed to reinitialize decoder");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ continue;
+ } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGV("resolution changed");
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ resetDecoder();
+ resetPlugin();
+ work->workletsProcessed = 0u;
+ continue;
+ }
+ if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
+ mWidth = s_decode_op.u4_pic_wd;
+ mHeight = s_decode_op.u4_pic_ht;
+ CHECK_EQ(0u, s_decode_op.u4_output_present);
+
+ ALOGI("Configuring decoder out: mWidth %d , mHeight %d ",
+ mWidth, mHeight);
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err =
+ mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Cannot set width and height");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
+ (void) getSeqInfo();
+ hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ }
+
+ inPos += s_decode_op.u4_num_bytes_consumed;
+ if (hasPicture && (inSize - inPos) != 0) {
+ ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
+ (int)inSize - (int)inPos);
+ break;
+ }
+ }
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!hasPicture) {
+ fillEmptyWork(work);
+ }
+}
+
+c2_status_t C2SoftMpeg2Dec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (OK != setFlushMode()) return C2_CORRUPTED;
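+ // Switch the decoder to flush mode and keep pulling decoded pictures until none remain.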
+ while (true) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return C2_CORRUPTED;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ return C2_CORRUPTED;
+ }
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ return C2_CORRUPTED;
+ }
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ } else {
+ fillEmptyWork(work);
+ break;
+ }
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftMpeg2Dec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftMpeg2DecFactory : public C2ComponentFactory {
+public:
+ C2SoftMpeg2DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftMpeg2Dec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftMpeg2Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftMpeg2Dec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftMpeg2Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftMpeg2DecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftMpeg2DecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
new file mode 100644
index 0000000..9999872
--- /dev/null
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_MPEG2_DEC_H_
+#define ANDROID_C2_SOFT_MPEG2_DEC_H_
+
+#include <SimpleC2Component.h>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include "iv_datatypedef.h"
+#include "iv.h"
+#include "ivd.h"
+
+namespace android {
+
+#define ivdec_api_function impeg2d_api_function
+#define ivdext_init_ip_t impeg2d_init_ip_t
+#define ivdext_init_op_t impeg2d_init_op_t
+#define ivdext_fill_mem_rec_ip_t impeg2d_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t impeg2d_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t impeg2d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t
+#define ivdext_ctl_get_seq_info_ip_t impeg2d_ctl_get_seq_info_ip_t
+#define ivdext_ctl_get_seq_info_op_t impeg2d_ctl_get_seq_info_op_t
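+// ALIGN64(x) rounds x up to the next multiple of 64 (used for the output stride).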
+#define ALIGN64(x) ((((x) + 63) >> 6) << 6)
+#define MAX_NUM_CORES 4
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define GETTIME(a, b) gettimeofday(a, b);
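+// TIME_DIFF computes (end - start) in microseconds from two struct timeval values.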
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#ifdef FILE_DUMP_ENABLE
+ #define INPUT_DUMP_PATH "/sdcard/clips/mpeg2d_input"
+ #define INPUT_DUMP_EXT "m2v"
+ #define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ }
+ #define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+ }
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+ { \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ uint32_t i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (uint32_t)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+ }
+#else /* FILE_DUMP_ENABLE */
+ #define INPUT_DUMP_PATH
+ #define INPUT_DUMP_EXT
+ #define OUTPUT_DUMP_PATH
+ #define OUTPUT_DUMP_EXT
+ #define GENERATE_FILE_NAMES()
+ #define CREATE_DUMP_FILE(m_filename)
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+struct C2SoftMpeg2Dec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftMpeg2Dec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftMpeg2Dec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ private:
+ status_t getNumMemRecords();
+ status_t fillMemRecords();
+ status_t createDecoder();
+ status_t setNumCores();
+ status_t setParams(size_t stride);
+ status_t getVersion();
+ status_t initDecoder();
+ bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker);
+ bool getSeqInfo();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ status_t setFlushMode();
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+ status_t resetDecoder();
+ void resetPlugin();
+ status_t deleteDecoder();
+ status_t reInitDecoder();
+
+ // TODO: This is not the right place for this enum. These values should
+ // be part of c2-vndk so that they can be accessed by all video plugins.
+ // Until then, make them feel at home here.
+ enum {
+ kNotSupported,
+ kPreferBitstream,
+ kPreferContainer,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ iv_obj_t *mDecHandle;
+ iv_mem_rec_t *mMemRecords;
+ size_t mNumMemRecords;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+ uint8_t *mOutBufferDrain;
+
+ size_t mNumCores;
+ IV_COLOR_FORMAT_T mIvColorformat;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mStride;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ // Color aspects. These are raw ISO values, kept so that changes in aspects can be detected
+ // without converting them to C2 values for every frame.
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
+ // profile
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ C2_DO_NOT_COPY(C2SoftMpeg2Dec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_MPEG2_DEC_H_
diff --git a/media/codec2/components/mpeg4_h263/Android.bp b/media/codec2/components/mpeg4_h263/Android.bp
new file mode 100644
index 0000000..3155bc2
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/Android.bp
@@ -0,0 +1,66 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2mpeg4dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ srcs: ["C2SoftMpeg4Dec.cpp"],
+
+ static_libs: ["libstagefright_m4vh263dec"],
+
+ cflags: [
+ "-DOSCL_IMPORT_REF=",
+ "-DMPEG4",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2h263dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ srcs: ["C2SoftMpeg4Dec.cpp"],
+
+ static_libs: ["libstagefright_m4vh263dec"],
+
+ cflags: [
+ "-DOSCL_IMPORT_REF=",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2mpeg4enc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+
+ srcs: ["C2SoftMpeg4Enc.cpp"],
+
+ static_libs: ["libstagefright_m4vh263enc"],
+
+ cflags: [
+ "-DMPEG4",
+ "-DOSCL_IMPORT_REF=",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2h263enc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ srcs: ["C2SoftMpeg4Enc.cpp"],
+
+ static_libs: [ "libstagefright_m4vh263enc" ],
+
+ cflags: [
+ "-DOSCL_IMPORT_REF=",
+ ],
+}
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
new file mode 100644
index 0000000..901f5ed
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -0,0 +1,746 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#ifdef MPEG4
+ #define LOG_TAG "C2SoftMpeg4Dec"
+#else
+ #define LOG_TAG "C2SoftH263Dec"
+#endif
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftMpeg4Dec.h"
+#include "mp4dec_api.h"
+
+namespace android {
+
+#ifdef MPEG4
+constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.decoder";
+#else
+constexpr char COMPONENT_NAME[] = "c2.android.h263.decoder";
+#endif
+
+class C2SoftMpeg4Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+#ifdef MPEG4
+ MEDIA_MIMETYPE_VIDEO_MPEG4
+#else
+ MEDIA_MIMETYPE_VIDEO_H263
+#endif
+ ) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 176, 144))
+ .withFields({
+#ifdef MPEG4
+ C2F(mSize, width).inRange(2, 1920, 2),
+ C2F(mSize, height).inRange(2, 1088, 2),
+#else
+ C2F(mSize, width).inRange(2, 352, 2),
+ C2F(mSize, height).inRange(2, 288, 2),
+#endif
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+#ifdef MPEG4
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_MP4V_SIMPLE, C2Config::LEVEL_MP4V_3))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ C2Config::PROFILE_MP4V_SIMPLE),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_MP4V_0,
+ C2Config::LEVEL_MP4V_0B,
+ C2Config::LEVEL_MP4V_1,
+ C2Config::LEVEL_MP4V_2,
+ C2Config::LEVEL_MP4V_3,
+ C2Config::LEVEL_MP4V_3B,
+ C2Config::LEVEL_MP4V_4,
+ C2Config::LEVEL_MP4V_4A,
+ C2Config::LEVEL_MP4V_5,
+ C2Config::LEVEL_MP4V_6})
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+#else
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_H263_BASELINE, C2Config::LEVEL_H263_30))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_H263_BASELINE,
+ C2Config::PROFILE_H263_ISWV2}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_H263_10,
+ C2Config::LEVEL_H263_20,
+ C2Config::LEVEL_H263_30,
+ C2Config::LEVEL_H263_40,
+ C2Config::LEVEL_H263_45})
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+#endif
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+#ifdef MPEG4
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 1920, 1088))
+#else
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 352, 288))
+#endif
+ .withFields({
+#ifdef MPEG4
+ C2F(mSize, width).inRange(2, 1920, 2),
+ C2F(mSize, height).inRange(2, 1088, 2),
+#else
+ C2F(mSize, width).inRange(2, 352, 2),
+ C2F(mSize, height).inRange(2, 288, 2),
+#endif
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+#ifdef MPEG4
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 1920 * 1088 * 3 / 2))
+#else
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 352 * 288 * 3 / 2))
+#endif
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ { C2ChromaOffsetStruct::ITU_YUV_420_0() },
+ 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+#ifdef MPEG4
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
+#else
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 352u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 288u);
+#endif
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 1
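+ // 384 bytes per 16x16 macroblock: 256 luma + 2 * 64 chroma samples (YUV 4:2:0)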
+ me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 384);
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ uint32_t getMaxWidth() const { return mMaxSize->width; }
+ uint32_t getMaxHeight() const { return mMaxSize->height; }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+};
+
+C2SoftMpeg4Dec::C2SoftMpeg4Dec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mOutputBuffer{},
+ mInitialized(false) {
+}
+
+C2SoftMpeg4Dec::~C2SoftMpeg4Dec() {
+ onRelease();
+}
+
+c2_status_t C2SoftMpeg4Dec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftMpeg4Dec::onStop() {
+ if (mInitialized) {
+ if (mDecHandle) {
+ PVCleanUpVideoDecoder(mDecHandle);
+ }
+ mInitialized = false;
+ }
+ for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (mOutputBuffer[i]) {
+ free(mOutputBuffer[i]);
+ mOutputBuffer[i] = nullptr;
+ }
+ }
+ mNumSamplesOutput = 0;
+ mFramesConfigured = false;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ return C2_OK;
+}
+
+void C2SoftMpeg4Dec::onReset() {
+ (void)onStop();
+ (void)onInit();
+}
+
+void C2SoftMpeg4Dec::onRelease() {
+ if (mInitialized) {
+ if (mDecHandle) {
+ PVCleanUpVideoDecoder(mDecHandle);
+ delete mDecHandle;
+ mDecHandle = nullptr;
+ }
+ mInitialized = false;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+ for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (mOutputBuffer[i]) {
+ free(mOutputBuffer[i]);
+ mOutputBuffer[i] = nullptr;
+ }
+ }
+}
+
+c2_status_t C2SoftMpeg4Dec::onFlush_sm() {
+ if (mInitialized) {
+ if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) {
+ return C2_CORRUPTED;
+ }
+ }
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+ return C2_OK;
+}
+
+status_t C2SoftMpeg4Dec::initDecoder() {
+#ifdef MPEG4
+ mIsMpeg4 = true;
+#else
+ mIsMpeg4 = false;
+#endif
+ if (!mDecHandle) {
+ mDecHandle = new tagvideoDecControls;
+ }
+ if (!mDecHandle) {
+ ALOGE("mDecHandle is null");
+ return NO_MEMORY;
+ }
+ memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+ /* TODO: bring these values down to 352 and 288. This cannot be done yet
+ * because H263 does not seem to allow port reconfiguration. In OMX, a
+ * width and height larger than the defaults is handled through the
+ * adaptivePlayBack() API call, which obtains the dimensions from the
+ * extractor. That is not possible here, so we configure larger values
+ * up front. */
+ mWidth = 1408;
+ mHeight = 1152;
+ mNumSamplesOutput = 0;
+ mInitialized = false;
+ mFramesConfigured = false;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ return OK;
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftMpeg4Dec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mOutBlock),
+ C2Rect(mWidth, mHeight));
+ mOutBlock = nullptr;
+ auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+ (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (!mDecHandle) {
+ ALOGE("not supposed to be here, invalid decoder context");
+ return C2_CORRUPTED;
+ }
+
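+ // Size the scratch buffers for a worst-case YUV 4:2:0 frame at the maximum supported picture size.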
+ mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2;
+ for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (!mOutputBuffer[i]) {
+ mOutputBuffer[i] = (uint8_t *)malloc(mOutputBufferSize);
+ if (!mOutputBuffer[i]) {
+ return C2_NO_MEMORY;
+ }
+ }
+ }
+ if (mOutBlock &&
+ (mOutBlock->width() != align(mWidth, 16) || mOutBlock->height() != mHeight)) {
+ mOutBlock.reset();
+ }
+ if (!mOutBlock) {
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ return err;
+ }
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mOutBlock->width(), mOutBlock->height(), mWidth, mHeight);
+ }
+ return C2_OK;
+}
+
+bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr<C2Work> &work) {
+ uint32_t disp_width, disp_height;
+ PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height);
+
+ uint32_t buf_width, buf_height;
+ PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height);
+
+ CHECK_LE(disp_width, buf_width);
+ CHECK_LE(disp_height, buf_height);
+
+ ALOGV("display size (%dx%d), buffer size (%dx%d)",
+ disp_width, disp_height, buf_width, buf_height);
+
+ bool resChanged = false;
+ if (disp_width != mWidth || disp_height != mHeight) {
+ mWidth = disp_width;
+ mHeight = disp_height;
+ resChanged = true;
+ for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (mOutputBuffer[i]) {
+ free(mOutputBuffer[i]);
+ mOutputBuffer[i] = nullptr;
+ }
+ }
+
+ if (!mIsMpeg4) {
+ PVCleanUpVideoDecoder(mDecHandle);
+
+ uint8_t *vol_data[1]{};
+ int32_t vol_size = 0;
+
+ if (!PVInitVideoDecoder(
+ mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) {
+ ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return true;
+ }
+ }
+ mFramesConfigured = false;
+ }
+ return resChanged;
+}
+
+/* TODO: the temporary copy can be removed once the library supports writing directly to the
+ * display buffer's Y, U and V plane pointers using stride info. */
+static void copyOutputBufferToYV12Frame(uint8_t *dst, uint8_t *src, size_t dstYStride,
+ size_t srcYStride, uint32_t width, uint32_t height) {
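+ // The decoder output is I420 (Y, then U, then V) with a 16-aligned height, while the
+ // destination is YV12 (Y, then V, then U), so the chroma planes are swapped during the copy.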
+ size_t dstUVStride = align(dstYStride / 2, 16);
+ size_t srcUVStride = srcYStride / 2;
+ uint8_t *srcStart = src;
+ uint8_t *dstStart = dst;
+ size_t vStride = align(height, 16);
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dst, src, width);
+ src += srcYStride;
+ dst += dstYStride;
+ }
+ /* U buffer */
+ src = srcStart + vStride * srcYStride;
+ dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, src, width / 2);
+ src += srcUVStride;
+ dst += dstUVStride;
+ }
+ /* V buffer */
+ src = srcStart + vStride * srcYStride * 5 / 4;
+ dst = dstStart + (dstYStride * height);
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, src, width / 2);
+ src += srcUVStride;
+ dst += dstUVStride;
+ }
+}
+
+void C2SoftMpeg4Dec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ if (inSize == 0) {
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ }
+ return;
+ }
+
+ uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
+ uint32_t *start_code = (uint32_t *)bitstream;
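+ // Bytes 00 00 01 B0 (0xB0010000 when read as a little-endian word) mark an MPEG-4
+ // visual object sequence start code, i.e. new VOL configuration data follows.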
+ bool volHeader = *start_code == 0xB0010000;
+ if (volHeader) {
+ PVCleanUpVideoDecoder(mDecHandle);
+ mInitialized = false;
+ }
+
+ if (!mInitialized) {
+ uint8_t *vol_data[1]{};
+ int32_t vol_size = 0;
+
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+ if (codecConfig || volHeader) {
+ vol_data[0] = bitstream;
+ vol_size = inSize;
+ }
+ MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE;
+ if (!PVInitVideoDecoder(
+ mDecHandle, vol_data, &vol_size, 1,
+ mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) {
+ ALOGE("PVInitVideoDecoder failed. Unsupported content?");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ mInitialized = true;
+ MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+ if (mode != actualMode) {
+ ALOGE("Decoded mode not same as actual mode of the decoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ PVSetPostProcType(mDecHandle, 0);
+ if (handleResChange(work)) {
+ ALOGI("Setting width and height");
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Config update size failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
+ }
+ }
+
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ c2_status_t err = ensureDecoderState(pool);
+ if (C2_OK != err) {
+ mSignalledError = true;
+ work->result = err;
+ return;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+ if (mOutputBufferSize < yFrameSize * 3 / 2) {
+ ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize);
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+
+ if (!mFramesConfigured) {
+ PVSetReferenceYUV(mDecHandle, mOutputBuffer[1]);
+ mFramesConfigured = true;
+ }
+
+ // Need to check if header contains new info, e.g., width/height, etc.
+ VopHeaderInfo header_info;
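+ // Only the first VOP in this input buffer uses the externally supplied timestamp.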
+ uint32_t useExtTimestamp = (inPos == 0);
+ int32_t tmpInSize = (int32_t)inSize;
+ uint8_t *bitstreamTmp = bitstream;
+ uint32_t timestamp = workIndex;
+ if (PVDecodeVopHeader(
+ mDecHandle, &bitstreamTmp, ×tamp, &tmpInSize,
+ &header_info, &useExtTimestamp,
+ mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
+ ALOGE("failed to decode vop header.");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ // H263 doesn't have VOL header, the frame size information is in short header, i.e. the
+ // decoder may detect size change after PVDecodeVopHeader.
+ bool resChange = handleResChange(work);
+ if (mIsMpeg4 && resChange) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (resChange) {
+ ALOGI("Setting width and height");
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+ } else {
+ ALOGE("Config update size failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ continue;
+ }
+
+ if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) {
+ ALOGE("failed to decode video frame.");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ if (handleResChange(work)) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
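+ // Copy the decoded frame from the decoder's 16-aligned reference buffer into the output graphic block.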
+ uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
+ (void)copyOutputBufferToYV12Frame(outputBufferY, mOutputBuffer[mNumSamplesOutput & 1],
+ wView.width(), align(mWidth, 16), mWidth, mHeight);
+
+ inPos += inSize - (size_t)tmpInSize;
+ finishWork(workIndex, work);
+ ++mNumSamplesOutput;
+ if (inSize - inPos != 0) {
+ ALOGD("decoded frame, ignoring further trailing bytes %d",
+ (int)inSize - (int)inPos);
+ break;
+ }
+ }
+}
+
+c2_status_t C2SoftMpeg4Dec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+ return C2_OK;
+}
+
+class C2SoftMpeg4DecFactory : public C2ComponentFactory {
+public:
+ C2SoftMpeg4DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftMpeg4Dec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftMpeg4Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftMpeg4Dec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftMpeg4Dec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftMpeg4DecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftMpeg4DecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
new file mode 100644
index 0000000..716a095
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_MPEG4_DEC_H_
+#define C2_SOFT_MPEG4_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+struct tagvideoDecControls;
+
+namespace android {
+
+struct C2SoftMpeg4Dec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftMpeg4Dec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftMpeg4Dec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ private:
+ enum {
+ kNumOutputBuffers = 2,
+ };
+
+ status_t initDecoder();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ bool handleResChange(const std::unique_ptr<C2Work> &work);
+
+ std::shared_ptr<IntfImpl> mIntf;
+ tagvideoDecControls *mDecHandle;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+ uint8_t *mOutputBuffer[kNumOutputBuffers];
+ size_t mOutputBufferSize;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mNumSamplesOutput;
+
+ bool mIsMpeg4;
+ bool mInitialized;
+ bool mFramesConfigured;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ C2_DO_NOT_COPY(C2SoftMpeg4Dec);
+};
+
+} // namespace android
+
+#endif // C2_SOFT_MPEG4_DEC_H_
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
new file mode 100644
index 0000000..c8796f3
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -0,0 +1,671 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#ifdef MPEG4
+ #define LOG_TAG "C2SoftMpeg4Enc"
+#else
+ #define LOG_TAG "C2SoftH263Enc"
+#endif
+#include <log/log.h>
+
+#include <inttypes.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaDefs.h>
+#include <utils/misc.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "C2SoftMpeg4Enc.h"
+#include "mp4enc_api.h"
+
+namespace android {
+
+#ifdef MPEG4
+constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.encoder";
+#else
+constexpr char COMPONENT_NAME[] = "c2.android.h263.encoder";
+#endif
+
+class C2SoftMpeg4Enc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+#ifdef MPEG4
+ MEDIA_MIMETYPE_VIDEO_MPEG4
+#else
+ MEDIA_MIMETYPE_VIDEO_H263
+#endif
+ ))
+ .build());
+
+ addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
+ .withDefault(new C2VideoSizeStreamTuning::input(0u, 176, 144))
+ .withFields({
+#ifdef MPEG4
+ C2F(mSize, width).inRange(16, 176, 16),
+ C2F(mSize, height).inRange(16, 144, 16),
+#else
+ C2F(mSize, width).oneOf({176, 352}),
+ C2F(mSize, height).oneOf({144, 288}),
+#endif
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 17.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(
+ Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+
+#ifdef MPEG4
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_MP4V_SIMPLE, LEVEL_MP4V_2))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_MP4V_SIMPLE),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_MP4V_0,
+ C2Config::LEVEL_MP4V_0B,
+ C2Config::LEVEL_MP4V_1,
+ C2Config::LEVEL_MP4V_2})
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+#else
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_H263_BASELINE, LEVEL_H263_45))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_H263_BASELINE),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_H263_10,
+ C2Config::LEVEL_H263_20,
+ C2Config::LEVEL_H263_30,
+ C2Config::LEVEL_H263_40,
+ C2Config::LEVEL_H263_45})
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+#endif
+ }
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
+ C2P<C2StreamPictureSizeInfo::input> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+#ifdef MPEG4
+ me.set().profile = PROFILE_MP4V_SIMPLE;
+#else
+ me.set().profile = PROFILE_H263_BASELINE;
+#endif
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+#ifdef MPEG4
+ me.set().level = LEVEL_MP4V_2;
+#else
+ me.set().level = LEVEL_H263_45;
+#endif
+ }
+ return C2R::Ok();
+ }
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ uint32_t getSyncFramePeriod() const {
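+ // Converts the sync-frame interval (microseconds) into a frame count at the current frame rate, clamped to at least 1.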
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+};
+
+C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mHandle(nullptr),
+ mEncParams(nullptr),
+ mStarted(false),
+ mOutBufferSize(524288) {
+}
+
+C2SoftMpeg4Enc::~C2SoftMpeg4Enc() {
+ onRelease();
+}
+
+c2_status_t C2SoftMpeg4Enc::onInit() {
+#ifdef MPEG4
+ mEncodeMode = COMBINE_MODE_WITH_ERR_RES;
+#else
+ mEncodeMode = H263_MODE;
+#endif
+ if (!mHandle) {
+ mHandle = new tagvideoEncControls;
+ }
+
+ if (!mEncParams) {
+ mEncParams = new tagvideoEncOptions;
+ }
+
+ if (!(mEncParams && mHandle)) return C2_NO_MEMORY;
+
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ return initEncoder();
+}
+
+c2_status_t C2SoftMpeg4Enc::onStop() {
+ if (!mStarted) {
+ return C2_OK;
+ }
+ if (mHandle) {
+ (void)PVCleanUpVideoEncoder(mHandle);
+ }
+ mStarted = false;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+ return C2_OK;
+}
+
+void C2SoftMpeg4Enc::onReset() {
+ onStop();
+ initEncoder();
+}
+
+void C2SoftMpeg4Enc::onRelease() {
+ onStop();
+ if (mEncParams) {
+ delete mEncParams;
+ mEncParams = nullptr;
+ }
+ if (mHandle) {
+ delete mHandle;
+ mHandle = nullptr;
+ }
+}
+
+c2_status_t C2SoftMpeg4Enc::onFlush_sm() {
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+c2_status_t C2SoftMpeg4Enc::initEncParams() {
+ if (mHandle) {
+ memset(mHandle, 0, sizeof(tagvideoEncControls));
+ } else return C2_CORRUPTED;
+ if (mEncParams) {
+ memset(mEncParams, 0, sizeof(tagvideoEncOptions));
+ } else return C2_CORRUPTED;
+
+ if (!PVGetDefaultEncOption(mEncParams, 0)) {
+ ALOGE("Failed to get default encoding parameters");
+ return C2_CORRUPTED;
+ }
+
+ if (mFrameRate->value == 0) {
+ ALOGE("Framerate should not be 0");
+ return C2_BAD_VALUE;
+ }
+
+ mEncParams->encMode = mEncodeMode;
+ mEncParams->encWidth[0] = mSize->width;
+ mEncParams->encHeight[0] = mSize->height;
+ mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
+ mEncParams->rcType = VBR_1;
+ mEncParams->vbvDelay = 5.0f;
+ mEncParams->profile_level = CORE_PROFILE_LEVEL2;
+ mEncParams->packetSize = 32;
+ mEncParams->rvlcEnable = PV_OFF;
+ mEncParams->numLayers = 1;
+ mEncParams->timeIncRes = 1000;
+ mEncParams->tickPerSrc = mEncParams->timeIncRes / (mFrameRate->value + 0.5);
+ mEncParams->bitRate[0] = mBitrate->value;
+ mEncParams->iQuant[0] = 15;
+ mEncParams->pQuant[0] = 12;
+ mEncParams->quantType[0] = 0;
+ mEncParams->noFrameSkipped = PV_OFF;
+
+ // PV's MPEG4 encoder requires the video dimensions to be a multiple of 16.
+ if (mSize->width % 16 != 0 || mSize->height % 16 != 0) {
+ ALOGE("Video frame size %dx%d must be a multiple of 16",
+ mSize->width, mSize->height);
+ return C2_BAD_VALUE;
+ }
+
+ // Set IDR frame refresh interval
+ mEncParams->intraPeriod = mIntf->getSyncFramePeriod();
+ mEncParams->numIntraMB = 0;
+ mEncParams->sceneDetect = PV_ON;
+ mEncParams->searchRange = 16;
+ mEncParams->mv8x8Enable = PV_OFF;
+ mEncParams->gobHeaderInterval = 0;
+ mEncParams->useACPred = PV_ON;
+ mEncParams->intraDCVlcTh = 0;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftMpeg4Enc::initEncoder() {
+ if (mStarted) {
+ return C2_OK;
+ }
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ }
+ c2_status_t err = initEncParams();
+ if (C2_OK != err) {
+ ALOGE("Failed to initialized encoder params");
+ mSignalledError = true;
+ return err;
+ }
+ if (!PVInitVideoEncoder(mHandle, mEncParams)) {
+ ALOGE("Failed to initialize the encoder");
+ mSignalledError = true;
+ return C2_CORRUPTED;
+ }
+
+ // 1st buffer for codec specific data
+ mNumInputFrames = -1;
+ mStarted = true;
+ return C2_OK;
+}
+
+void C2SoftMpeg4Enc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ // Initialize encoder if not already initialized
+ if (!mStarted && C2_OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ uint8_t *outPtr = (uint8_t *)wView.data();
+ if (mNumInputFrames < 0) {
+ // The very first thing we want to output is the codec specific data.
+ int32_t outputSize = mOutBufferSize;
+ if (!PVGetVolHeader(mHandle, outPtr, &outputSize, 0)) {
+ ALOGE("Failed to get VOL header");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ } else {
+ ALOGV("Bytes Generated in header %d\n", outputSize);
+ }
+
+ ++mNumInputFrames;
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(outputSize, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, outPtr, outputSize);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+ }
+
+ std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2Buffer> inputBuffer;
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ rView = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (rView->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", rView->error());
+ work->result = rView->error();
+ return;
+ }
+ } else {
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
+ const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
+ if (inBuffer.width() < mSize->width ||
+ inBuffer.height() < mSize->height) {
+ /* Expect width height to be configured */
+ ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", inBuffer.width(),
+ mSize->width, inBuffer.height(), mSize->height);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ const C2PlanarLayout &layout = rView->layout();
+ uint8_t *yPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *uPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *vPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ // width and height are always even (as block size is 16x16)
+ CHECK_EQ((width & 1u), 0u);
+ CHECK_EQ((height & 1u), 0u);
+ size_t yPlaneSize = width * height;
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
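+ // Interleaved RGB(A) input is converted into a planar I420 scratch buffer before encoding.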
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = width / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *rView.get());
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*rView)) {
+ ALOGE("input is not YUV420");
+ work->result = C2_BAD_VALUE;
+ break;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 1
+ && layout.planes[layout.PLANE_V].colInc == 1
+ && uStride == vStride
+ && yStride == 2 * vStride) {
+ // I420 compatible - planes are already set up above
+ break;
+ }
+
+ // copy to I420
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, width, height);
+ status_t err = ImageCopy(conversionBuffer.data(), &img, *rView);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = width / 2;
+ break;
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGE("YUVA plane type is not supported");
+ work->result = C2_BAD_VALUE;
+ return;
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ CHECK(NULL != yPlane);
+ /* Encode frames */
+ VideoEncFrameIO vin, vout;
+ memset(&vin, 0, sizeof(vin));
+ memset(&vout, 0, sizeof(vout));
+ vin.yChan = yPlane;
+ vin.uChan = uPlane;
+ vin.vChan = vPlane;
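+ // Codec2 timestamps are in microseconds; round to the nearest millisecond for the encoder.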
+ vin.timestamp = (inputTimeStamp + 500) / 1000; // in ms
+ vin.height = align(height, 16);
+ vin.pitch = align(width, 16);
+
+ uint32_t modTimeMs = 0;
+ int32_t nLayer = 0;
+ MP4HintTrack hintTrack;
+ int32_t outputSize = mOutBufferSize;
+ if (!PVEncodeVideoFrame(mHandle, &vin, &vout, &modTimeMs, outPtr, &outputSize, &nLayer) ||
+ !PVGetHintTrack(mHandle, &hintTrack)) {
+ ALOGE("Failed to encode frame or get hint track at frame %" PRId64, mNumInputFrames);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ ALOGV("outputSize filled : %d", outputSize);
+ ++mNumInputFrames;
+ CHECK(NULL == PVGetOverrunBuffer(mHandle));
+
+ fillEmptyWork(work);
+ if (outputSize) {
+ std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outputSize);
+ work->worklets.front()->output.ordinal.timestamp = inputTimeStamp;
+ if (hintTrack.CodeType == 0) {
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+ if (eos) {
+ mSignalledOutputEos = true;
+ }
+
+ mConversionBuffersInUse.erase(yPlane);
+}
+
+c2_status_t C2SoftMpeg4Enc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+class C2SoftMpeg4EncFactory : public C2ComponentFactory {
+public:
+ C2SoftMpeg4EncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftMpeg4Enc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftMpeg4Enc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftMpeg4EncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftMpeg4EncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
new file mode 100644
index 0000000..43461fc
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_MPEG4_ENC_H__
+#define C2_SOFT_MPEG4_ENC_H__
+
+#include <map>
+
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Component.h>
+
+#include "mp4enc_api.h"
+
+namespace android {
+
+struct C2SoftMpeg4Enc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+protected:
+
+ virtual ~C2SoftMpeg4Enc();
+
+private:
+ std::shared_ptr<IntfImpl> mIntf;
+
+ tagvideoEncControls *mHandle;
+ tagvideoEncOptions *mEncParams;
+
+ bool mStarted;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ uint32_t mOutBufferSize;
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+
+ int64_t mNumInputFrames;
+ MP4EncodingMode mEncodeMode;
+
+ MemoryBlockPool mConversionBuffers;
+ std::map<void *, MemoryBlock> mConversionBuffersInUse;
+
+ c2_status_t initEncParams();
+ c2_status_t initEncoder();
+
+ C2_DO_NOT_COPY(C2SoftMpeg4Enc);
+};
+
+} // namespace android
+
+#endif // C2_SOFT_MPEG4_ENC_H__
diff --git a/media/codec2/components/mpeg4_h263/MODULE_LICENSE_APACHE2 b/media/codec2/components/mpeg4_h263/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/mpeg4_h263/NOTICE b/media/codec2/components/mpeg4_h263/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/mpeg4_h263/patent_disclaimer.txt b/media/codec2/components/mpeg4_h263/patent_disclaimer.txt
new file mode 100644
index 0000000..b4bf11d
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/patent_disclaimer.txt
@@ -0,0 +1,9 @@
+
+THIS IS NOT A GRANT OF PATENT RIGHTS.
+
+Google makes no representation or warranty that the codecs for which
+source code is made available hereunder are unencumbered by
+third-party patents. Those intending to use this source code in
+hardware or software products are advised that implementations of
+these codecs, including in open source software or shareware, may
+require patent licenses from the relevant patent holders.
diff --git a/media/codec2/components/opus/Android.bp b/media/codec2/components/opus/Android.bp
new file mode 100644
index 0000000..a6233a6
--- /dev/null
+++ b/media/codec2/components/opus/Android.bp
@@ -0,0 +1,11 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2opusdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftOpusDec.cpp"],
+
+ shared_libs: ["libopus"],
+}
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
new file mode 100644
index 0000000..2439c3c
--- /dev/null
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -0,0 +1,544 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftOpusDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftOpusDec.h"
+
+extern "C" {
+ #include <opus.h>
+ #include <opus_multistream.h>
+}
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.opus.decoder";
+
+class C2SoftOpusDec::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_OPUS))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 48000))
+ .withFields({C2F(mSampleRate, value).equalTo(48000)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 6000))
+ .withFields({C2F(mBitrate, value).inRange(6000, 510000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 960 * 6))
+ .build());
+ }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftOpusDec::C2SoftOpusDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecoder(nullptr) {
+}
+
+C2SoftOpusDec::~C2SoftOpusDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftOpusDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+c2_status_t C2SoftOpusDec::onStop() {
+ if (mDecoder) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = nullptr;
+ }
+ memset(&mHeader, 0, sizeof(mHeader));
+ mCodecDelay = 0;
+ mSeekPreRoll = 0;
+ mSamplesToDiscard = 0;
+ mInputBufferCount = 0;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return C2_OK;
+}
+
+void C2SoftOpusDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftOpusDec::onRelease() {
+ if (mDecoder) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = nullptr;
+ }
+}
+
+status_t C2SoftOpusDec::initDecoder() {
+ memset(&mHeader, 0, sizeof(mHeader));
+ mCodecDelay = 0;
+ mSeekPreRoll = 0;
+ mSamplesToDiscard = 0;
+ mInputBufferCount = 0;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return OK;
+}
+
+c2_status_t C2SoftOpusDec::onFlush_sm() {
+ if (mDecoder) {
+ opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
+ mSamplesToDiscard = mSeekPreRoll;
+ mSignalledOutputEos = false;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftOpusDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
+ uint32_t read_offset) {
+ if (read_offset + 1 >= data_size)
+ return 0;
+ uint16_t val;
+ val = data[read_offset];
+ val |= data[read_offset + 1] << 8;
+ return val;
+}
+
+static const int kRate = 48000;
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+static const int kMaxChannels = 8;
+
+// Maximum packet size used in Xiph's opusdec.
+static const int kMaxOpusOutputPacketSizeSamples = 960 * 6;
+
+// Default audio output channel layout. Used to initialize |stream_map| in
+// OpusHeader, and passed to opus_multistream_decoder_create() when the header
+// does not contain mapping information. The values are valid only for mono and
+// stereo output: Opus streams with more than 2 channels require a stream map.
+static const int kMaxChannelsWithDefaultLayout = 2;
+static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
+
+// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
+static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
+ OpusHeader* header) {
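+ // ID header layout: 'OpusHead' magic (8 bytes) | version (1) | channel count (1) | pre-skip (2) |
+ // input sample rate (4) | output gain (2) | mapping family (1)
+ // [| stream count (1) | coupled count (1) | channel mapping (1 per channel)].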
+ // Size of the Opus header excluding optional mapping information.
+ const size_t kOpusHeaderSize = 19;
+
+ // Offset to the channel count byte in the Opus header.
+ const size_t kOpusHeaderChannelsOffset = 9;
+
+ // Offset to the pre-skip value in the Opus header.
+ const size_t kOpusHeaderSkipSamplesOffset = 10;
+
+ // Offset to the gain value in the Opus header.
+ const size_t kOpusHeaderGainOffset = 16;
+
+ // Offset to the channel mapping byte in the Opus header.
+ const size_t kOpusHeaderChannelMappingOffset = 18;
+
+ // Opus Header contains a stream map. The mapping values are in the header
+ // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
+ // data contains stream count, coupling information, and per channel mapping
+ // values:
+ // - Byte 0: Number of streams.
+ // - Byte 1: Number coupled.
+ // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
+ // values.
+ const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
+ const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
+ const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
+
+ if (data_size < kOpusHeaderSize) {
+ ALOGE("Header size is too small.");
+ return false;
+ }
+ header->channels = *(data + kOpusHeaderChannelsOffset);
+ if (header->channels <= 0 || header->channels > kMaxChannels) {
+ ALOGE("Invalid Header, wrong channel count: %d", header->channels);
+ return false;
+ }
+
+ header->skip_samples = ReadLE16(data,
+ data_size,
+ kOpusHeaderSkipSamplesOffset);
+
+ header->gain_db = static_cast<int16_t>(ReadLE16(data,
+ data_size,
+ kOpusHeaderGainOffset));
+
+ header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
+ if (!header->channel_mapping) {
+ if (header->channels > kMaxChannelsWithDefaultLayout) {
+ ALOGE("Invalid Header, missing stream map.");
+ return false;
+ }
+ header->num_streams = 1;
+ header->num_coupled = header->channels > 1;
+ header->stream_map[0] = 0;
+ header->stream_map[1] = 1;
+ return true;
+ }
+ if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
+ ALOGE("Invalid stream map; insufficient data for current channel "
+ "count: %d", header->channels);
+ return false;
+ }
+ header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
+ header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
+ if (header->num_streams + header->num_coupled != header->channels) {
+ ALOGE("Inconsistent channel mapping.");
+ return false;
+ }
+ for (int i = 0; i < header->channels; ++i)
+ header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
+ return true;
+}
+
+// Convert nanoseconds to number of samples.
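+// e.g. 80000000 ns of pre-skip at 48000 Hz -> 3840 samples.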
+static uint64_t ns_to_samples(uint64_t ns, int rate) {
+ return static_cast<double>(ns) * rate / 1000000000;
+}
+
+void C2SoftOpusDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ if (inSize == 0) {
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+ const uint8_t *data = rView.data() + inOffset;
+ if (mInputBufferCount < 3) {
+ if (mInputBufferCount == 0) {
+ if (!ParseOpusHeader(data, inSize, &mHeader)) {
+ ALOGE("Encountered error while Parsing Opus Header.");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ uint8_t channel_mapping[kMaxChannels] = {0};
+ if (mHeader.channels <= kMaxChannelsWithDefaultLayout) {
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+ } else {
+ memcpy(&channel_mapping,
+ mHeader.stream_map,
+ mHeader.channels);
+ }
+ int status = OPUS_INVALID_STATE;
+ mDecoder = opus_multistream_decoder_create(kRate,
+ mHeader.channels,
+ mHeader.num_streams,
+ mHeader.num_coupled,
+ channel_mapping,
+ &status);
+ if (!mDecoder || status != OPUS_OK) {
+ ALOGE("opus_multistream_decoder_create failed status = %s",
+ opus_strerror(status));
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ status = opus_multistream_decoder_ctl(mDecoder,
+ OPUS_SET_GAIN(mHeader.gain_db));
+ if (status != OPUS_OK) {
+ ALOGE("Failed to set OPUS header gain; status = %s",
+ opus_strerror(status));
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ if (inSize < 8) {
+ ALOGE("Input sample size is too small.");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
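+ // The second and third codec-config buffers carry the codec delay and the seek
+ // pre-roll, respectively, as 64-bit nanosecond values.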
+ int64_t samples = ns_to_samples( *(reinterpret_cast<int64_t*>
+ (const_cast<uint8_t *> (data))), kRate);
+ if (mInputBufferCount == 1) {
+ mCodecDelay = samples;
+ mSamplesToDiscard = mCodecDelay;
+ }
+ else {
+ mSeekPreRoll = samples;
+
+ ALOGI("Configuring decoder: %d Hz, %d channels",
+ kRate, mHeader.channels);
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, kRate);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, mHeader.channels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
+ ++mInputBufferCount;
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ // Ignore CSD re-submissions.
+ if ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
+ fillEmptyWork(work);
+ return;
+ }
+
+ // When seeking to zero, |mCodecDelay| samples have to be discarded
+ // instead of |mSeekPreRoll| samples (as we would when seeking to any
+ // other timestamp).
+ if (work->input.ordinal.timestamp.peeku() == 0) mSamplesToDiscard = mCodecDelay;
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(
+ kMaxNumSamplesPerBuffer * kMaxChannels * sizeof(int16_t),
+ usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ int numSamples = opus_multistream_decode(mDecoder,
+ data,
+ inSize,
+ reinterpret_cast<int16_t *> (wView.data()),
+ kMaxOpusOutputPacketSizeSamples,
+ 0);
+ if (numSamples < 0) {
+ ALOGE("opus_multistream_decode returned numSamples %d", numSamples);
+ numSamples = 0;
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
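+ // Drop codec-delay / pre-roll samples; the byte offset accounts for interleaved 16-bit samples across all channels.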
+ int outOffset = 0;
+ if (mSamplesToDiscard > 0) {
+ if (mSamplesToDiscard > numSamples) {
+ mSamplesToDiscard -= numSamples;
+ numSamples = 0;
+ } else {
+ numSamples -= mSamplesToDiscard;
+ outOffset = mSamplesToDiscard * sizeof(int16_t) * mHeader.channels;
+ mSamplesToDiscard = 0;
+ }
+ }
+
+ if (numSamples) {
+ int outSize = numSamples * sizeof(int16_t) * mHeader.channels;
+ ALOGV("out buffer attr. offset %d size %d ", outOffset, outSize);
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, outOffset, outSize));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ } else {
+ fillEmptyWork(work);
+ block.reset();
+ }
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+class C2SoftOpusDecFactory : public C2ComponentFactory {
+public:
+ C2SoftOpusDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftOpusDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftOpusDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftOpusDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftOpusDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftOpusDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftOpusDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
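+
+// Usage sketch (hypothetical; not exercised by this file): a host typically
+// dlopen()s the codec library, resolves the exported factory symbols and asks
+// the factory for a component. The library name below is assumed from the
+// soft-codec naming pattern; <dlfcn.h> is required.
+//
+//   void *lib = dlopen("libstagefright_soft_c2opusdec.so", RTLD_NOW);
+//   auto create = reinterpret_cast<::C2ComponentFactory *(*)()>(
+//           dlsym(lib, "CreateCodec2Factory"));
+//   auto destroy = reinterpret_cast<void (*)(::C2ComponentFactory *)>(
+//           dlsym(lib, "DestroyCodec2Factory"));
+//   ::C2ComponentFactory *factory = create();
+//   std::shared_ptr<C2Component> comp;
+//   factory->createComponent(0 /* node id */, &comp,
+//                            [](C2Component *c) { delete c; });
+//   // ... queue C2Work items to comp ...
+//   comp.reset();
+//   destroy(factory);
+//   dlclose(lib);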
diff --git a/media/codec2/components/opus/C2SoftOpusDec.h b/media/codec2/components/opus/C2SoftOpusDec.h
new file mode 100644
index 0000000..92b7426
--- /dev/null
+++ b/media/codec2/components/opus/C2SoftOpusDec.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_OPUS_DEC_H_
+#define ANDROID_C2_SOFT_OPUS_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+struct OpusMSDecoder;
+
+namespace android {
+
+struct OpusHeader {
+ int channels;
+ int skip_samples;
+ int channel_mapping;
+ int num_streams;
+ int num_coupled;
+ int16_t gain_db;
+ uint8_t stream_map[8];
+};
+
+struct C2SoftOpusDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftOpusDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftOpusDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+private:
+ enum {
+ kMaxNumSamplesPerBuffer = 960 * 6
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ OpusMSDecoder *mDecoder;
+ OpusHeader mHeader;
+
+ int64_t mCodecDelay;
+ int64_t mSeekPreRoll;
+ int64_t mSamplesToDiscard;
+ size_t mInputBufferCount;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+
+ status_t initDecoder();
+
+ C2_DO_NOT_COPY(C2SoftOpusDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_OPUS_DEC_H_
diff --git a/media/codec2/components/raw/Android.bp b/media/codec2/components/raw/Android.bp
new file mode 100644
index 0000000..150eb91
--- /dev/null
+++ b/media/codec2/components/raw/Android.bp
@@ -0,0 +1,9 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2rawdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftRawDec.cpp"],
+}
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
new file mode 100644
index 0000000..8d2a652
--- /dev/null
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftRawDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftRawDec.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.raw.decoder";
+
+class C2SoftRawDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
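+        // Each addParameter() call below registers one Codec2 parameter with
+        // the reflector: withConstValue() exposes a fixed, read-only value,
+        // while withDefault()/withFields()/withSetter() make a parameter
+        // configurable within the stated range.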
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
+ .withFields({C2F(mSampleRate, value).inRange(8000, 192000)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 2))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(1, 10000000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 64 * 1024))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftRawDec::C2SoftRawDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl) {
+}
+
+C2SoftRawDec::~C2SoftRawDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftRawDec::onInit() {
+ mSignalledEos = false;
+ return C2_OK;
+}
+
+c2_status_t C2SoftRawDec::onStop() {
+ mSignalledEos = false;
+ return C2_OK;
+}
+
+void C2SoftRawDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftRawDec::onRelease() {
+}
+
+c2_status_t C2SoftRawDec::onFlush_sm() {
+ return onStop();
+}
+
+void C2SoftRawDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void)pool;
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+
+ if (mSignalledEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ ALOGV("in buffer attr. timestamp %d frameindex %d",
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+
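+    // Raw "decoding" is a pass-through: the input buffer (already PCM) is
+    // forwarded to the output worklet unchanged.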
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ if (!work->input.buffers.empty()) {
+ work->worklets.front()->output.buffers.push_back(work->input.buffers[0]);
+ }
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ mSignalledEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+c2_status_t C2SoftRawDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+class C2SoftRawDecFactory : public C2ComponentFactory {
+public:
+ C2SoftRawDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftRawDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftRawDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftRawDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftRawDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftRawDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftRawDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/raw/C2SoftRawDec.h b/media/codec2/components/raw/C2SoftRawDec.h
new file mode 100644
index 0000000..7dfdec5
--- /dev/null
+++ b/media/codec2/components/raw/C2SoftRawDec.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_RAW_DEC_H_
+#define ANDROID_C2_SOFT_RAW_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+namespace android {
+
+struct C2SoftRawDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftRawDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftRawDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+private:
+ std::shared_ptr<IntfImpl> mIntf;
+ bool mSignalledEos;
+
+ C2_DO_NOT_COPY(C2SoftRawDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_RAW_DEC_H_
diff --git a/media/codec2/components/raw/MODULE_LICENSE_APACHE2 b/media/codec2/components/raw/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/raw/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/raw/NOTICE b/media/codec2/components/raw/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/raw/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/vorbis/Android.bp b/media/codec2/components/vorbis/Android.bp
new file mode 100644
index 0000000..7477da6
--- /dev/null
+++ b/media/codec2/components/vorbis/Android.bp
@@ -0,0 +1,11 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2vorbisdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftVorbisDec.cpp"],
+
+ shared_libs: ["libvorbisidec"],
+}
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
new file mode 100644
index 0000000..280ae36
--- /dev/null
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -0,0 +1,493 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftVorbisDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftVorbisDec.h"
+
+extern "C" {
+ #include <Tremolo/codec_internal.h>
+
+ int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
+ int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
+ int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
+}
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.vorbis.decoder";
+
+class C2SoftVorbisDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_VORBIS))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 48000))
+ .withFields({C2F(mSampleRate, value).inRange(8000, 96000)})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(32000, 500000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192 * 2 * sizeof(int16_t)))
+ .build());
+ }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftVorbisDec::C2SoftVorbisDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mState(nullptr),
+ mVi(nullptr) {
+}
+
+C2SoftVorbisDec::~C2SoftVorbisDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftVorbisDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_NO_MEMORY;
+}
+
+c2_status_t C2SoftVorbisDec::onStop() {
+ if (mState) {
+ vorbis_dsp_clear(mState);
+ delete mState;
+ mState = nullptr;
+ }
+
+ if (mVi) {
+ vorbis_info_clear(mVi);
+ delete mVi;
+ mVi = nullptr;
+ }
+ mNumFramesLeftOnPage = -1;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ return (initDecoder() == OK ? C2_OK : C2_CORRUPTED);
+}
+
+void C2SoftVorbisDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftVorbisDec::onRelease() {
+ if (mState) {
+ vorbis_dsp_clear(mState);
+ delete mState;
+ mState = nullptr;
+ }
+
+ if (mVi) {
+ vorbis_info_clear(mVi);
+ delete mVi;
+ mVi = nullptr;
+ }
+}
+
+status_t C2SoftVorbisDec::initDecoder() {
+ mVi = new vorbis_info{};
+ if (!mVi) return NO_MEMORY;
+ vorbis_info_clear(mVi);
+
+ mState = new vorbis_dsp_state{};
+ if (!mState) return NO_MEMORY;
+ vorbis_dsp_clear(mState);
+
+ mNumFramesLeftOnPage = -1;
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ mInfoUnpacked = false;
+ mBooksUnpacked = false;
+ return OK;
+}
+
+c2_status_t C2SoftVorbisDec::onFlush_sm() {
+ mNumFramesLeftOnPage = -1;
+ mSignalledOutputEos = false;
+ if (mState) vorbis_dsp_restart(mState);
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftVorbisDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void) pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
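+// Wraps a raw byte range in Tremolo's ogg_buffer/ogg_reference structures and
+// initializes an oggpack_buffer so the header payload can be bit-unpacked.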
+static void makeBitReader(
+ const void *data, size_t size,
+ ogg_buffer *buf, ogg_reference *ref, oggpack_buffer *bits) {
+ buf->data = (uint8_t *)data;
+ buf->size = size;
+ buf->refcount = 1;
+ buf->ptr.owner = nullptr;
+
+ ref->buffer = buf;
+ ref->begin = 0;
+ ref->length = size;
+ ref->next = nullptr;
+
+ oggpack_readinit(bits, ref);
+}
+
+// (CHECK!) multiframe is tricky. The decode call doesn't return the number of
+// bytes consumed by the component. Also it is unclear why numPageFrames is
+// being tagged at the end of input buffers for new pages. Refer to lines
+// 297-300 in SimpleDecodingSource.cpp.
+void C2SoftVorbisDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+
+ if (inSize == 0) {
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize,
+ (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku());
+ const uint8_t *data = rView.data() + inOffset;
+ int32_t numChannels = mVi->channels;
+ int32_t samplingRate = mVi->rate;
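+    // Vorbis header packets carry the pattern <type byte>"vorbis": type 1 is
+    // the identification header and type 5 the setup (codebook) header; the
+    // comment header (type 3) is not needed for decoding and is rejected here.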
+ if (inSize > 7 && !memcmp(&data[1], "vorbis", 6)) {
+ if ((data[0] != 1) && (data[0] != 5)) {
+ ALOGE("unexpected type received %d", data[0]);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ ogg_buffer buf;
+ ogg_reference ref;
+ oggpack_buffer bits;
+
+ // skip 7 <type + "vorbis"> bytes
+ makeBitReader((const uint8_t *)data + 7, inSize - 7, &buf, &ref, &bits);
+ if (data[0] == 1) {
+ vorbis_info_init(mVi);
+ if (0 != _vorbis_unpack_info(mVi, &bits)) {
+ ALOGE("Encountered error while unpacking info");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ if (mVi->rate != samplingRate ||
+ mVi->channels != numChannels) {
+ ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
+ samplingRate = mVi->rate;
+ numChannels = mVi->channels;
+
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, samplingRate);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, numChannels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+                if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ mInfoUnpacked = true;
+ } else {
+ if (!mInfoUnpacked) {
+ ALOGE("Data with type:5 sent before sending type:1");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ if (0 != _vorbis_unpack_books(mVi, &bits)) {
+ ALOGE("Encountered error while unpacking books");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ if (0 != vorbis_dsp_init(mState, mVi)) {
+                ALOGE("Encountered error during dsp init");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ mBooksUnpacked = true;
+ }
+ fillEmptyWork(work);
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ return;
+ }
+
+ if (!mInfoUnpacked || !mBooksUnpacked) {
+        ALOGE("Missing CODEC_CONFIG data mInfoUnpacked: %d mBooksUnpacked: %d", mInfoUnpacked, mBooksUnpacked);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ int32_t numPageFrames = 0;
+ if (inSize < sizeof(numPageFrames)) {
+ ALOGE("input header has size %zu, expected %zu", inSize, sizeof(numPageFrames));
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ memcpy(&numPageFrames, data + inSize - sizeof(numPageFrames), sizeof(numPageFrames));
+ inSize -= sizeof(numPageFrames);
+ if (numPageFrames >= 0) {
+ mNumFramesLeftOnPage = numPageFrames;
+ }
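+    // The trailing int32 is appended by the framework (see the note above
+    // referencing SimpleDecodingSource.cpp): a non-negative value starts a new
+    // page with that many frames, while a negative value leaves the running
+    // per-page counter untouched.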
+
+ ogg_buffer buf;
+ buf.data = const_cast<unsigned char*>(data);
+ buf.size = inSize;
+ buf.refcount = 1;
+ buf.ptr.owner = nullptr;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = buf.size;
+ ref.next = nullptr;
+
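+    // Wrap the remaining compressed payload in an ogg_packet so it can be fed
+    // to vorbis_dsp_synthesis() below.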
+ ogg_packet pack;
+ pack.packet = &ref;
+ pack.bytes = ref.length;
+ pack.b_o_s = 0;
+ pack.e_o_s = 0;
+ pack.granulepos = 0;
+ pack.packetno = 0;
+
+ size_t maxSamplesInBuffer = kMaxNumSamplesPerChannel * mVi->channels;
+ size_t outCapacity = maxSamplesInBuffer * sizeof(int16_t);
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ int numFrames = 0;
+ int ret = vorbis_dsp_synthesis(mState, &pack, 1);
+ if (0 != ret) {
+ ALOGE("vorbis_dsp_synthesis returned %d", ret);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ } else {
+ numFrames = vorbis_dsp_pcmout(
+ mState, reinterpret_cast<int16_t *> (wView.data()),
+ kMaxNumSamplesPerChannel);
+ if (numFrames < 0) {
+ ALOGD("vorbis_dsp_pcmout returned %d", numFrames);
+ numFrames = 0;
+ }
+ }
+
+ if (mNumFramesLeftOnPage >= 0) {
+ if (numFrames > mNumFramesLeftOnPage) {
+ ALOGV("discarding %d frames at end of page", numFrames - mNumFramesLeftOnPage);
+ numFrames = mNumFramesLeftOnPage;
+ }
+ mNumFramesLeftOnPage -= numFrames;
+ }
+
+ if (numFrames) {
+ int outSize = numFrames * sizeof(int16_t) * mVi->channels;
+
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ } else {
+ fillEmptyWork(work);
+ block.reset();
+ }
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+}
+
+class C2SoftVorbisDecFactory : public C2ComponentFactory {
+public:
+ C2SoftVorbisDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftVorbisDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftVorbisDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftVorbisDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftVorbisDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftVorbisDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftVorbisDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.h b/media/codec2/components/vorbis/C2SoftVorbisDec.h
new file mode 100644
index 0000000..3bf7326
--- /dev/null
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_VORBIS_DEC_H_
+#define ANDROID_C2_SOFT_VORBIS_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+struct vorbis_dsp_state;
+struct vorbis_info;
+
+namespace android {
+
+struct C2SoftVorbisDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftVorbisDec(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftVorbisDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+ private:
+ enum {
+ kMaxNumSamplesPerChannel = 8192,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ vorbis_dsp_state *mState;
+ vorbis_info *mVi;
+
+ int32_t mNumFramesLeftOnPage;
+ bool mSignalledError;
+ bool mSignalledOutputEos;
+ bool mInfoUnpacked;
+ bool mBooksUnpacked;
+ status_t initDecoder();
+
+ C2_DO_NOT_COPY(C2SoftVorbisDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_VORBIS_DEC_H_
+
diff --git a/media/codec2/components/vorbis/MODULE_LICENSE_APACHE2 b/media/codec2/components/vorbis/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/vorbis/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/vorbis/NOTICE b/media/codec2/components/vorbis/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/codec2/components/vorbis/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
new file mode 100644
index 0000000..c09f365
--- /dev/null
+++ b/media/codec2/components/vpx/Android.bp
@@ -0,0 +1,60 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2vp9dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftVpxDec.cpp"],
+
+ static_libs: ["libvpx"],
+
+ cflags: [
+ "-DVP9",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2vp8dec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftVpxDec.cpp"],
+
+ static_libs: ["libvpx"],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2vp9enc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: [
+ "C2SoftVp9Enc.cpp",
+ "C2SoftVpxEnc.cpp",
+ ],
+
+ static_libs: ["libvpx"],
+
+ cflags: ["-DVP9"],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2vp8enc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: [
+ "C2SoftVp8Enc.cpp",
+ "C2SoftVpxEnc.cpp",
+ ],
+
+ static_libs: ["libvpx"],
+}
+
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.cpp b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
new file mode 100644
index 0000000..0ae717a
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftVp8Enc"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include "C2SoftVp8Enc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.vp8.encoder";
+
+C2SoftVp8Enc::C2SoftVp8Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : C2SoftVpxEnc(name, id, intfImpl), mDCTPartitions(0), mProfile(1) {}
+
+void C2SoftVp8Enc::setCodecSpecificInterface() {
+ mCodecInterface = vpx_codec_vp8_cx();
+}
+
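+// Maps the configured profile value (1/2/4/8) onto libvpx's g_profile
+// indices 0-3; unrecognized values fall back to profile 0.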
+void C2SoftVp8Enc::setCodecSpecificConfiguration() {
+ switch (mProfile) {
+ case 1:
+ mCodecConfiguration->g_profile = 0;
+ break;
+
+ case 2:
+ mCodecConfiguration->g_profile = 1;
+ break;
+
+ case 4:
+ mCodecConfiguration->g_profile = 2;
+ break;
+
+ case 8:
+ mCodecConfiguration->g_profile = 3;
+ break;
+
+ default:
+ mCodecConfiguration->g_profile = 0;
+ }
+}
+
+vpx_codec_err_t C2SoftVp8Enc::setCodecSpecificControls() {
+ vpx_codec_err_t codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_TOKEN_PARTITIONS,
+ mDCTPartitions);
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting dct partitions for vpx encoder.");
+ }
+ return codec_return;
+}
+
+class C2SoftVp8EncFactory : public C2ComponentFactory {
+public:
+ C2SoftVp8EncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftVp8Enc(COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftVpxEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftVp8EncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftVp8EncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.h b/media/codec2/components/vpx/C2SoftVp8Enc.h
new file mode 100644
index 0000000..ed6f356
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_VP8_ENC_H__
+#define ANDROID_C2_SOFT_VP8_ENC_H__
+
+#include "C2SoftVpxEnc.h"
+
+namespace android {
+
+// Exposes the VP8 encoder as a C2 component.
+//
+// In addition to the base class settings, only the following encoder settings
+// are available:
+// - token partitioning
+struct C2SoftVp8Enc : public C2SoftVpxEnc {
+ C2SoftVp8Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ protected:
+    // Populates |mCodecInterface| with the codec-specific libvpx interface.
+ virtual void setCodecSpecificInterface();
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration();
+
+ // Initializes codec specific encoder settings.
+ virtual vpx_codec_err_t setCodecSpecificControls();
+
+ private:
+ // Max value supported for DCT partitions
+ static const uint32_t kMaxDCTPartitions = 3;
+
+ // vp8 specific configuration parameter
+ // that enables token partitioning of
+ // the stream into substreams
+ int32_t mDCTPartitions;
+
+ // C2 Profile parameter
+ int32_t mProfile;
+
+ C2_DO_NOT_COPY(C2SoftVp8Enc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_VP8_ENC_H__
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.cpp b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
new file mode 100644
index 0000000..b26170f
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftVp9Enc"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include "C2SoftVp9Enc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.vp9.encoder";
+
+C2SoftVp9Enc::C2SoftVp9Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : C2SoftVpxEnc(name, id, intfImpl),
+ mProfile(1),
+ mLevel(0),
+ mTileColumns(0),
+ mFrameParallelDecoding(false) {
+}
+
+void C2SoftVp9Enc::setCodecSpecificInterface() {
+ mCodecInterface = vpx_codec_vp9_cx();
+}
+
+void C2SoftVp9Enc::setCodecSpecificConfiguration() {
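+    // Maps the component-level profile value (1, 2, 4 or 8) onto the libvpx
+    // g_profile index (0-3); any unrecognized value falls back to profile 0.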
+ switch (mProfile) {
+ case 1:
+ mCodecConfiguration->g_profile = 0;
+ break;
+
+ case 2:
+ mCodecConfiguration->g_profile = 1;
+ break;
+
+ case 4:
+ mCodecConfiguration->g_profile = 2;
+ break;
+
+ case 8:
+ mCodecConfiguration->g_profile = 3;
+ break;
+
+ default:
+ mCodecConfiguration->g_profile = 0;
+ }
+}
+
+vpx_codec_err_t C2SoftVp9Enc::setCodecSpecificControls() {
+ vpx_codec_err_t codecReturn = vpx_codec_control(
+ mCodecContext, VP9E_SET_TILE_COLUMNS, mTileColumns);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP9E_SET_TILE_COLUMNS to %d. vpx_codec_control() "
+ "returned %d", mTileColumns, codecReturn);
+ return codecReturn;
+ }
+ codecReturn = vpx_codec_control(
+ mCodecContext, VP9E_SET_FRAME_PARALLEL_DECODING,
+ mFrameParallelDecoding);
+ if (codecReturn != VPX_CODEC_OK) {
+        ALOGE("Error setting VP9E_SET_FRAME_PARALLEL_DECODING to %d. "
+              "vpx_codec_control() returned %d", mFrameParallelDecoding,
+ codecReturn);
+ return codecReturn;
+ }
+ codecReturn = vpx_codec_control(mCodecContext, VP9E_SET_ROW_MT, 1);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP9E_SET_ROW_MT to 1. vpx_codec_control() "
+ "returned %d", codecReturn);
+ return codecReturn;
+ }
+
+ // For VP9, we always set CPU_USED to 8 (because the realtime default is 0
+ // which is too slow).
+ codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP8E_SET_CPUUSED to 8. vpx_codec_control() "
+ "returned %d", codecReturn);
+ return codecReturn;
+ }
+ return codecReturn;
+}
+
+class C2SoftVp9EncFactory : public C2ComponentFactory {
+public:
+ C2SoftVp9EncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftVp9Enc(COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftVpxEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftVp9EncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftVp9EncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.h b/media/codec2/components/vpx/C2SoftVp9Enc.h
new file mode 100644
index 0000000..77ef8fd
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_VP9_ENC_H__
+#define ANDROID_C2_SOFT_VP9_ENC_H__
+
+#include "C2SoftVpxEnc.h"
+
+namespace android {
+
+// Exposes vp9 encoder as a c2 Component
+//
+// In addition to the base class settings, only the following encoder settings
+// are available:
+// - tile rows
+// - tile columns
+// - frame parallel mode
+struct C2SoftVp9Enc : public C2SoftVpxEnc {
+ C2SoftVp9Enc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ protected:
+ // Populates |mCodecInterface| with codec specific settings.
+ virtual void setCodecSpecificInterface();
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration();
+
+ // Initializes codec specific encoder settings.
+ virtual vpx_codec_err_t setCodecSpecificControls();
+
+ private:
+ // C2 Profile & Level parameter
+ int32_t mProfile;
+ int32_t mLevel __unused;
+
+ int32_t mTileColumns;
+
+ bool mFrameParallelDecoding;
+
+ C2_DO_NOT_COPY(C2SoftVp9Enc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_VP9_ENC_H__
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
new file mode 100644
index 0000000..01de681
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -0,0 +1,640 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftVpxDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftVpxDec.h"
+
+namespace android {
+
+#ifdef VP9
+constexpr char COMPONENT_NAME[] = "c2.android.vp9.decoder";
+#else
+constexpr char COMPONENT_NAME[] = "c2.android.vp8.decoder";
+#endif
+
+class C2SoftVpxDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+#ifdef VP9
+ MEDIA_MIMETYPE_VIDEO_VP9
+#else
+ MEDIA_MIMETYPE_VIDEO_VP8
+#endif
+ ) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+#ifdef VP9
+ // TODO: Add C2Config::PROFILE_VP9_2HDR ??
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_VP9_0, C2Config::LEVEL_VP9_5))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_VP9_0,
+ C2Config::PROFILE_VP9_2}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_VP9_1,
+ C2Config::LEVEL_VP9_1_1,
+ C2Config::LEVEL_VP9_2,
+ C2Config::LEVEL_VP9_2_1,
+ C2Config::LEVEL_VP9_3,
+ C2Config::LEVEL_VP9_3_1,
+ C2Config::LEVEL_VP9_4,
+ C2Config::LEVEL_VP9_4_1,
+ C2Config::LEVEL_VP9_5,
+ })
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+#if 0
+ // sample BT.2020 static info
+ mHdrStaticInfo = std::make_shared<C2StreamHdrStaticInfo::output>();
+ mHdrStaticInfo->mastering = {
+ .red = { .x = 0.708, .y = 0.292 },
+ .green = { .x = 0.170, .y = 0.797 },
+ .blue = { .x = 0.131, .y = 0.046 },
+ .white = { .x = 0.3127, .y = 0.3290 },
+ .maxLuminance = 1000,
+ .minLuminance = 0.1,
+ };
+ mHdrStaticInfo->maxCll = 1000;
+ mHdrStaticInfo->maxFall = 120;
+
+ mHdrStaticInfo->maxLuminance = 0; // disable static info
+
+ helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+ addParameter(
+ DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+ .withDefault(mHdrStaticInfo)
+ .withFields({
+ C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+ // TODO
+ })
+ .withSetter(HdrStaticInfoSetter)
+ .build());
+#endif
+#else
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withConstValue(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
+ .build());
+#endif
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ { C2ChromaOffsetStruct::ITU_YUV_420_0() },
+ 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 2048u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 2048u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 2
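+        // Rough worst-case estimate: a 64x64 block of 8-bit YUV 4:2:0 is
+        // 64 * 64 * 3 / 2 = 6144 bytes, so at a 2:1 compression ratio each
+        // block contributes at most 3072 bytes to the bitstream.
+        // e.g. 1920x1088 -> 30 * 17 * 3072 = 1,566,720 bytes.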
+ me.set().value = (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
+ return C2R::Ok();
+ }
+
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+#ifdef VP9
+#if 0
+ std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+#endif
+#endif
+};
+
+C2SoftVpxDec::C2SoftVpxDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mCodecCtx(nullptr) {
+}
+
+C2SoftVpxDec::~C2SoftVpxDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftVpxDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftVpxDec::onStop() {
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return C2_OK;
+}
+
+void C2SoftVpxDec::onReset() {
+ (void)onStop();
+ c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the decoder; trying a hard reset");
+ destroyDecoder();
+ (void)initDecoder();
+ }
+}
+
+void C2SoftVpxDec::onRelease() {
+ destroyDecoder();
+}
+
+c2_status_t C2SoftVpxDec::onFlush_sm() {
+ if (mFrameParallelMode) {
+ // Flush decoder by passing nullptr data ptr and 0 size.
+ // Ideally, this should never fail.
+ if (vpx_codec_decode(mCodecCtx, nullptr, 0, nullptr, 0)) {
+ ALOGE("Failed to flush on2 decoder.");
+ return C2_CORRUPTED;
+ }
+ }
+
+ // Drop all the decoded frames in decoder.
+ vpx_codec_iter_t iter = nullptr;
+ while (vpx_codec_get_frame(mCodecCtx, &iter)) {
+ }
+
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+ int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %d", cpuCoreCount);
+ return cpuCoreCount;
+}
+
+status_t C2SoftVpxDec::initDecoder() {
+#ifdef VP9
+ mMode = MODE_VP9;
+#else
+ mMode = MODE_VP8;
+#endif
+
+ mWidth = 320;
+ mHeight = 240;
+ mFrameParallelMode = false;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ if (!mCodecCtx) {
+ mCodecCtx = new vpx_codec_ctx_t;
+ }
+ if (!mCodecCtx) {
+ ALOGE("mCodecCtx is null");
+ return NO_MEMORY;
+ }
+
+ vpx_codec_dec_cfg_t cfg;
+ memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
+ cfg.threads = GetCPUCoreCount();
+
+ vpx_codec_flags_t flags;
+ memset(&flags, 0, sizeof(vpx_codec_flags_t));
+ if (mFrameParallelMode) flags |= VPX_CODEC_USE_FRAME_THREADING;
+
+ vpx_codec_err_t vpx_err;
+ if ((vpx_err = vpx_codec_dec_init(
+ mCodecCtx, mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo,
+ &cfg, flags))) {
+ ALOGE("on2 decoder failed to initialize. (%d)", vpx_err);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftVpxDec::destroyDecoder() {
+ if (mCodecCtx) {
+ vpx_codec_destroy(mCodecCtx);
+ delete mCodecCtx;
+ mCodecCtx = nullptr;
+ }
+
+ return OK;
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftVpxDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2GraphicBlock> &block) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block,
+ C2Rect(mWidth, mHeight));
+ auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+ (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+void C2SoftVpxDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+ // Software VP9 Decoder does not need the Codec Specific Data (CSD)
+ // (specified in http://www.webmproject.org/vp9/profiles/). Ignore it if
+ // it was passed.
+ if (codecConfig) {
+ // Ignore CSD buffer for VP9.
+ if (mMode == MODE_VP9) {
+ fillEmptyWork(work);
+ return;
+ } else {
+            // Tolerate the CSD buffer for VP8. This is a workaround
+            // for b/28689536; continue decoding.
+ ALOGW("WARNING: Got CSD buffer for VP8. Continue");
+ }
+ }
+
+ int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
+
+ if (inSize) {
+ uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
+ vpx_codec_err_t err = vpx_codec_decode(
+ mCodecCtx, bitstream, inSize, &frameIndex, 0);
+ if (err != VPX_CODEC_OK) {
+ ALOGE("on2 decoder failed to decode frame. err: %d", err);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ (void)outputBuffer(pool, work);
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!inSize) {
+ fillEmptyWork(work);
+ }
+}
+
+static void copyOutputBufferToYV12Frame(uint8_t *dst,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ uint32_t width, uint32_t height, int32_t bpp) {
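+    // YV12 layout: the full-resolution Y plane is written first, followed by
+    // the 2x2-subsampled V plane and then the U plane (note the V/U order),
+    // using the 16-aligned strides computed below.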
+    size_t dstYStride = align(width, 16) * bpp;
+ size_t dstUVStride = align(dstYStride / 2, 16);
+ uint8_t *dstStart = dst;
+
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dst, srcY, width * bpp);
+ srcY += srcYStride;
+ dst += dstYStride;
+ }
+
+ dst = dstStart + dstYStride * height;
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, srcV, width / 2 * bpp);
+ srcV += srcVStride;
+ dst += dstUVStride;
+ }
+
+ dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, srcU, width / 2 * bpp);
+ srcU += srcUStride;
+ dst += dstUVStride;
+ }
+}
+
+bool C2SoftVpxDec::outputBuffer(
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work)
+{
+ if (!(work && pool)) return false;
+
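+    // Pull at most one decoded frame per call; drainInternal() keeps calling
+    // this until no frame remains.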
+ vpx_codec_iter_t iter = nullptr;
+ vpx_image_t *img = vpx_codec_get_frame(mCodecCtx, &iter);
+
+ if (!img) return false;
+
+ if (img->d_w != mWidth || img->d_h != mHeight) {
+ mWidth = img->d_w;
+ mHeight = img->d_h;
+
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Config update size failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+
+ }
+ CHECK(img->fmt == VPX_IMG_FMT_I420 || img->fmt == VPX_IMG_FMT_I42016);
+ int32_t bpp = 1;
+ if (img->fmt == VPX_IMG_FMT_I42016) {
+ bpp = 2;
+ }
+
+ std::shared_ptr<C2GraphicBlock> block;
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16) * bpp, mHeight, format, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ work->result = err;
+ return false;
+ }
+
+ C2GraphicView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+
+ ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d",
+ block->width(), block->height(), mWidth, mHeight, (int)*(int64_t *)img->user_priv);
+
+ uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ size_t srcYStride = img->stride[VPX_PLANE_Y];
+ size_t srcUStride = img->stride[VPX_PLANE_U];
+ size_t srcVStride = img->stride[VPX_PLANE_V];
+ const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
+ copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, mWidth, mHeight, bpp);
+
+ finishWork(*(int64_t *)img->user_priv, work, std::move(block));
+ return true;
+}
+
+c2_status_t C2SoftVpxDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ while ((outputBuffer(pool, work))) {
+ }
+
+ if (drainMode == DRAIN_COMPONENT_WITH_EOS &&
+ work && work->workletsProcessed == 0u) {
+ fillEmptyWork(work);
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftVpxDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftVpxFactory : public C2ComponentFactory {
+public:
+ C2SoftVpxFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftVpxDec(COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftVpxDec::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftVpxDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftVpxFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftVpxFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
new file mode 100644
index 0000000..60c8484
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_VPX_DEC_H_
+#define ANDROID_C2_SOFT_VPX_DEC_H_
+
+#include <SimpleC2Component.h>
+
+
+#include "vpx/vpx_decoder.h"
+#include "vpx/vp8dx.h"
+
+namespace android {
+
+struct C2SoftVpxDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftVpxDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftVpxDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ private:
+ enum {
+ MODE_VP8,
+ MODE_VP9,
+ } mMode;
+
+ std::shared_ptr<IntfImpl> mIntf;
+ vpx_codec_ctx_t *mCodecCtx;
+ bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ status_t initDecoder();
+ status_t destroyDecoder();
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2GraphicBlock> &block);
+ bool outputBuffer(
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+
+ C2_DO_NOT_COPY(C2SoftVpxDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_VPX_DEC_H_
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
new file mode 100644
index 0000000..155a84f
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -0,0 +1,670 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftVpxEnc"
+#include <log/log.h>
+#include <utils/misc.h>
+
+#include <media/hardware/VideoAPI.h>
+
+#include <Codec2BufferUtils.h>
+#include <C2Debug.h>
+#include "C2SoftVpxEnc.h"
+
+#ifndef INT32_MAX
+#define INT32_MAX 2147483647
+#endif
+
+namespace android {
+
+#if 0
+static size_t getCpuCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+#endif
+
+C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mCodecContext(nullptr),
+ mCodecConfiguration(nullptr),
+ mCodecInterface(nullptr),
+ mStrideAlign(2),
+ mColorFormat(VPX_IMG_FMT_I420),
+ mBitrateControlMode(VPX_VBR),
+ mErrorResilience(false),
+ mMinQuantizer(0),
+ mMaxQuantizer(0),
+ mTemporalLayers(0),
+ mTemporalPatternType(VPXTemporalLayerPatternNone),
+ mTemporalPatternLength(0),
+ mTemporalPatternIdx(0),
+ mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
+ mSignalledOutputEos(false),
+ mSignalledError(false) {
+ memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
+ mTemporalLayerBitrateRatio[0] = 100;
+}
+
+C2SoftVpxEnc::~C2SoftVpxEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftVpxEnc::onInit() {
+ status_t err = initEncoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+void C2SoftVpxEnc::onRelease() {
+ if (mCodecContext) {
+ vpx_codec_destroy(mCodecContext);
+ delete mCodecContext;
+ mCodecContext = nullptr;
+ }
+
+ if (mCodecConfiguration) {
+ delete mCodecConfiguration;
+ mCodecConfiguration = nullptr;
+ }
+
+ // this one is not allocated by us
+ mCodecInterface = nullptr;
+}
+
+c2_status_t C2SoftVpxEnc::onStop() {
+ onRelease();
+ mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+ return C2_OK;
+}
+
+void C2SoftVpxEnc::onReset() {
+ (void)onStop();
+}
+
+c2_status_t C2SoftVpxEnc::onFlush_sm() {
+ return onStop();
+}
+
+status_t C2SoftVpxEnc::initEncoder() {
+ vpx_codec_err_t codec_return;
+ status_t result = UNKNOWN_ERROR;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mBitrateMode = mIntf->getBitrateMode_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mIntraRefresh = mIntf->getIntraRefresh_l();
+ mRequestSync = mIntf->getRequestSync_l();
+ mTemporalLayers = mIntf->getTemporalLayers_l()->m.layerCount;
+ }
+
+ switch (mBitrateMode->value) {
+ case C2Config::BITRATE_VARIABLE:
+ mBitrateControlMode = VPX_VBR;
+ break;
+ case C2Config::BITRATE_CONST:
+ default:
+ mBitrateControlMode = VPX_CBR;
+ break;
+ }
+
+ setCodecSpecificInterface();
+ if (!mCodecInterface) goto CleanUp;
+
+ ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
+ mMinQuantizer, mMaxQuantizer);
+
+ mCodecConfiguration = new vpx_codec_enc_cfg_t;
+ if (!mCodecConfiguration) goto CleanUp;
+ codec_return = vpx_codec_enc_config_default(mCodecInterface,
+ mCodecConfiguration,
+ 0);
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error populating default configuration for vpx encoder.");
+ goto CleanUp;
+ }
+
+ mCodecConfiguration->g_w = mSize->width;
+ mCodecConfiguration->g_h = mSize->height;
+ //mCodecConfiguration->g_threads = getCpuCoreCount();
+ mCodecConfiguration->g_threads = 0;
+ mCodecConfiguration->g_error_resilient = mErrorResilience;
+
+    // the timebase unit is microseconds:
+    // g_timebase is expressed as a fraction of a second, here 1/1000000
+ mCodecConfiguration->g_timebase.num = 1;
+ mCodecConfiguration->g_timebase.den = 1000000;
+    // rc_target_bitrate is in kbps, mBitrate in bps; +500 rounds to the nearest kbps
+ mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+ mCodecConfiguration->rc_end_usage = mBitrateControlMode;
+ // Disable frame drop - not allowed in MediaCodec now.
+ mCodecConfiguration->rc_dropframe_thresh = 0;
+ // Disable lagged encoding.
+ mCodecConfiguration->g_lag_in_frames = 0;
+ if (mBitrateControlMode == VPX_CBR) {
+ // Disable spatial resizing.
+ mCodecConfiguration->rc_resize_allowed = 0;
+ // Single-pass mode.
+ mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 15;
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
+ // The amount of data that may be buffered by the decoding
+ // application in ms.
+ mCodecConfiguration->rc_buf_sz = 1000;
+ // Enable error resilience - needed for packet loss.
+ mCodecConfiguration->g_error_resilient = 1;
+        // Maximum key frame interval - for CBR, boost to 3000
+ mCodecConfiguration->kf_max_dist = 3000;
+ // Encoder determines optimal key frame placement automatically.
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+
+    // Frame temporal pattern - for now only the WebRTC-like pattern is supported.
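+    // Each mTemporalPattern entry selects the reference/update flags for one
+    // frame of the repeating cycle (see getEncodeFlags()); ts_layer_id assigns
+    // each frame in the period to a layer and ts_rate_decimator sets the
+    // per-layer frame rate decimation.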
+ switch (mTemporalLayers) {
+ case 0:
+ mTemporalPatternLength = 0;
+ break;
+ case 1:
+ mCodecConfiguration->ts_number_layers = 1;
+ mCodecConfiguration->ts_rate_decimator[0] = 1;
+ mCodecConfiguration->ts_periodicity = 1;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mTemporalPattern[0] = kTemporalUpdateLastRefAll;
+ mTemporalPatternLength = 1;
+ break;
+ case 2:
+ mCodecConfiguration->ts_number_layers = 2;
+ mCodecConfiguration->ts_rate_decimator[0] = 2;
+ mCodecConfiguration->ts_rate_decimator[1] = 1;
+ mCodecConfiguration->ts_periodicity = 2;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 1;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ case 3:
+ mCodecConfiguration->ts_number_layers = 3;
+ mCodecConfiguration->ts_rate_decimator[0] = 4;
+ mCodecConfiguration->ts_rate_decimator[1] = 2;
+ mCodecConfiguration->ts_rate_decimator[2] = 1;
+ mCodecConfiguration->ts_periodicity = 4;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 2;
+ mCodecConfiguration->ts_layer_id[2] = 1;
+ mCodecConfiguration->ts_layer_id[3] = 2;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateNone;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateNone;
+ mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ default:
+ ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
+ goto CleanUp;
+ }
+ // Set bitrate values for each layer
+ for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
+ mCodecConfiguration->ts_target_bitrate[i] =
+ mCodecConfiguration->rc_target_bitrate *
+ mTemporalLayerBitrateRatio[i] / 100;
+ }
+ if (mIntf->getSyncFramePeriod() >= 0) {
+ mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
+ mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+ if (mMinQuantizer > 0) {
+ mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+ }
+ if (mMaxQuantizer > 0) {
+ mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ }
+ setCodecSpecificConfiguration();
+ mCodecContext = new vpx_codec_ctx_t;
+ if (!mCodecContext) goto CleanUp;
+ codec_return = vpx_codec_enc_init(mCodecContext,
+ mCodecInterface,
+ mCodecConfiguration,
+ 0); // flags
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error initializing vpx encoder");
+ goto CleanUp;
+ }
+
+ // Extra CBR settings
+ if (mBitrateControlMode == VPX_CBR) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_STATIC_THRESHOLD,
+ 1);
+ if (codec_return == VPX_CODEC_OK) {
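+            // VP8E_SET_MAX_INTRA_BITRATE_PCT caps key frame size as a
+            // percentage of the average per-frame bitrate; it is derived here
+            // from the optimal buffer size and the frame rate, e.g.
+            // 600 ms * 30 fps / 20 = 900 (%), and never drops below 300.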
+ uint32_t rc_max_intra_target =
+ (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5);
+            // Don't go below 3 times the per-frame bandwidth.
+ if (rc_max_intra_target < 300) {
+ rc_max_intra_target = 300;
+ }
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_MAX_INTRA_BITRATE_PCT,
+ rc_max_intra_target);
+ }
+ if (codec_return == VPX_CODEC_OK) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_CPUUSED,
+ -8);
+ }
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting cbr parameters for vpx encoder.");
+ goto CleanUp;
+ }
+ }
+
+ codec_return = setCodecSpecificControls();
+ if (codec_return != VPX_CODEC_OK) goto CleanUp;
+
+ {
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ if (((uint64_t)width * height) >
+ ((uint64_t)INT32_MAX / 3)) {
+ ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
+ } else {
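+            // Round the dimensions up to the stride alignment; a worst-case
+            // YUV 4:2:0 conversion buffer needs stride * vstride * 3 / 2 bytes.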
+ uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
+ if (!mConversionBuffer.size()) {
+ ALOGE("Allocating conversion buffer failed.");
+ } else {
+ mNumInputFrames = -1;
+ return OK;
+ }
+ }
+ }
+
+CleanUp:
+ onRelease();
+ return result;
+}
+
+vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() {
+ vpx_enc_frame_flags_t flags = 0;
+ if (mTemporalPatternLength > 0) {
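+        // Advance through the repeating temporal pattern and translate the
+        // current entry into VP8 reference/update flags.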
+ int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
+ mTemporalPatternIdx++;
+ switch (mTemporalPattern[patternIdx]) {
+ case kTemporalUpdateLast:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kTemporalUpdateGoldenWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ [[fallthrough]];
+ case kTemporalUpdateGolden:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateAltrefWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ [[fallthrough]];
+ case kTemporalUpdateAltref:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateNoneNoRefAltref:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ [[fallthrough]];
+ case kTemporalUpdateNone:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastAndGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateLastRefAll:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
+ }
+ return flags;
+}
+
+// TODO: add support for YUV input color formats
+// TODO: add support for SVC, ARF. SVC and ARF return multiple frames
+// (hierarchical / no-show) in one call. These frames should be combined into
+// a single buffer and sent back to the client
+void C2SoftVpxEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ // Initialize encoder if not already
+ if (!mCodecContext && OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ std::shared_ptr<const C2GraphicView> rView;
+ std::shared_ptr<C2Buffer> inputBuffer;
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ rView = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (rView->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", rView->error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ ALOGV("Empty input Buffer");
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ return;
+ }
+
+ const C2ConstGraphicBlock inBuffer =
+ inputBuffer->data().graphicBlocks().front();
+ if (inBuffer.width() != mSize->width ||
+ inBuffer.height() != mSize->height) {
+ ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
+ inBuffer.width(), mSize->width, inBuffer.height(),
+ mSize->height);
+ mSignalledError = true;
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ vpx_image_t raw_frame;
+ const C2PlanarLayout &layout = rView->layout();
+ uint32_t width = rView->width();
+ uint32_t height = rView->height();
+ if (width > 0x8000 || height > 0x8000) {
+ ALOGE("Image too big: %u x %u", width, height);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ case C2PlanarLayout::TYPE_RGBA: {
+ ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
+ mConversionBuffer.size(), *rView.get());
+ vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
+ mStrideAlign, mConversionBuffer.data());
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*rView)) {
+ ALOGE("input is not YUV420");
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
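+            // If all three planes have unit column increments the input is
+            // already I420-compatible and can be wrapped directly; otherwise
+            // it is copied into the I420 conversion buffer first.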
+ if (layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 1
+ && layout.planes[layout.PLANE_V].colInc == 1) {
+ // I420 compatible - though with custom offset and stride
+ vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
+ mStrideAlign, (uint8_t*)rView->data()[0]);
+ raw_frame.planes[1] = (uint8_t*)rView->data()[1];
+ raw_frame.planes[2] = (uint8_t*)rView->data()[2];
+ raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
+ raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
+ raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
+ } else {
+ // copy to I420
+ MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
+ if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
+ status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride,
+ mStrideAlign, (uint8_t*)rView->data()[0]);
+ vpx_img_set_rect(&raw_frame, 0, 0, width, height);
+ } else {
+ ALOGE("Conversion buffer is too small: %u x %u for %zu",
+ stride, vstride, mConversionBuffer.size());
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ }
+ break;
+ }
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ vpx_enc_frame_flags_t flags = getEncodeFlags();
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
+ std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
+ lock.unlock();
+
+ if (intraRefresh != mIntraRefresh) {
+ mIntraRefresh = intraRefresh;
+ ALOGV("Got mIntraRefresh request");
+ }
+
+ if (requestSync != mRequestSync) {
+ // we can handle IDR immediately
+ if (requestSync->value) {
+ // unset request
+ C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
+ ALOGV("Got sync request");
+ flags |= VPX_EFLAG_FORCE_KF;
+ }
+ mRequestSync = requestSync;
+ }
+
+ if (bitrate != mBitrate) {
+ mBitrate = bitrate;
+ mCodecConfiguration->rc_target_bitrate =
+ (mBitrate->value + 500) / 1000;
+ vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
+ mCodecConfiguration);
+ if (res != VPX_CODEC_OK) {
+ ALOGE("vpx encoder failed to update bitrate: %s",
+ vpx_codec_err_to_string(res));
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
+ uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
+ uint32_t frameDuration;
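+    // Derive the frame duration (in microseconds) from consecutive input
+    // timestamps; if the timestamp did not advance, fall back to the
+    // configured frame rate (or 30 fps when that is unset).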
+ if (inputTimeStamp > mLastTimestamp) {
+ frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp);
+ } else {
+ // Use default of 30 fps in case of 0 frame rate.
+ float frameRate = mFrameRate->value;
+ if (frameRate < 0.001) {
+ frameRate = 30;
+ }
+ frameDuration = (uint32_t)(1000000 / frameRate + 0.5);
+ }
+ mLastTimestamp = inputTimeStamp;
+
+ vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame,
+ inputTimeStamp,
+ frameDuration, flags,
+ VPX_DL_REALTIME);
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("vpx encoder failed to encode frame");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ bool populated = false;
+ vpx_codec_iter_t encoded_packet_iterator = nullptr;
+ const vpx_codec_cx_pkt_t* encoded_packet;
+ while ((encoded_packet = vpx_codec_get_cx_data(
+ mCodecContext, &encoded_packet_iterator))) {
+ if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
+ ++mNumInputFrames;
+
+ ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+ uint32_t flags = 0;
+ if (eos) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
+ if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
+ work->workletsProcessed = 1u;
+ populated = true;
+ if (eos) {
+ mSignalledOutputEos = true;
+ ALOGV("signalled EOS");
+ }
+ }
+ }
+ if (!populated) {
+ work->workletsProcessed = 0u;
+ }
+}
+
+c2_status_t C2SoftVpxEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
+} // namespace android
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
new file mode 100644
index 0000000..87ed1a9
--- /dev/null
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -0,0 +1,437 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_VPX_ENC_H__
+#define ANDROID_C2_SOFT_VPX_ENC_H__
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Component.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vpx_image.h"
+#include "vpx/vp8cx.h"
+
+namespace android {
+
+// TODO: These defs are taken from the deprecated OMX_VideoExt.h. Move these
+// definitions to a new header file and include it.
+
+/** Maximum number of temporal layers */
+#define MAXTEMPORALLAYERS 3
+
+/** temporal layer patterns */
+typedef enum TemporalPatternType {
+ VPXTemporalLayerPatternNone = 0,
+ VPXTemporalLayerPatternWebRTC = 1,
+ VPXTemporalLayerPatternMax = 0x7FFFFFFF
+} TemporalPatternType;
+
+// Base class for a VPX Encoder Component
+//
+// Only the following encoder settings are available (codec-specific settings
+// might be available in the sub-classes):
+// - video resolution
+// - target bitrate
+// - rate control (constant / variable)
+// - frame rate
+// - error resilience
+// - reconstruction & loop filters (g_profile)
+//
+// Only the following color formats are recognized
+// - C2PlanarLayout::TYPE_RGB
+// - C2PlanarLayout::TYPE_RGBA
+//
+// The following settings are not configurable by the client
+//    - encoding deadline is realtime
+//    - multithreaded encoding utilizes a number of threads equal
+//      to the number of online CPUs available
+//    - the algorithm interface for the encoder is decided by the sub-class in use
+//    - fractional bits of the frame rate are discarded
+//    - timestamps are in microseconds, therefore the encoder timebase is fixed
+//      to 1/1000000
+
+struct C2SoftVpxEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftVpxEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override final;
+ c2_status_t onStop() override final;
+ void onReset() override final;
+ void onRelease() override final;
+ c2_status_t onFlush_sm() override final;
+
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override final;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override final;
+
+ protected:
+ std::shared_ptr<IntfImpl> mIntf;
+ virtual ~C2SoftVpxEnc();
+
+ // Initializes vpx encoder with available settings.
+ status_t initEncoder();
+
+ // Populates mCodecInterface with codec specific settings.
+ virtual void setCodecSpecificInterface() = 0;
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration() = 0;
+
+ // Sets codec specific encoder controls.
+ virtual vpx_codec_err_t setCodecSpecificControls() = 0;
+
+ // Get current encode flags.
+ virtual vpx_enc_frame_flags_t getEncodeFlags();
+
+ enum TemporalReferences {
+ // For 1 layer case: reference all (last, golden, and alt ref), but only
+ // update last.
+ kTemporalUpdateLastRefAll = 12,
+ // First base layer frame for 3 temporal layers, which updates last and
+ // golden with alt ref dependency.
+ kTemporalUpdateLastAndGoldenRefAltRef = 11,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenRefAltRef = 10,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
+ // Base layer with alt ref dependency.
+ kTemporalUpdateLastRefAltRef = 8,
+        // Highest enhancement layer without dependency on golden with alt ref
+ // dependency.
+ kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
+ // Second layer and last frame in cycle, for 2 layers.
+ kTemporalUpdateNoneNoRefAltref = 6,
+ // Highest enhancement layer.
+ kTemporalUpdateNone = 5,
+ // Second enhancement layer.
+ kTemporalUpdateAltref = 4,
+ // Second enhancement layer without dependency on previous frames in
+ // the second enhancement layer.
+ kTemporalUpdateAltrefWithoutDependency = 3,
+ // First enhancement layer.
+ kTemporalUpdateGolden = 2,
+ // First enhancement layer without dependency on previous frames in
+ // the first enhancement layer.
+ kTemporalUpdateGoldenWithoutDependency = 1,
+ // Base layer.
+ kTemporalUpdateLast = 0,
+ };
+ enum {
+ kMaxTemporalPattern = 8
+ };
+
+ // vpx specific opaque data structure that
+ // stores encoder state
+ vpx_codec_ctx_t* mCodecContext;
+
+ // vpx specific data structure that
+ // stores encoder configuration
+ vpx_codec_enc_cfg_t* mCodecConfiguration;
+
+ // vpx specific read-only data structure
+ // that specifies algorithm interface (e.g. vp8)
+ vpx_codec_iface_t* mCodecInterface;
+
+    // align stride to a power of 2
+ int32_t mStrideAlign;
+
+ // Color format for the input port
+ vpx_img_fmt_t mColorFormat;
+
+ // Bitrate control mode, either constant or variable
+ vpx_rc_mode mBitrateControlMode;
+
+ // Parameter that denotes whether error resilience
+ // is enabled in encoder
+ bool mErrorResilience;
+
+ // Minimum (best quality) quantizer
+ uint32_t mMinQuantizer;
+
+ // Maximum (worst quality) quantizer
+ uint32_t mMaxQuantizer;
+
+ // Number of coding temporal layers to be used.
+ size_t mTemporalLayers;
+
+    // Temporal layer bitrate ratio in percentage
+ uint32_t mTemporalLayerBitrateRatio[MAXTEMPORALLAYERS];
+
+ // Temporal pattern type
+ TemporalPatternType mTemporalPatternType;
+
+ // Temporal pattern length
+ size_t mTemporalPatternLength;
+
+ // Temporal pattern current index
+ size_t mTemporalPatternIdx;
+
+ // Frame type temporal pattern
+ TemporalReferences mTemporalPattern[kMaxTemporalPattern];
+
+ // Last input buffer timestamp
+ uint64_t mLastTimestamp;
+
+ // Number of input frames
+ int64_t mNumInputFrames;
+
+    // Conversion buffer is needed to convert the input to
+    // YUV420 planar format.
+ MemoryBlock mConversionBuffer;
+
+ // Signalled EOS
+ bool mSignalledOutputEos;
+
+ // Signalled Error
+ bool mSignalledError;
+
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+
+ C2_DO_NOT_COPY(C2SoftVpxEnc);
+};
+
+class C2SoftVpxEnc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+#ifdef VP9
+ MEDIA_MIMETYPE_VIDEO_VP9
+#else
+ MEDIA_MIMETYPE_VIDEO_VP8
+#endif
+ ))
+ .build());
+
+ addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
+ .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_CONST))
+ .withFields({
+ C2F(mBitrateMode, value).oneOf({
+ C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
+ })
+ .withSetter(
+ Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(
+ Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
+ .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
+ .withFields({
+ C2F(mLayering, m.layerCount).inRange(0, 4),
+ C2F(mLayering, m.bLayerCount).inRange(0, 0),
+ C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
+ })
+ .withSetter(LayeringSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+ .withConstValue(new C2StreamIntraRefreshTuning::output(
+ 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
+ .withFields({
+ C2F(mProfileLevel, profile).equalTo(
+ PROFILE_VP9_0
+ ),
+ C2F(mProfileLevel, level).equalTo(
+ LEVEL_VP9_4_1),
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
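+ // clamp requests below the encoder's minimum supported bitrate of 4096 bps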
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
+ C2P<C2StreamPictureSizeInfo::input> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_VP9_0;
+ }
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ me.set().level = LEVEL_VP9_4_1;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.m.layerCount > 4) {
+ me.set().m.layerCount = 4;
+ }
+ me.set().m.bLayerCount = 0;
+ // ensure ratios are monotonic and clamped between 0 and 1
+ for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
+ me.set().m.bitrateRatios[ix] = c2_clamp(
+ ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
+ }
+ ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
+ return res;
+ }
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
+ std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
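+ // Convert the configured sync-frame interval (in microseconds) into a key-frame period in frames.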
+ uint32_t getSyncFramePeriod() const {
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamTemporalLayeringTuning::output> mLayering;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_VPX_ENC_H__
diff --git a/media/codec2/components/vpx/MODULE_LICENSE_APACHE2 b/media/codec2/components/vpx/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/codec2/components/vpx/MODULE_LICENSE_APACHE2
diff --git a/media/codec2/components/vpx/NOTICE b/media/codec2/components/vpx/NOTICE
new file mode 100644
index 0000000..faed58a
--- /dev/null
+++ b/media/codec2/components/vpx/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2013, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/codec2/components/xaac/Android.bp b/media/codec2/components/xaac/Android.bp
new file mode 100644
index 0000000..19c12cf
--- /dev/null
+++ b/media/codec2/components/xaac/Android.bp
@@ -0,0 +1,11 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2xaacdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftXaacDec.cpp"],
+
+ static_libs: ["libxaacdec"],
+}
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
new file mode 100644
index 0000000..1c0e70b
--- /dev/null
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -0,0 +1,1583 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftXaacDec"
+#include <log/log.h>
+
+#include <inttypes.h>
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftXaacDec.h"
+
+#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT 1.0 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
+// names of properties that can be used to override the default DRC settings
+#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
+#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
+#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
+#define PROP_DRC_OVERRIDE_EFFECT_TYPE "ro.aac_drc_effect_type"
+
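+// Propagate fatal library errors to the caller; non-fatal return codes are only logged.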
+#define RETURN_IF_FATAL(retval, str) \
+ if (retval & IA_FATAL_ERROR) { \
+ ALOGE("Error in %s: Returned: %d", str, retval); \
+ return retval; \
+ } else if (retval != IA_NO_ERROR) { \
+ ALOGW("Warning in %s: Returned: %d", str, retval); \
+ }
+
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.xaac.decoder";
+
+class C2SoftXaacDec::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_AAC))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
+ .withFields({C2F(mSampleRate, value).oneOf({
+ 7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
+ })})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::input(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(8000, 960000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192))
+ .build());
+
+ addParameter(
+ DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING)
+ .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw))
+ .withFields({C2F(mAacFormat, value).oneOf({
+ C2AacStreamFormatRaw, C2AacStreamFormatAdts
+ })})
+ .withSetter(Setter<decltype(*mAacFormat)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AAC_LC,
+ C2Config::PROFILE_AAC_HE,
+ C2Config::PROFILE_AAC_HE_PS,
+ C2Config::PROFILE_AAC_LD,
+ C2Config::PROFILE_AAC_ELD,
+ C2Config::PROFILE_AAC_XHE}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_UNUSED
+ })
+ })
+ .withSetter(ProfileLevelSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE)
+ .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY))
+ .withFields({
+ C2F(mDrcCompressMode, value).oneOf({
+ C2Config::DRC_COMPRESSION_ODM_DEFAULT,
+ C2Config::DRC_COMPRESSION_NONE,
+ C2Config::DRC_COMPRESSION_LIGHT,
+ C2Config::DRC_COMPRESSION_HEAVY})
+ })
+ .withSetter(Setter<decltype(*mDrcCompressMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL)
+ .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL))
+ .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)})
+ .withSetter(Setter<decltype(*mDrcTargetRefLevel)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL)
+ .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL))
+ .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)})
+ .withSetter(Setter<decltype(*mDrcEncTargetLevel)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR)
+ .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST))
+ .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)})
+ .withSetter(Setter<decltype(*mDrcBoostFactor)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR)
+ .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT))
+ .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)})
+ .withSetter(Setter<decltype(*mDrcAttenuationFactor)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE)
+ .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE))
+ .withFields({
+ C2F(mDrcEffectType, value).oneOf({
+ C2Config::DRC_EFFECT_ODM_DEFAULT,
+ C2Config::DRC_EFFECT_OFF,
+ C2Config::DRC_EFFECT_NONE,
+ C2Config::DRC_EFFECT_LATE_NIGHT,
+ C2Config::DRC_EFFECT_NOISY_ENVIRONMENT,
+ C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE,
+ C2Config::DRC_EFFECT_LOW_PLAYBACK_LEVEL,
+ C2Config::DRC_EFFECT_DIALOG_ENHANCEMENT,
+ C2Config::DRC_EFFECT_GENERAL_COMPRESSION})
+ })
+ .withSetter(Setter<decltype(*mDrcEffectType)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me) {
+ (void)mayBlock;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
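+ // The getters below convert the C2 DRC parameters (dB and normalized 0..1 values) into the
+ // integer step units used by the decoder: reference/target levels in -0.25 dB steps,
+ // boost and attenuation factors scaled to 0..127.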
+ int32_t getDrcCompressMode() const { return mDrcCompressMode->value == C2Config::DRC_COMPRESSION_HEAVY ? 1 : 0; }
+ int32_t getDrcTargetRefLevel() const { return (mDrcTargetRefLevel->value <= 0 ? -mDrcTargetRefLevel->value * 4. + 0.5 : -1); }
+ int32_t getDrcEncTargetLevel() const { return (mDrcEncTargetLevel->value <= 0 ? -mDrcEncTargetLevel->value * 4. + 0.5 : -1); }
+ int32_t getDrcBoostFactor() const { return mDrcBoostFactor->value * 127. + 0.5; }
+ int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. + 0.5; }
+ int32_t getDrcEffectType() const { return mDrcEffectType->value; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+ std::shared_ptr<C2StreamAacFormatInfo::input> mAacFormat;
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamDrcCompressionModeTuning::input> mDrcCompressMode;
+ std::shared_ptr<C2StreamDrcTargetReferenceLevelTuning::input> mDrcTargetRefLevel;
+ std::shared_ptr<C2StreamDrcEncodedTargetLevelTuning::input> mDrcEncTargetLevel;
+ std::shared_ptr<C2StreamDrcBoostFactorTuning::input> mDrcBoostFactor;
+ std::shared_ptr<C2StreamDrcAttenuationFactorTuning::input> mDrcAttenuationFactor;
+ std::shared_ptr<C2StreamDrcEffectTypeTuning::input> mDrcEffectType;
+ // TODO Add : C2StreamAacSbrModeTuning
+};
+
+C2SoftXaacDec::C2SoftXaacDec(
+ const char* name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mXheaacCodecHandle(nullptr),
+ mMpegDDrcHandle(nullptr),
+ mOutputDrainBuffer(nullptr) {
+}
+
+C2SoftXaacDec::~C2SoftXaacDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftXaacDec::onInit() {
+ mOutputFrameLength = 1024;
+ mInputBuffer = nullptr;
+ mOutputBuffer = nullptr;
+ mSampFreq = 0;
+ mNumChannels = 0;
+ mPcmWdSz = 0;
+ mChannelMask = 0;
+ mNumOutBytes = 0;
+ mCurFrameIndex = 0;
+ mCurTimestamp = 0;
+ mIsCodecInitialized = false;
+ mIsCodecConfigFlushRequired = false;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+ mOutputDrainBufferWritePos = 0;
+ mDRCFlag = 0;
+ mMpegDDRCPresent = 0;
+ mMemoryVec.clear();
+ mDrcMemoryVec.clear();
+
+ IA_ERRORCODE err = initDecoder();
+ return err == IA_NO_ERROR ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftXaacDec::onStop() {
+ mOutputFrameLength = 1024;
+ drainDecoder();
+ // reset the "configured" state
+ mSampFreq = 0;
+ mNumChannels = 0;
+ mPcmWdSz = 0;
+ mChannelMask = 0;
+ mNumOutBytes = 0;
+ mCurFrameIndex = 0;
+ mCurTimestamp = 0;
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+ mOutputDrainBufferWritePos = 0;
+ mDRCFlag = 0;
+ mMpegDDRCPresent = 0;
+
+ return C2_OK;
+}
+
+void C2SoftXaacDec::onReset() {
+ (void)onStop();
+}
+
+void C2SoftXaacDec::onRelease() {
+ IA_ERRORCODE errCode = deInitXAACDecoder();
+ if (IA_NO_ERROR != errCode) ALOGE("deInitXAACDecoder() failed %d", errCode);
+
+ errCode = deInitMPEGDDDrc();
+ if (IA_NO_ERROR != errCode) ALOGE("deInitMPEGDDDrc() failed %d", errCode);
+
+ if (mOutputDrainBuffer) {
+ delete[] mOutputDrainBuffer;
+ mOutputDrainBuffer = nullptr;
+ }
+}
+
+IA_ERRORCODE C2SoftXaacDec::initDecoder() {
+ ALOGV("initDecoder()");
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+
+ err_code = initXAACDecoder();
+ if (err_code != IA_NO_ERROR) {
+ ALOGE("initXAACDecoder Failed");
+ /* Call deInit to free any allocated memory */
+ deInitXAACDecoder();
+ return IA_FATAL_ERROR;
+ }
+
+ if (!mOutputDrainBuffer) {
+ mOutputDrainBuffer = new (std::nothrow) char[kOutputDrainBufferSize];
+ if (!mOutputDrainBuffer) return IA_FATAL_ERROR;
+ }
+
+ err_code = initXAACDrc();
+ RETURN_IF_FATAL(err_code, "initXAACDrc");
+
+ return IA_NO_ERROR;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftXaacDec::finishWork(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ ALOGV("mCurFrameIndex = %" PRIu64, mCurFrameIndex);
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err =
+ pool->fetchLinearBlock(mOutputDrainBufferWritePos, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock failed : err = %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ int16_t* outBuffer = reinterpret_cast<int16_t*>(wView.data());
+ memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos);
+ mOutputDrainBufferWritePos = 0;
+
+ auto fillWork = [buffer = createLinearBuffer(block)](
+ const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) {
+ fillWork(work);
+ } else {
+ finish(mCurFrameIndex, fillWork);
+ }
+
+ ALOGV("out timestamp %" PRIu64 " / %u", mCurTimestamp, block->capacity());
+}
+
+void C2SoftXaacDec::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ uint8_t* inBuffer = nullptr;
+ uint32_t inBufferLength = 0;
+ C2ReadView view = mDummyReadView;
+ size_t offset = 0u;
+ size_t size = 0u;
+ if (!work->input.buffers.empty()) {
+ view = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ size = view.capacity();
+ }
+ if (size && view.error()) {
+ ALOGE("read view map failed %d", view.error());
+ work->result = view.error();
+ return;
+ }
+
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+ bool codecConfig =
+ (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+ if (codecConfig) {
+ if (size == 0u) {
+ ALOGE("empty codec config");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ // const_cast because of libAACdec method signature.
+ inBuffer = const_cast<uint8_t*>(view.data() + offset);
+ inBufferLength = size;
+
+ /* GA header configuration sent to Decoder! */
+ IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
+ if (IA_NO_ERROR != err_code) {
+ ALOGE("configXAACDecoder err_code = %d", err_code);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.buffers.clear();
+ return;
+ }
+
+ mCurFrameIndex = work->input.ordinal.frameIndex.peeku();
+ mCurTimestamp = work->input.ordinal.timestamp.peeku();
+ mOutputDrainBufferWritePos = 0;
+ char* tempOutputDrainBuffer = mOutputDrainBuffer;
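+ // Decode access units from the input until it is exhausted or the drain buffer
+ // cannot hold another decoded frame.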
+ while (size > 0u) {
+ if ((kOutputDrainBufferSize * sizeof(int16_t) -
+ mOutputDrainBufferWritePos) <
+ (mOutputFrameLength * sizeof(int16_t) * mNumChannels)) {
+ ALOGV("skipping decode: not enough space left in DrainBuffer");
+ break;
+ }
+
+ ALOGV("inAttribute size = %zu", size);
+ if (mIntf->isAdts()) {
+ ALOGV("ADTS");
+ size_t adtsHeaderSize = 0;
+ // skip 30 bits, aac_frame_length follows.
+ // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+
+ const uint8_t* adtsHeader = view.data() + offset;
+ bool signalError = false;
+ if (size < 7) {
+ ALOGE("Audio data too short to contain even the ADTS header. "
+ "Got %zu bytes.", size);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ bool protectionAbsent = (adtsHeader[1] & 1);
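+ // aac_frame_length is a 13-bit field spread across bytes 3..5 of the ADTS header.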
+ unsigned aac_frame_length = ((adtsHeader[3] & 3) << 11) |
+ (adtsHeader[4] << 3) |
+ (adtsHeader[5] >> 5);
+
+ if (size < aac_frame_length) {
+ ALOGE("Not enough audio data for the complete frame. "
+ "Got %zu bytes, frame size according to the ADTS "
+ "header is %u bytes.", size, aac_frame_length);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ adtsHeaderSize = (protectionAbsent ? 7 : 9);
+ if (aac_frame_length < adtsHeaderSize) {
+ signalError = true;
+ } else {
+ // const_cast because of libAACdec method signature.
+ inBuffer =
+ const_cast<uint8_t*>(adtsHeader + adtsHeaderSize);
+ inBufferLength = aac_frame_length - adtsHeaderSize;
+
+ offset += adtsHeaderSize;
+ size -= adtsHeaderSize;
+ }
+ }
+ }
+
+ if (signalError) {
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } else {
+ ALOGV("Non ADTS");
+ // const_cast because of libAACdec method signature.
+ inBuffer = const_cast<uint8_t*>(view.data() + offset);
+ inBufferLength = size;
+ }
+
+ signed int prevSampleRate = mSampFreq;
+ signed int prevNumChannels = mNumChannels;
+
+ /* The XAAC decoder expects the first frame to be fed via the configXAACDecoder
+ * API, which should initialize the codec. Once this state is reached, call the
+ * decodeXAACStream API with the same frame to decode it. */
+ if (!mIsCodecInitialized) {
+ IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
+ if (IA_NO_ERROR != err_code) {
+ ALOGE("configXAACDecoder Failed 2 err_code = %d", err_code);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if ((mSampFreq != prevSampleRate) ||
+ (mNumChannels != prevNumChannels)) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mSampFreq, prevNumChannels, mNumChannels);
+
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, mSampFreq);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, mNumChannels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
+ signed int bytesConsumed = 0;
+ IA_ERRORCODE errorCode = IA_NO_ERROR;
+ if (mIsCodecInitialized) {
+ mIsCodecConfigFlushRequired = true;
+ errorCode = decodeXAACStream(inBuffer, inBufferLength,
+ &bytesConsumed, &mNumOutBytes);
+ } else if (!mIsCodecConfigFlushRequired) {
+ ALOGW("Assumption that first frame after header initializes decoder Failed!");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ size -= bytesConsumed;
+ offset += bytesConsumed;
+
+ if (inBufferLength != (uint32_t)bytesConsumed)
+ ALOGW("All data not consumed");
+
+ /* In case of error, the decoder would have produced an empty buffer */
+ if ((IA_NO_ERROR != errorCode) && (0 == mNumOutBytes) && mIsCodecInitialized)
+ mNumOutBytes = mOutputFrameLength * (mPcmWdSz / 8) * mNumChannels;
+
+ if (!bytesConsumed) {
+ ALOGW("bytesConsumed = 0 should never happen");
+ }
+
+ if ((uint32_t)mNumOutBytes >
+ mOutputFrameLength * sizeof(int16_t) * mNumChannels) {
+ ALOGE("mNumOutBytes > mOutputFrameLength * sizeof(int16_t) * mNumChannels, should never happen");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (IA_NO_ERROR != errorCode) {
+ // TODO: check for overflow, ASAN
+ memset(mOutputBuffer, 0, mNumOutBytes);
+
+ // Discard input buffer.
+ size = 0;
+
+ // fall through
+ }
+ memcpy(tempOutputDrainBuffer, mOutputBuffer, mNumOutBytes);
+ tempOutputDrainBuffer += mNumOutBytes;
+ mOutputDrainBufferWritePos += mNumOutBytes;
+ }
+
+ if (mOutputDrainBufferWritePos) {
+ finishWork(work, pool);
+ } else {
+ fillEmptyWork(work);
+ }
+ if (eos) mSignalledOutputEos = true;
+}
+
+c2_status_t C2SoftXaacDec::drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ (void)pool;
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ return C2_OK;
+}
+
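+// Flush the decoder's internal stream buffers and re-read the stream configuration once the
+// library reports that initialization is complete again.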
+IA_ERRORCODE C2SoftXaacDec::configflushDecode() {
+ IA_ERRORCODE err_code;
+ uint32_t ui_init_done;
+ uint32_t inBufferLength = 8203;
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_FLUSH_MEM,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_INPUT_BYTES,
+ 0,
+ &inBufferLength);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_FLUSH_MEM,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_DONE_QUERY,
+ &ui_init_done);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY");
+
+ if (ui_init_done) {
+ err_code = getXAACStreamInfo();
+ RETURN_IF_FATAL(err_code, "getXAACStreamInfo");
+ ALOGV("Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz %d\nchannelMask %d\noutputFrameLength %d",
+ mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
+ mIsCodecInitialized = true;
+ }
+ return IA_NO_ERROR;
+}
+
+c2_status_t C2SoftXaacDec::onFlush_sm() {
+ if (mIsCodecInitialized) {
+ IA_ERRORCODE err_code = configflushDecode();
+ if (err_code != IA_NO_ERROR) {
+ ALOGE("Error in configflushDecode: Error %d", err_code);
+ }
+ }
+ drainDecoder();
+ mSignalledOutputEos = false;
+ mSignalledError = false;
+
+ return C2_OK;
+}
+
+IA_ERRORCODE C2SoftXaacDec::drainDecoder() {
+ /* Output delay compensation logic should sit here. */
+ /* Nothing to be done as XAAC decoder does not introduce output buffer delay */
+
+ return 0;
+}
+
+IA_ERRORCODE C2SoftXaacDec::initXAACDecoder() {
+ /* First part */
+ /* Error Handler Init */
+ /* Get Library Name, Library Version and API Version */
+ /* Initialize API structure + Default config set */
+ /* Set config params from user */
+ /* Initialize memory tables */
+ /* Get memory information and allocate memory */
+
+ mInputBufferSize = 0;
+ mInputBuffer = nullptr;
+ mOutputBuffer = nullptr;
+ /* End of process struct init */
+
+ /* ******************************************************************/
+ /* Initialize API structure and set config params to default */
+ /* ******************************************************************/
+ /* API size */
+ uint32_t pui_api_size;
+ /* Get the API size */
+ IA_ERRORCODE err_code = ixheaacd_dec_api(nullptr,
+ IA_API_CMD_GET_API_SIZE,
+ 0,
+ &pui_api_size);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
+
+ /* Allocate memory for API */
+ mXheaacCodecHandle = memalign(4, pui_api_size);
+ if (!mXheaacCodecHandle) {
+ ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4);
+ return IA_FATAL_ERROR;
+ }
+ mMemoryVec.push(mXheaacCodecHandle);
+
+ /* Set the config params to default values */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
+
+ /* Get the API size */
+ err_code = ia_drc_dec_api(nullptr, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
+
+ /* Allocate memory for API */
+ mMpegDDrcHandle = memalign(4, pui_api_size);
+ if (!mMpegDDrcHandle) {
+ ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4);
+ return IA_FATAL_ERROR;
+ }
+ mMemoryVec.push(mMpegDDrcHandle);
+
+ /* Set the config params to default values */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS, nullptr);
+
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS");
+
+ /* ******************************************************************/
+ /* Set config parameters */
+ /* ******************************************************************/
+ uint32_t ui_mp4_flag = 1;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4,
+ &ui_mp4_flag);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4");
+
+ /* ******************************************************************/
+ /* Initialize Memory info tables */
+ /* ******************************************************************/
+ uint32_t ui_proc_mem_tabs_size;
+ pVOID pv_alloc_ptr;
+ /* Get memory info tables size */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_MEMTABS_SIZE,
+ 0,
+ &ui_proc_mem_tabs_size);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEMTABS_SIZE");
+
+ pv_alloc_ptr = memalign(4, ui_proc_mem_tabs_size);
+ if (!pv_alloc_ptr) {
+ ALOGE("Malloc for size (ui_proc_mem_tabs_size + 4) = %d failed!", ui_proc_mem_tabs_size + 4);
+ return IA_FATAL_ERROR;
+ }
+ mMemoryVec.push(pv_alloc_ptr);
+
+ /* Set pointer for process memory tables */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_MEMTABS_PTR,
+ 0,
+ pv_alloc_ptr);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR");
+
+ /* initialize the API, post config, fill memory tables */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
+
+ /* ******************************************************************/
+ /* Allocate Memory with info from library */
+ /* ******************************************************************/
+ /* There are four different types of memory that need to be allocated: */
+ /* persistent, scratch, input and output */
+ for (int i = 0; i < 4; i++) {
+ int ui_size = 0, ui_alignment = 0, ui_type = 0;
+
+ /* Get memory size */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_MEM_INFO_SIZE,
+ i,
+ &ui_size);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
+
+ /* Get memory alignment */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_MEM_INFO_ALIGNMENT,
+ i,
+ &ui_alignment);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
+
+ /* Get memory type */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_MEM_INFO_TYPE,
+ i,
+ &ui_type);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
+
+ pv_alloc_ptr = memalign(ui_alignment, ui_size);
+ if (!pv_alloc_ptr) {
+ ALOGE("Malloc for size (ui_size + ui_alignment) = %d failed!",
+ ui_size + ui_alignment);
+ return IA_FATAL_ERROR;
+ }
+ mMemoryVec.push(pv_alloc_ptr);
+
+ /* Set the buffer pointer */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_MEM_PTR,
+ i,
+ pv_alloc_ptr);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+ if (ui_type == IA_MEMTYPE_INPUT) {
+ mInputBuffer = (pWORD8)pv_alloc_ptr;
+ mInputBufferSize = ui_size;
+ }
+ if (ui_type == IA_MEMTYPE_OUTPUT)
+ mOutputBuffer = (pWORD8)pv_alloc_ptr;
+ }
+ /* End first part */
+
+ return IA_NO_ERROR;
+}
+
+status_t C2SoftXaacDec::initXAACDrc() {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+ unsigned int ui_drc_val;
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
+ int32_t targetRefLevel = mIntf->getDrcTargetRefLevel();
+ ALOGV("AAC decoder using desired DRC target reference level of %d", targetRefLevel);
+ ui_drc_val = (unsigned int)targetRefLevel;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL,
+ &ui_drc_val);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
+
+ /* Use ui_drc_val from PROP_DRC_OVERRIDE_REF_LEVEL or DRC_DEFAULT_MOBILE_REF_LEVEL
+ * for IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS too */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &ui_drc_val);
+
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
+
+ int32_t attenuationFactor = mIntf->getDrcAttenuationFactor();
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d", attenuationFactor);
+ ui_drc_val = (unsigned int)attenuationFactor;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT,
+ &ui_drc_val);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
+
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
+ int32_t boostFactor = mIntf->getDrcBoostFactor();
+ ALOGV("AAC decoder using desired DRC boost factor of %d", boostFactor);
+ ui_drc_val = (unsigned int)boostFactor;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST,
+ &ui_drc_val);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
+
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ int32_t compressMode = mIntf->getDrcCompressMode();
+ ALOGV("AAC decoder using desried DRC heavy compression switch of %d", compressMode);
+ ui_drc_val = (unsigned int)compressMode;
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP,
+ &ui_drc_val);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
+
+ // AAC_UNIDRC_SET_EFFECT
+ int32_t effectType = mIntf->getDrcEffectType();
+ ALOGV("AAC decoder using MPEG-D DRC effect type %d", effectType);
+ ui_drc_val = (unsigned int)effectType;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &ui_drc_val);
+
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
+
+ return IA_NO_ERROR;
+}
+
+IA_ERRORCODE C2SoftXaacDec::deInitXAACDecoder() {
+ ALOGV("deInitXAACDecoder");
+
+ /* Error code */
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+
+ if (mXheaacCodecHandle) {
+ /* Tell that the input is over in this buffer */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INPUT_OVER,
+ 0,
+ nullptr);
+ }
+
+ /* Irrespective of error returned in IA_API_CMD_INPUT_OVER, free allocated memory */
+ for (void* buf : mMemoryVec) {
+ if (buf) free(buf);
+ }
+ mMemoryVec.clear();
+ mXheaacCodecHandle = nullptr;
+
+ return err_code;
+}
+
+IA_ERRORCODE C2SoftXaacDec::deInitMPEGDDDrc() {
+ ALOGV("deInitMPEGDDDrc");
+
+ for (void* buf : mDrcMemoryVec) {
+ if (buf) free(buf);
+ }
+ mDrcMemoryVec.clear();
+ return IA_NO_ERROR;
+}
+
+IA_ERRORCODE C2SoftXaacDec::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) {
+ if (mInputBufferSize < inBufferLength) {
+ ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize, inBufferLength);
+ return IA_FATAL_ERROR;
+ }
+ /* Copy the buffer passed by Android plugin to codec input buffer */
+ memcpy(mInputBuffer, inBuffer, inBufferLength);
+
+ /* Set number of bytes to be processed */
+ IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_INPUT_BYTES,
+ 0,
+ &inBufferLength);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+ if (mIsCodecConfigFlushRequired) {
+ /* If codec is already initialized, then GA header is passed again */
+ /* Need to call the Flush API instead of INIT_PROCESS */
+ mIsCodecInitialized = false; /* Codec needs to be Reinitialized after flush */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_GA_HDR,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_GA_HDR");
+ } else {
+ /* Initialize the process */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_PROCESS,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS");
+ }
+
+ uint32_t ui_init_done;
+ /* Checking for end of initialization */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_DONE_QUERY,
+ &ui_init_done);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY");
+
+ /* How much buffer is used in input buffers */
+ int32_t i_bytes_consumed;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CURIDX_INPUT_BUF,
+ 0,
+ &i_bytes_consumed);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
+
+ if (ui_init_done) {
+ err_code = getXAACStreamInfo();
+ RETURN_IF_FATAL(err_code, "getXAACStreamInfo");
+ ALOGI("Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz %d\nchannelMask %d\noutputFrameLength %d",
+ mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
+ mIsCodecInitialized = true;
+
+ err_code = configMPEGDDrc();
+ RETURN_IF_FATAL(err_code, "configMPEGDDrc");
+ }
+
+ return IA_NO_ERROR;
+}
+
+IA_ERRORCODE C2SoftXaacDec::initMPEGDDDrc() {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+
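+ // Allocate the first two memory regions requested by the MPEG-D DRC library, then attach
+ // fixed-size input (index 2) and output (index 3) buffers below.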
+ for (int i = 0; i < (WORD32)2; i++) {
+ WORD32 ui_size, ui_alignment, ui_type;
+ pVOID pv_alloc_ptr;
+
+ /* Get memory size */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
+
+ /* Get memory alignment */
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i, &ui_alignment);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
+
+ /* Get memory type */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
+
+ pv_alloc_ptr = memalign(4, ui_size);
+ if (pv_alloc_ptr == nullptr) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(pv_alloc_ptr);
+
+ /* Set the buffer pointer */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+ }
+
+ WORD32 ui_size;
+ ui_size = 8192 * 2;
+
+ mDrcInBuf = (int8_t*)memalign(4, ui_size);
+ if (mDrcInBuf == nullptr) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(mDrcInBuf);
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 2, mDrcInBuf);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+
+ mDrcOutBuf = (int8_t*)memalign(4, ui_size);
+ if (mDrcOutBuf == nullptr) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(mDrcOutBuf);
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 3, mDrcOutBuf);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+
+ return IA_NO_ERROR;
+}
+
+int C2SoftXaacDec::configMPEGDDrc() {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+ int i_effect_type;
+ int i_loud_norm;
+ int i_target_loudness;
+ unsigned int i_sbr_mode;
+
+ /* Sampling Frequency */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ, &mSampFreq);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ");
+ /* Total Number of Channels */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS");
+
+ /* PCM word size */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ");
+
+ /*Set Effect Type*/
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
+
+ /*Set target loudness */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS,
+ &i_target_loudness);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
+
+ /*Set loud_norm_flag*/
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, &i_sbr_mode);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, nullptr);
+
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
+
+ /* Free any memory that is allocated for MPEG D Drc so far */
+ deInitMPEGDDDrc();
+
+ err_code = initMPEGDDDrc();
+ if (err_code != IA_NO_ERROR) {
+ ALOGE("initMPEGDDDrc failed with error %d", err_code);
+ deInitMPEGDDDrc();
+ return err_code;
+ }
+
+ /* DRC buffers
+ buf[0] - contains the loudness-related extension element payload
+ buf[1] - contains the extension element payload */
+ {
+ VOID* p_array[2][16];
+ WORD32 ii;
+ WORD32 buf_sizes[2][16];
+ WORD32 num_elements;
+ WORD32 num_config_ext;
+ WORD32 bit_str_fmt = 1;
+
+ WORD32 uo_num_chan;
+
+ memset(buf_sizes, 0, 32 * sizeof(WORD32));
+
+ err_code =
+ ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES, &buf_sizes[0][0]);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR, &p_array);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR");
+
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_SET_BUFF_PTR, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_SET_BUFF_PTR");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE, &num_elements);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT, &num_config_ext);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT");
+
+ for (ii = 0; ii < num_config_ext; ii++) {
+ /*copy loudness bitstream*/
+ if (buf_sizes[0][ii] > 0) {
+ memcpy(mDrcInBuf, p_array[0][ii], buf_sizes[0][ii]);
+
+ /*Set bitstream_split_format */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
+
+ /* Set number of bytes to be processed */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IL_BS, 0,
+ &buf_sizes[0][ii]);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IL_BS");
+
+ /* Execute process */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF");
+
+ mDRCFlag = 1;
+ }
+ }
+
+ for (ii = 0; ii < num_elements; ii++) {
+ /*copy config bitstream*/
+ if (buf_sizes[1][ii] > 0) {
+ memcpy(mDrcInBuf, p_array[1][ii], buf_sizes[1][ii]);
+ /* Set number of bytes to be processed */
+
+ /*Set bitstream_split_format */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IC_BS, 0,
+ &buf_sizes[1][ii]);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IC_BS");
+
+ /* Execute process */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF, nullptr);
+
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF");
+
+ mDRCFlag = 1;
+ }
+ }
+
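+ /* The MPEG-D DRC module is treated as present only if at least one
+ loudness or config-extension payload was copied above. */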
+ if (mDRCFlag == 1) {
+ mMpegDDRCPresent = 1;
+ } else {
+ mMpegDDRCPresent = 0;
+ }
+
+ /* Configure the DRC interface, copy the interface bitstream and run DRC initialization */
+ if (mMpegDDRCPresent == 1) {
+ WORD32 interface_is_present = 1;
+
+ if (i_sbr_mode != 0) {
+ if (i_sbr_mode == 1) {
+ mOutputFrameLength = 2048;
+ } else if (i_sbr_mode == 3) {
+ mOutputFrameLength = 4096;
+ } else {
+ mOutputFrameLength = 1024;
+ }
+ } else {
+ mOutputFrameLength = 4096;
+ }
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE, (WORD32 *)&mOutputFrameLength);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE");
+
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT, &interface_is_present);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT");
+
+ /* Execute process */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_PROCESS, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &uo_num_chan);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS");
+ }
+ }
+
+ return err_code;
+}
+
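+ /* Decodes one access unit: copies the input into the codec's internal
+ buffer, runs a single execute call, forwards any DRC gain payload to
+ the MPEG-D DRC module, and reports bytes consumed and output bytes.
+ When DRC data is present, the decoded PCM is additionally passed
+ through the DRC module before being returned in mOutputBuffer. */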
+IA_ERRORCODE C2SoftXaacDec::decodeXAACStream(uint8_t* inBuffer,
+ uint32_t inBufferLength,
+ int32_t* bytesConsumed,
+ int32_t* outBytes) {
+ if (mInputBufferSize < inBufferLength) {
+ ALOGE("Cannot decode; internal input buffer size %u < inBufferLength %u", mInputBufferSize, inBufferLength);
+ return -1;
+ }
+ /* Copy the buffer passed by Android plugin to codec input buffer */
+ memcpy(mInputBuffer, inBuffer, inBufferLength);
+
+ /* Set number of bytes to be processed */
+ IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_SET_INPUT_BYTES,
+ 0,
+ &inBufferLength);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+ /* Execute process */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_EXECUTE,
+ IA_CMD_TYPE_DO_EXECUTE,
+ nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE");
+
+ /* Checking for end of processing */
+ uint32_t ui_exec_done;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_EXECUTE,
+ IA_CMD_TYPE_DONE_QUERY,
+ &ui_exec_done);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DONE_QUERY");
+
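+ /* While the codec has not signalled end of processing, check whether this
+ frame carried a DRC gain payload and, if so, hand it to the DRC module. */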
+ if (ui_exec_done != 1) {
+ VOID* p_array; // ITTIAM:buffer to handle gain payload
+ WORD32 buf_size = 0; // ITTIAM:gain payload length
+ WORD32 bit_str_fmt = 1;
+ WORD32 gain_stream_flag = 1;
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN, &buf_size);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN");
+
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF, &p_array);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF");
+
+ if (buf_size > 0) {
+ /*Set bitstream_split_format */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT");
+
+ memcpy(mDrcInBuf, p_array, buf_size);
+ /* Set number of bytes to be processed */
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_BS, 0, &buf_size);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_BS");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_GAIN_STREAM_FLAG, &gain_stream_flag);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_GAIN_STREAM_FLAG");
+
+ /* Execute process */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
+ IA_CMD_TYPE_INIT_CPY_BSF_BUFF, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_BSF_BUFF");
+
+ mMpegDDRCPresent = 1;
+ }
+ }
+
+ /* How much buffer is used in input buffers */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CURIDX_INPUT_BUF,
+ 0,
+ bytesConsumed);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF");
+
+ /* Get the output bytes */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_OUTPUT_BYTES,
+ 0,
+ outBytes);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_OUTPUT_BYTES");
+
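+ /* Post-process the decoded PCM through the MPEG-D DRC module when DRC
+ metadata has been seen for this stream. */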
+ if (mMpegDDRCPresent == 1) {
+ memcpy(mDrcInBuf, mOutputBuffer, *outBytes);
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES, 0, outBytes);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES");
+
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_EXECUTE, IA_CMD_TYPE_DO_EXECUTE, nullptr);
+ RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE");
+
+ memcpy(mOutputBuffer, mDrcOutBuf, *outBytes);
+ }
+ return IA_NO_ERROR;
+}
+
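+ /* Queries the decoder for the current stream properties: sample rate,
+ channel count, PCM word size, channel mask, channel mode and SBR mode,
+ and derives the expected output frame length from the SBR mode. */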
+IA_ERRORCODE C2SoftXaacDec::getXAACStreamInfo() {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+
+ /* Sampling frequency */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ,
+ &mSampFreq);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ");
+
+ /* Total Number of Channels */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS,
+ &mNumChannels);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS");
+ if (mNumChannels > MAX_CHANNEL_COUNT) {
+ ALOGE("Channel count %d exceeds the maximum of %d supported channels\n", mNumChannels, MAX_CHANNEL_COUNT);
+ return IA_FATAL_ERROR;
+ }
+
+ /* PCM word size */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ,
+ &mPcmWdSz);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ");
+ if ((mPcmWdSz / 8) != 2) {
+ ALOGE("Unsupported PCM word size %d; only 16-bit output is supported\n", mPcmWdSz);
+ return IA_FATAL_ERROR;
+ }
+
+ /* channel mask to tell the arrangement of channels in bit stream */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK,
+ &mChannelMask);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK");
+
+ /* Channel mode to tell MONO/STEREO/DUAL-MONO/NONE_OF_THESE */
+ uint32_t ui_channel_mode;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE,
+ &ui_channel_mode);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE");
+ if (ui_channel_mode == 0)
+ ALOGV("Channel Mode: MONO_OR_PS\n");
+ else if (ui_channel_mode == 1)
+ ALOGV("Channel Mode: STEREO\n");
+ else if (ui_channel_mode == 2)
+ ALOGV("Channel Mode: DUAL-MONO\n");
+ else
+ ALOGV("Channel Mode: NONE_OF_THESE or MULTICHANNEL\n");
+
+ /* SBR mode to tell SBR PRESENT/NOT_PRESENT */
+ uint32_t ui_sbr_mode;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle,
+ IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE,
+ &ui_sbr_mode);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE");
+ if (ui_sbr_mode == 0)
+ ALOGV("SBR Mode: NOT_PRESENT\n");
+ else if (ui_sbr_mode == 1)
+ ALOGV("SBR Mode: PRESENT\n");
+ else
+ ALOGV("SBR Mode: ILLEGAL\n");
+
+ /* mOutputFrameLength = 1024 * (1 + SBR_MODE) for AAC. */
+ /* For USAC it could be 1024 * 3; the codec does not yet */
+ /* support querying this value. */
+ mOutputFrameLength = 1024 * (1 + ui_sbr_mode);
+ ALOGI("mOutputFrameLength %u ui_sbr_mode %u", mOutputFrameLength, ui_sbr_mode);
+
+ return IA_NO_ERROR;
+}
+
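+ /* Applies the legacy DRC parameters (cut, boost, reference level, heavy
+ compression, effect type) to the xHE-AAC decoder and mirrors the
+ resulting effect type, target loudness and loudness-normalization flag
+ into the MPEG-D DRC module. */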
+IA_ERRORCODE C2SoftXaacDec::setXAACDRCInfo(int32_t drcCut, int32_t drcBoost,
+ int32_t drcRefLevel,
+ int32_t drcHeavyCompression,
+ int32_t drEffectType) {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+
+ int32_t ui_drc_enable = 1;
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE,
+ &ui_drc_enable);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE");
+ if (drcCut != -1) {
+ err_code =
+ ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT, &drcCut);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT");
+ }
+
+ if (drcBoost != -1) {
+ err_code = ixheaacd_dec_api(
+ mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST, &drcBoost);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST");
+ }
+
+ if (drcRefLevel != -1) {
+ err_code = ixheaacd_dec_api(
+ mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL, &drcRefLevel);
+ RETURN_IF_FATAL(err_code,
+ "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL");
+ }
+
+ if (drcRefLevel != -1) {
+ err_code = ixheaacd_dec_api(
+ mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &drcRefLevel);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS");
+ }
+
+ if (drcHeavyCompression != -1) {
+ err_code =
+ ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP,
+ &drcHeavyCompression);
+ RETURN_IF_FATAL(err_code,
+ "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP");
+ }
+
+ err_code =
+ ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &drEffectType);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
+
+ int32_t i_effect_type, i_target_loudness, i_loud_norm;
+ /*Set Effect Type*/
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE,
+ &i_effect_type);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE");
+
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type);
+
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE");
+
+ /*Set target loudness */
+ err_code = ixheaacd_dec_api(
+ mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, &i_target_loudness);
+ RETURN_IF_FATAL(err_code,
+ "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS,
+ &i_target_loudness);
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS");
+
+ /*Set loud_norm_flag*/
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
+ IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM,
+ &i_loud_norm);
+ RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM");
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM,
+ IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm);
+
+ RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM");
+
+ return IA_NO_ERROR;
+}
+
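+ /* Factory used by the Codec2 framework to create C2SoftXaacDec components
+ and their interfaces; exported through CreateCodec2Factory() below. */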
+class C2SoftXaacDecFactory : public C2ComponentFactory {
+public:
+ C2SoftXaacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftXaacDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftXaacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftXaacDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftXaacDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftXaacDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
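+ // C ABI entry points through which the Codec2 framework loads and
+ // releases this component factory.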
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftXaacDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.h b/media/codec2/components/xaac/C2SoftXaacDec.h
new file mode 100644
index 0000000..5c8567f
--- /dev/null
+++ b/media/codec2/components/xaac/C2SoftXaacDec.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_XAAC_DEC_H_
+#define ANDROID_C2_SOFT_XAAC_DEC_H_
+#include <utils/Vector.h>
+#include <SimpleC2Component.h>
+
+#include "ixheaacd_type_def.h"
+#include "ixheaacd_error_standards.h"
+#include "ixheaacd_error_handler.h"
+#include "ixheaacd_apicmd_standards.h"
+#include "ixheaacd_memory_standards.h"
+#include "ixheaacd_aac_config.h"
+
+#include "impd_apicmd_standards.h"
+#include "impd_drc_config_params.h"
+
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
+#define MAX_NUM_BLOCKS 8 /* maximum number of audio blocks that can be decoded */
+
+extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj,
+ WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
+extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj,
+ WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
+extern "C" IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj,
+ pWORD32 pi_samp_freq,
+ pWORD32 pi_num_chan,
+ pWORD32 pi_pcm_wd_sz,
+ pWORD32 pi_channel_mask);
+
+namespace android {
+
+struct C2SoftXaacDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftXaacDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftXaacDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ enum {
+ kOutputDrainBufferSize = 2048 * MAX_CHANNEL_COUNT * MAX_NUM_BLOCKS,
+ };
+
+ std::shared_ptr<IntfImpl> mIntf;
+ void* mXheaacCodecHandle;
+ void* mMpegDDrcHandle;
+ uint32_t mInputBufferSize;
+ uint32_t mOutputFrameLength;
+ int8_t* mInputBuffer;
+ int8_t* mOutputBuffer;
+ int32_t mSampFreq;
+ int32_t mNumChannels;
+ int32_t mPcmWdSz;
+ int32_t mChannelMask;
+ int32_t mNumOutBytes;
+ uint64_t mCurFrameIndex;
+ uint64_t mCurTimestamp;
+ bool mIsCodecInitialized;
+ bool mIsCodecConfigFlushRequired;
+ int8_t* mDrcInBuf;
+ int8_t* mDrcOutBuf;
+ int32_t mMpegDDRCPresent;
+ int32_t mDRCFlag;
+
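+ // Allocations obtained on behalf of the xHE-AAC decoder and the MPEG-D
+ // DRC module, tracked so they can be released when the decoder is torn down.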
+ Vector<void*> mMemoryVec;
+ Vector<void*> mDrcMemoryVec;
+
+ size_t mInputBufferCount __unused;
+ size_t mOutputBufferCount __unused;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+ char* mOutputDrainBuffer;
+ uint32_t mOutputDrainBufferWritePos;
+
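+ // Internal helpers covering decoder set-up, DRC configuration, per-frame
+ // decoding, work delivery and tear-down, built on the ixheaacd_dec_api()
+ // and ia_drc_dec_api() command interfaces.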
+ IA_ERRORCODE initDecoder();
+ IA_ERRORCODE setDrcParameter();
+ IA_ERRORCODE configflushDecode();
+ IA_ERRORCODE drainDecoder();
+ void finishWork(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool);
+
+ IA_ERRORCODE initXAACDrc();
+ IA_ERRORCODE initXAACDecoder();
+ IA_ERRORCODE deInitXAACDecoder();
+ IA_ERRORCODE initMPEGDDDrc();
+ IA_ERRORCODE deInitMPEGDDDrc();
+ IA_ERRORCODE configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
+ int configMPEGDDrc();
+ IA_ERRORCODE decodeXAACStream(uint8_t* inBuffer,
+ uint32_t inBufferLength,
+ int32_t* bytesConsumed,
+ int32_t* outBytes);
+ IA_ERRORCODE getXAACStreamInfo();
+ IA_ERRORCODE setXAACDRCInfo(int32_t drcCut, int32_t drcBoost,
+ int32_t drcRefLevel, int32_t drcHeavyCompression,
+ int32_t drEffectType);
+
+ C2_DO_NOT_COPY(C2SoftXaacDec);
+};
+
+} // namespace android
+
+#endif  // ANDROID_C2_SOFT_XAAC_DEC_H_