Merge "Modified that logs do not include URLs."
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index 2a045e1..369bd78 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -9,8 +9,17 @@
static_libs: ["libhevcdec"],
- include_dirs: [
- "external/libhevc/decoder",
- "external/libhevc/common",
+}
+
+cc_library_shared {
+ name: "libcodec2_soft_hevcenc",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_signed-defaults",
],
+
+ srcs: ["C2SoftHevcEnc.cpp"],
+
+ static_libs: ["libhevcenc"],
+
}
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
new file mode 100644
index 0000000..5e6f44f
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -0,0 +1,802 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftHevcEnc"
+#include <log/log.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "ihevc_typedefs.h"
+#include "itt_video_api.h"
+#include "ihevce_api.h"
+#include "ihevce_plugin.h"
+#include "C2SoftHevcEnc.h"
+
+namespace android {
+
+class C2SoftHevcEnc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(
+ new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_VIDEO_HEVC))
+ .build());
+
+ addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
+ .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(320, 1920, 2),
+ C2F(mSize, height).inRange(128, 1088, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(
+ Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_HEVC_MAIN, LEVEL_HEVC_MAIN_1))
+ .withFields({
+ C2F(mProfileLevel, profile)
+ .oneOf({C2Config::PROFILE_HEVC_MAIN,
+ C2Config::PROFILE_HEVC_MAIN_STILL}),
+ C2F(mProfileLevel, level)
+ .oneOf({LEVEL_HEVC_MAIN_1, LEVEL_HEVC_MAIN_2,
+ LEVEL_HEVC_MAIN_2_1, LEVEL_HEVC_MAIN_3,
+ LEVEL_HEVC_MAIN_3_1, LEVEL_HEVC_MAIN_4,
+ LEVEL_HEVC_MAIN_4_1, LEVEL_HEVC_MAIN_5,
+ LEVEL_HEVC_MAIN_5_1, LEVEL_HEVC_MAIN_5_2}),
+ })
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+ .build());
+
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(
+ new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(
+ Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock,
+ C2P<C2StreamBitrateInfo::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock,
+ const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me,
+ const C2P<C2VideoSizeStreamTuning::input> &size,
+ const C2P<C2StreamFrameRateInfo::output> &frameRate,
+ const C2P<C2BitrateTuning::output> &bitrate) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_HEVC_MAIN;
+ }
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ uint64_t samplesPerSec;
+ uint64_t samples;
+ uint32_t bitrate;
+ };
+
+ constexpr LevelLimits kLimits[] = {
+ { LEVEL_HEVC_MAIN_1, 552960, 36864, 128000 },
+ { LEVEL_HEVC_MAIN_2, 3686400, 122880, 1500000 },
+ { LEVEL_HEVC_MAIN_2_1, 7372800, 245760, 3000000 },
+ { LEVEL_HEVC_MAIN_3, 16588800, 552960, 6000000 },
+ { LEVEL_HEVC_MAIN_3_1, 33177600, 983040, 10000000 },
+ { LEVEL_HEVC_MAIN_4, 66846720, 2228224, 12000000 },
+ { LEVEL_HEVC_MAIN_4_1, 133693440, 2228224, 20000000 },
+ { LEVEL_HEVC_MAIN_5, 267386880, 8912896, 25000000 },
+ { LEVEL_HEVC_MAIN_5_1, 534773760, 8912896, 40000000 },
+ { LEVEL_HEVC_MAIN_5_2, 1069547520, 8912896, 60000000 },
+ { LEVEL_HEVC_MAIN_6, 1069547520, 35651584, 60000000 },
+ { LEVEL_HEVC_MAIN_6_1, 2139095040, 35651584, 120000000 },
+ { LEVEL_HEVC_MAIN_6_2, 4278190080, 35651584, 240000000 },
+ };
+
+ uint64_t samples = size.v.width * size.v.height;
+ uint64_t samplesPerSec = samples * frameRate.v.value;
+
+    // Check if the supplied level meets the sample rate / sample count /
+    // bitrate limits. If not, update to the lowest level meeting them.
+
+ bool found = false;
+ // By default needsUpdate = false in case the supplied level does meet
+ // the requirements.
+ bool needsUpdate = false;
+ for (const LevelLimits &limit : kLimits) {
+ if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+ bitrate.v.value <= limit.bitrate) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x", me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ if (!found) {
+ // We set to the highest supported level.
+ me.set().level = LEVEL_HEVC_MAIN_5_2;
+ }
+ return C2R::Ok();
+ }
+
+ UWORD32 getProfile_l() const {
+ switch (mProfileLevel->profile) {
+ case PROFILE_HEVC_MAIN: [[fallthrough]];
+ case PROFILE_HEVC_MAIN_STILL: return 1;
+ default:
+ ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ return 1;
+ }
+ }
+
+ UWORD32 getLevel_l() const {
+ struct Level {
+ C2Config::level_t c2Level;
+ UWORD32 hevcLevel;
+ };
+ constexpr Level levels[] = {
+ { LEVEL_HEVC_MAIN_1, 30 },
+ { LEVEL_HEVC_MAIN_2, 60 },
+ { LEVEL_HEVC_MAIN_2_1, 63 },
+ { LEVEL_HEVC_MAIN_3, 90 },
+ { LEVEL_HEVC_MAIN_3_1, 93 },
+ { LEVEL_HEVC_MAIN_4, 120 },
+ { LEVEL_HEVC_MAIN_4_1, 123 },
+ { LEVEL_HEVC_MAIN_5, 150 },
+ { LEVEL_HEVC_MAIN_5_1, 153 },
+ { LEVEL_HEVC_MAIN_5_2, 156 },
+ { LEVEL_HEVC_MAIN_6, 180 },
+ { LEVEL_HEVC_MAIN_6_1, 183 },
+ { LEVEL_HEVC_MAIN_6_2, 186 },
+ };
+ for (const Level &level : levels) {
+ if (mProfileLevel->level == level.c2Level) {
+ return level.hevcLevel;
+ }
+ }
+ ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ return 156;
+ }
+ uint32_t getSyncFramePeriod_l() const {
+ if (mSyncFramePeriod->value < 0 ||
+ mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const {
+ return mSize;
+ }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const {
+ return mFrameRate;
+ }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const {
+ return mBitrate;
+ }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+ return mRequestSync;
+ }
+
+ private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+};
+constexpr char COMPONENT_NAME[] = "c2.android.hevc.encoder";
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+C2SoftHevcEnc::C2SoftHevcEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mHevcEncProfile(1),
+ mHevcEncLevel(30),
+ mStarted(false),
+ mSpsPpsHeaderReceived(false),
+ mSignalledEos(false),
+ mSignalledError(false),
+ mCodecCtx(nullptr) {
+ // If dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+C2SoftHevcEnc::~C2SoftHevcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftHevcEnc::onInit() {
+ return initEncoder();
+}
+
+c2_status_t C2SoftHevcEnc::onStop() {
+ if (!mStarted) {
+ return C2_OK;
+ }
+ return releaseEncoder();
+}
+
+void C2SoftHevcEnc::onReset() {
+ onStop();
+ initEncoder();
+}
+
+void C2SoftHevcEnc::onRelease() {
+ onStop();
+}
+
+c2_status_t C2SoftHevcEnc::onFlush_sm() {
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("Signalling EOS");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+c2_status_t C2SoftHevcEnc::initEncParams() {
+ mCodecCtx = nullptr;
+ mNumCores = MIN(GetCPUCoreCount(), CODEC_MAX_CORES);
+ memset(&mEncParams, 0, sizeof(ihevce_static_cfg_params_t));
+
+ // default configuration
+ IHEVCE_PLUGIN_STATUS_T err = ihevce_set_def_params(&mEncParams);
+ if (IHEVCE_EOK != err) {
+ ALOGE("HEVC default init failed : 0x%x", err);
+ return C2_CORRUPTED;
+ }
+
+ // update configuration
+ mEncParams.s_src_prms.i4_width = mSize->width;
+ mEncParams.s_src_prms.i4_height = mSize->height;
+ mEncParams.s_src_prms.i4_frm_rate_denom = 1000;
+ mEncParams.s_src_prms.i4_frm_rate_num = mFrameRate->value * mEncParams.s_src_prms.i4_frm_rate_denom;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].i4_quality_preset = IHEVCE_QUALITY_P5;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_tgt_bitrate[0] =
+ mBitrate->value;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0] =
+ mBitrate->value << 1;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].i4_codec_level = mHevcEncLevel;
+ mEncParams.s_coding_tools_prms.i4_max_i_open_gop_period = mIDRInterval;
+ mEncParams.s_coding_tools_prms.i4_max_cra_open_gop_period = mIDRInterval;
+ mIvVideoColorFormat = IV_YUV_420P;
+ mEncParams.s_multi_thrd_prms.i4_max_num_cores = mNumCores;
+ mEncParams.s_out_strm_prms.i4_codec_profile = mHevcEncProfile;
+ mEncParams.s_config_prms.i4_rate_control_mode = 2;
+ mEncParams.s_lap_prms.i4_rc_look_ahead_pics = 0;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::releaseEncoder() {
+ mSpsPpsHeaderReceived = false;
+ mSignalledEos = false;
+ mSignalledError = false;
+ mStarted = false;
+
+ if (mCodecCtx) {
+ IHEVCE_PLUGIN_STATUS_T err = ihevce_close(mCodecCtx);
+ if (IHEVCE_EOK != err) return C2_CORRUPTED;
+ mCodecCtx = nullptr;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ (void)drainMode;
+ (void)pool;
+ return C2_OK;
+}
+c2_status_t C2SoftHevcEnc::initEncoder() {
+ CHECK(!mCodecCtx);
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mHevcEncProfile = mIntf->getProfile_l();
+ mHevcEncLevel = mIntf->getLevel_l();
+ mIDRInterval = mIntf->getSyncFramePeriod_l();
+ }
+
+ c2_status_t status = initEncParams();
+
+ if (C2_OK != status) {
+ ALOGE("Failed to initialize encoder params : 0x%x", status);
+ mSignalledError = true;
+ return status;
+ }
+
+ IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
+ err = ihevce_init(&mEncParams, &mCodecCtx);
+ if (IHEVCE_EOK != err) {
+ ALOGE("HEVC encoder init failed : 0x%x", err);
+ return C2_CORRUPTED;
+ }
+
+ mStarted = true;
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
+ const C2GraphicView* const input,
+ uint64_t timestamp) {
+ ihevce_static_cfg_params_t* params = &mEncParams;
+ memset(ps_encode_ip, 0, sizeof(ihevce_inp_buf_t));
+
+ if (!input) {
+ return C2_OK;
+ }
+
+ if (input->width() < mSize->width ||
+ input->height() < mSize->height) {
+ /* Expect width height to be configured */
+ ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(),
+ mSize->width, input->height(), mSize->height);
+ return C2_BAD_VALUE;
+ }
+
+ const C2PlanarLayout& layout = input->layout();
+ uint8_t* yPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* uPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* vPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+
+    // width and height are always even, so the half-resolution chroma
+    // planes (yPlaneSize / 4 each) divide exactly
+ CHECK_EQ((width & 1u), 0u);
+ CHECK_EQ((height & 1u), 0u);
+
+ size_t yPlaneSize = width * height;
+
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
+ MemoryBlock conversionBuffer =
+ mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(),
+ conversionBuffer);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, height,
+ conversionBuffer.size(), *input);
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*input)) {
+ ALOGE("input is not YUV420");
+ return C2_BAD_VALUE;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1 &&
+ layout.planes[layout.PLANE_U].colInc == 1 &&
+ layout.planes[layout.PLANE_V].colInc == 1 &&
+ uStride == vStride && yStride == 2 * vStride) {
+ // I420 compatible - already set up above
+ break;
+ }
+
+ // copy to I420
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ MemoryBlock conversionBuffer =
+ mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(),
+ conversionBuffer);
+ MediaImage2 img =
+ CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ return C2_BAD_VALUE;
+ }
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGE("YUVA plane type is not supported");
+ return C2_BAD_VALUE;
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P: {
+ // input buffer is supposed to be const but Ittiam API wants bare
+ // pointer.
+ ps_encode_ip->apv_inp_planes[0] = yPlane;
+ ps_encode_ip->apv_inp_planes[1] = uPlane;
+ ps_encode_ip->apv_inp_planes[2] = vPlane;
+
+ ps_encode_ip->ai4_inp_strd[0] = yStride;
+ ps_encode_ip->ai4_inp_strd[1] = uStride;
+ ps_encode_ip->ai4_inp_strd[2] = vStride;
+
+ ps_encode_ip->ai4_inp_size[0] = yStride * height;
+ ps_encode_ip->ai4_inp_size[1] = uStride * height >> 1;
+ ps_encode_ip->ai4_inp_size[2] = vStride * height >> 1;
+ break;
+ }
+
+ case IV_YUV_422ILE: {
+ // TODO
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default: {
+ ps_encode_ip->apv_inp_planes[0] = yPlane;
+ ps_encode_ip->apv_inp_planes[1] = uPlane;
+ ps_encode_ip->apv_inp_planes[2] = nullptr;
+
+ ps_encode_ip->ai4_inp_strd[0] = yStride;
+ ps_encode_ip->ai4_inp_strd[1] = uStride;
+ ps_encode_ip->ai4_inp_strd[2] = 0;
+
+ ps_encode_ip->ai4_inp_size[0] = yStride * height;
+ ps_encode_ip->ai4_inp_size[1] = uStride * height >> 1;
+ ps_encode_ip->ai4_inp_size[2] = 0;
+ break;
+ }
+ }
+
+ ps_encode_ip->i4_curr_bitrate =
+ params->s_tgt_lyr_prms.as_tgt_params[0].ai4_tgt_bitrate[0];
+ ps_encode_ip->i4_curr_peak_bitrate =
+ params->s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0];
+ ps_encode_ip->i4_curr_rate_factor = params->s_config_prms.i4_rate_factor;
+ ps_encode_ip->u8_pts = timestamp;
+ return C2_OK;
+}
+
+void C2SoftHevcEnc::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledEos) {
+ work->result = C2_BAD_VALUE;
+ ALOGD("Signalled Error / Signalled Eos");
+ return;
+ }
+ c2_status_t status = C2_OK;
+
+ // Initialize encoder if not already initialized
+ if (!mStarted) {
+ status = initEncoder();
+ if (C2_OK != status) {
+ ALOGE("Failed to initialize encoder : 0x%x", status);
+ mSignalledError = true;
+ work->result = status;
+ return;
+ }
+ }
+
+ std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2Buffer> inputBuffer = nullptr;
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ view = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (view->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", view->error());
+ mSignalledError = true;
+ return;
+ }
+ }
+
+ IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
+
+ fillEmptyWork(work);
+ if (!mSpsPpsHeaderReceived) {
+ ihevce_out_buf_t s_header_op{};
+ err = ihevce_encode_header(mCodecCtx, &s_header_op);
+ if (err == IHEVCE_EOK && s_header_op.i4_bytes_generated) {
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(
+ s_header_op.i4_bytes_generated, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, s_header_op.pu1_output_buf,
+ s_header_op.i4_bytes_generated);
+ DUMP_TO_FILE(mOutFile, csd->m.value, csd->flexCount());
+ work->worklets.front()->output.configUpdate.push_back(
+ std::move(csd));
+ mSpsPpsHeaderReceived = true;
+ }
+ if (!inputBuffer) {
+ return;
+ }
+ }
+ ihevce_inp_buf_t s_encode_ip{};
+ ihevce_out_buf_t s_encode_op{};
+ uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+ status = setEncodeArgs(&s_encode_ip, view.get(), timestamp);
+ if (C2_OK != status) {
+ mSignalledError = true;
+ ALOGE("setEncodeArgs failed : 0x%x", status);
+ work->result = status;
+ return;
+ }
+
+ uint64_t timeDelay = 0;
+ uint64_t timeTaken = 0;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ ihevce_inp_buf_t* ps_encode_ip = (inputBuffer) ? &s_encode_ip : nullptr;
+
+ err = ihevce_encode(mCodecCtx, ps_encode_ip, &s_encode_op);
+ if (IHEVCE_EOK != err) {
+ ALOGE("Encode Frame failed : 0x%x", err);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ GETTIME(&mTimeEnd, nullptr);
+    /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", (int)timeTaken,
+ (int)timeDelay, s_encode_op.i4_bytes_generated);
+
+ if (s_encode_op.i4_bytes_generated) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ status = pool->fetchLinearBlock(s_encode_op.i4_bytes_generated, usage, &block);
+ if (C2_OK != status) {
+ ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+ work->result = C2_NO_MEMORY;
+ mSignalledError = true;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (C2_OK != wView.error()) {
+ ALOGE("write view map failed with status 0x%x", wView.error());
+ work->result = wView.error();
+ mSignalledError = true;
+ return;
+ }
+ memcpy(wView.data(), s_encode_op.pu1_output_buf,
+ s_encode_op.i4_bytes_generated);
+
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, s_encode_op.i4_bytes_generated);
+
+ DUMP_TO_FILE(mOutFile, s_encode_op.pu1_output_buf,
+ s_encode_op.i4_bytes_generated);
+
+ work->worklets.front()->output.ordinal.timestamp = s_encode_op.u8_pts;
+ if (s_encode_op.i4_is_key_frame) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(
+ std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+ if (eos) {
+ mSignalledEos = true;
+ }
+}
+
+class C2SoftHevcEncFactory : public C2ComponentFactory {
+ public:
+ C2SoftHevcEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftHevcEnc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftHevcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftHevcEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftHevcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftHevcEncFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftHevcEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
new file mode 100644
index 0000000..c22fea2
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_HEVC_ENC_H_
+#define ANDROID_C2_SOFT_HEVC_ENC_H_
+
+#include <map>
+#include <utils/Vector.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <SimpleC2Component.h>
+
+#include "ihevc_typedefs.h"
+
+namespace android {
+#define MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define CODEC_MAX_CORES 4
+
+struct C2SoftHevcEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftHevcEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+ c2_status_t drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+
+ protected:
+ virtual ~C2SoftHevcEnc();
+
+ private:
+ std::shared_ptr<IntfImpl> mIntf;
+ ihevce_static_cfg_params_t mEncParams;
+ size_t mNumCores;
+ UWORD32 mIDRInterval;
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+ UWORD32 mHevcEncProfile;
+ UWORD32 mHevcEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+ bool mSignalledEos;
+ bool mSignalledError;
+ void* mCodecCtx;
+ MemoryBlockPool mConversionBuffers;
+ std::map<void*, MemoryBlock> mConversionBuffersInUse;
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ // profile
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+
+ c2_status_t initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+ c2_status_t setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
+ const C2GraphicView* const input,
+ uint64_t timestamp);
+ C2_DO_NOT_COPY(C2SoftHevcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/data/local/tmp/hevc"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/data/local/tmp/hevc"
+#define OUTPUT_DUMP_EXT "h265"
+#define GENERATE_FILE_NAMES() \
+{ \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ ALOGD("GENERATE_FILE_NAMES"); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
+ mTimeStart.tv_usec, INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) \
+{ \
+ FILE* fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE* fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ ALOGD("Dump to file!"); \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename); \
+ if (fp != NULL) fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif  // ANDROID_C2_SOFT_HEVC_ENC_H_
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index cdcc41b..df81d49 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -139,8 +139,8 @@
std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
durations.push_back(elapsed);
}
- nth_element(durations.begin(), durations.end(), durations.begin() + n,
- std::greater<Clock::duration>());
+ std::nth_element(durations.begin(), durations.end(), durations.begin() + n,
+ std::greater<Clock::duration>());
return durations[n];
}
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
index 1c127e4..1e23147 100644
--- a/media/codec2/sfplugin/PipelineWatcher.h
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -26,7 +26,8 @@
namespace android {
/**
- * PipelineWatcher watches the status of the work.
+ * PipelineWatcher watches the pipeline and infers the status of work items from
+ * events.
*/
class PipelineWatcher {
public:
@@ -39,21 +40,81 @@
mSmoothnessFactor(0) {}
~PipelineWatcher() = default;
+ /**
+ * \param value the new input delay value
+ * \return this object
+ */
PipelineWatcher &inputDelay(uint32_t value);
+
+ /**
+ * \param value the new pipeline delay value
+ * \return this object
+ */
PipelineWatcher &pipelineDelay(uint32_t value);
+
+ /**
+ * \param value the new output delay value
+ * \return this object
+ */
PipelineWatcher &outputDelay(uint32_t value);
+
+ /**
+ * \param value the new smoothness factor value
+ * \return this object
+ */
PipelineWatcher &smoothnessFactor(uint32_t value);
+ /**
+ * Client queued a work item to the component.
+ *
+ * \param frameIndex input frame index of this work
+ * \param buffers input buffers of the queued work item
+ * \param queuedAt time when the client queued the buffer
+ */
void onWorkQueued(
uint64_t frameIndex,
std::vector<std::shared_ptr<C2Buffer>> &&buffers,
const Clock::time_point &queuedAt);
+
+ /**
+ * The component released input buffers from a work item.
+ *
+ * \param frameIndex input frame index
+ * \param arrayIndex index of the buffer at the original |buffers| in
+ * onWorkQueued().
+ * \return buffers[arrayIndex]
+ */
std::shared_ptr<C2Buffer> onInputBufferReleased(
uint64_t frameIndex, size_t arrayIndex);
+
+ /**
+ * The component finished processing a work item.
+ *
+ * \param frameIndex input frame index
+ */
void onWorkDone(uint64_t frameIndex);
+
+ /**
+ * Flush the pipeline.
+ */
void flush();
+ /**
+ * \return true if pipeline does not need more work items to proceed
+ * smoothly, considering delays and smoothness factor;
+ * false otherwise.
+ */
bool pipelineFull() const;
+
+ /**
+ * Return elapsed processing time of a work item, nth from the longest
+ * processing time to the shortest.
+ *
+ * \param now current timestamp
+ * \param n nth work item, from the longest processing time to the
+ * shortest. It's a 0-based index.
+ * \return elapsed processing time of nth work item.
+ */
Clock::duration elapsed(const Clock::time_point &now, size_t n) const;
private:
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index f07d9b0..32588a5 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -856,6 +856,7 @@
emplace("libcodec2_soft_h263dec.so");
emplace("libcodec2_soft_h263enc.so");
emplace("libcodec2_soft_hevcdec.so");
+ emplace("libcodec2_soft_hevcenc.so");
emplace("libcodec2_soft_mp3dec.so");
emplace("libcodec2_soft_mpeg2dec.so");
emplace("libcodec2_soft_mpeg4dec.so");
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index d9f6e36..1bce16f 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -1167,8 +1167,6 @@
case SET_STREAM_VOLUME:
case REGISTER_POLICY_MIXES:
case SET_MASTER_MONO:
- case START_AUDIO_SOURCE:
- case STOP_AUDIO_SOURCE:
case GET_SURROUND_FORMATS:
case SET_SURROUND_FORMAT_ENABLED:
case SET_ASSISTANT_UID:
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 3119950..0ad4d04 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -151,13 +151,13 @@
virtual media::VolumeShaper::Status applyVolumeShaper(
const sp<media::VolumeShaper::Configuration>& configuration,
- const sp<media::VolumeShaper::Operation>& operation);
- virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+ const sp<media::VolumeShaper::Operation>& operation) = 0;
+ virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
// AudioRouting
- virtual status_t setOutputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
- virtual status_t enableAudioDeviceCallback(bool enabled);
+ virtual status_t setOutputDevice(audio_port_handle_t deviceId) = 0;
+ virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+ virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
};
MediaPlayerBase() {}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 998f096..9d3338b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -171,11 +171,7 @@
}
struct CodecObserver : public BnOMXObserver {
- CodecObserver() {}
-
- void setNotificationMessage(const sp<AMessage> &msg) {
- mNotify = msg;
- }
+ explicit CodecObserver(const sp<AMessage> &msg) : mNotify(msg) {}
// from IOMXObserver
virtual void onMessages(const std::list<omx_message> &messages) {
@@ -251,7 +247,7 @@
virtual ~CodecObserver() {}
private:
- sp<AMessage> mNotify;
+ const sp<AMessage> mNotify;
DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};
@@ -1248,6 +1244,7 @@
info.mRenderInfo = NULL;
info.mGraphicBuffer = graphicBuffer;
info.mNewGraphicBuffer = false;
+ info.mDequeuedAt = mDequeueCounter;
// TODO: We shouln't need to create MediaCodecBuffer. In metadata mode
// OMX doesn't use the shared memory buffer, but some code still
@@ -6629,7 +6626,8 @@
CHECK(mCodec->mOMXNode == NULL);
- sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
+ sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
+ notify->setInt32("generation", mCodec->mNodeGeneration + 1);
sp<RefBase> obj;
CHECK(msg->findObject("codecInfo", &obj));
@@ -6644,7 +6642,7 @@
AString componentName;
CHECK(msg->findString("componentName", &componentName));
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(notify);
sp<IOMX> omx;
sp<IOMXNode> omxNode;
@@ -6675,9 +6673,7 @@
mDeathNotifier.clear();
}
- notify = new AMessage(kWhatOMXMessageList, mCodec);
- notify->setInt32("generation", ++mCodec->mNodeGeneration);
- observer->setNotificationMessage(notify);
+ ++mCodec->mNodeGeneration;
mCodec->mComponentName = componentName;
mCodec->mRenderTracker.setComponentName(componentName);
@@ -8167,6 +8163,10 @@
OMX_CommandPortEnable, kPortIndexOutput);
}
+ // Clear the RenderQueue in which queued GraphicBuffers hold the
+ // actual buffer references in order to free them early.
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
if (err == OK) {
err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -8572,7 +8572,7 @@
}
sp<IOMX> omx = client.interface();
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(new AMessage);
sp<IOMXNode> omxNode;
err = omx->allocateNode(name, observer, &omxNode);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 42b98b1..18a6bd8 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -431,7 +431,7 @@
|| !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
if (frameTimeUs < 0) {
- int64_t thumbNailTime;
+ int64_t thumbNailTime = -1ll;
if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
|| thumbNailTime < 0) {
thumbNailTime = 0;
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index f34d54c..fa3d372 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -165,6 +165,9 @@
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
const char *mime;
@@ -186,6 +189,9 @@
}
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ return NULL;
+ }
if (metaOnly) {
return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail);
@@ -280,6 +286,9 @@
size_t i;
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -296,6 +305,9 @@
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
+ if (!trackMeta) {
+ return UNKNOWN_ERROR;
+ }
if (metaOnly) {
if (outFrame != NULL) {
@@ -529,6 +541,9 @@
String8 timedTextLang;
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ continue;
+ }
int64_t durationUs;
if (trackMeta->findInt64(kKeyDuration, &durationUs)) {
@@ -667,8 +682,9 @@
!strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
- CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
-
+ if (trackMeta != nullptr) {
+ CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
+ }
if (!strncasecmp("audio/", trackMIME, 6)) {
// The matroska file only contains a single audio track,
// rewrite its mime type.
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 09424b8..16b3319 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -588,6 +588,7 @@
{ "genre", kKeyGenre },
{ "location", kKeyLocation },
{ "lyricist", kKeyWriter },
+ { "manufacturer", kKeyManufacturer },
{ "title", kKeyTitle },
{ "year", kKeyYear },
}
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 5c2d96d..e20174f 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -107,6 +107,15 @@
<Limit name="bitrate" range="1-12000000" />
<Feature name="intra-refresh" />
</MediaCodec>
+ <MediaCodec name="c2.android.hevc.encoder" type="video/hevc">
+ <!-- profiles and levels: ProfileMain : MainTierLevel51 -->
+ <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="8x8" />
+ <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
+ <Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="bitrate" range="1-10000000" />
+ </MediaCodec>
<MediaCodec name="c2.android.mpeg4.encoder" type="video/mp4v-es">
<Alias name="OMX.google.mpeg4.encoder" />
<!-- profiles and levels: ProfileCore : Level2 -->
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 437bdb7..a0407af 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -144,6 +144,9 @@
// The language code for this media
kKeyMediaLanguage = 'lang', // cstring
+ // The manufacturer code for this media
+ kKeyManufacturer = 'manu', // cstring
+
// To store the timed text format data
kKeyTextFormatData = 'text', // raw data
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 74754ea..339f622 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -106,6 +106,10 @@
symbol_file: "libmediandk.map.txt",
versions: ["29"],
},
+
+ // Bug: http://b/124522995 libmediandk has linker errors when built with
+ // coverage
+ native_coverage: false,
}
llndk_library {
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index cd8ecb5..26a6238 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -342,6 +342,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_LOCATION = "location";
EXPORT const char* AMEDIAFORMAT_KEY_LOOP = "loop";
EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
+EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index cc1d9ef..ddf5291 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -214,6 +214,7 @@
extern const char* AMEDIAFORMAT_KEY_LOCATION __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LOOP __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LYRICIST __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_MANUFACTURER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN __INTRODUCED_IN(29);
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 171167d..7bdd3ad 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -105,6 +105,7 @@
AMEDIAFORMAT_KEY_LOCATION; # var introduced=29
AMEDIAFORMAT_KEY_LOOP; # var introduced=29
AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
+ AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var introduced=21
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 1c54aec..599c446 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -85,7 +85,7 @@
return false;
}
} else {
- if (appOps.noteOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
+ if (appOps.checkOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
ALOGE("Request denied by app op: %d", op);
return false;
}
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
index 3c48e88..e6e6bdb 100644
--- a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
@@ -2,7 +2,7 @@
<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
<module name="hearing_aid" halVersion="2.0">
<mixPorts>
- <mixPort name="hearing aid output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <mixPort name="hearing aid output" role="source">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="24000,16000"
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 32cc380..b1aa92d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -4048,14 +4048,18 @@
std::vector<const char*> fileNames;
status_t ret;
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
- } else {
+ if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
+ if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ // This property persist.bluetooth.bluetooth_audio_hal.enabled is temporary only.
+ // xml files AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME, although having
+ // the same name, must be different in offload and non offload cases in device
+ // specific configuration file.
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index a39477d..76ac191 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -383,6 +383,8 @@
// OR The client is an accessibility service
// AND is on TOP OR latest started
// AND the source is VOICE_RECOGNITION or HOTWORD
+// OR the source is one of: AUDIO_SOURCE_VOICE_DOWNLINK, AUDIO_SOURCE_VOICE_UPLINK,
+// AUDIO_SOURCE_VOICE_CALL
// OR Any other client
// AND The assistant is not on TOP
// AND is on TOP OR latest started
@@ -463,6 +465,10 @@
(source == AUDIO_SOURCE_VOICE_RECOGNITION || source == AUDIO_SOURCE_HOTWORD)) {
forceIdle = false;
}
+ } else if (source == AUDIO_SOURCE_VOICE_DOWNLINK ||
+ source == AUDIO_SOURCE_VOICE_CALL ||
+ (source == AUDIO_SOURCE_VOICE_UPLINK)) {
+ forceIdle = false;
} else {
if (!isAssistantOnTop && (isOnTop || isLatest) &&
(!isSensitiveActive || isLatestSensitive)) {
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 2ca8356..7ec0e4c 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -94,6 +94,7 @@
"libsensorprivacy",
"libstagefright",
"libstagefright_foundation",
+ "libyuv",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
"android.frameworks.cameraservice.device@2.0",
@@ -137,6 +138,7 @@
name: "libdepthphoto",
srcs: [
+ "utils/ExifUtils.cpp",
"common/DepthPhotoProcessor.cpp",
],
@@ -150,6 +152,8 @@
"libcutils",
"libjpeg",
"libmemunreachable",
+ "libexif",
+ "libcamera_client",
],
include_dirs: [
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2eec0f7..9525ad2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -339,6 +339,21 @@
} else {
depthPhoto.mIsLensDistortionValid = 0;
}
+ entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
+ if (entry.count > 0) {
+ // The camera jpeg orientation values must be within [0, 90, 180, 270].
+ switch (entry.data.i32[0]) {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
+ break;
+ default:
+ ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
+ __FUNCTION__, entry.data.i32[0]);
+ }
+ }
size_t actualJpegSize = 0;
res = mDepthPhotoProcess(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a61cdee..9fd0e8b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -23,6 +23,7 @@
#include <sys/syscall.h>
#include <android/hardware/camera/device/3.5/types.h>
+#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -192,6 +193,7 @@
return res;
}
+ initCopyRowFunction(width);
return res;
}
@@ -1373,7 +1375,7 @@
for (auto row = top; row < top+height; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
- memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
+ mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
}
// U is Cb, V is Cr
@@ -1406,24 +1408,25 @@
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
- memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
+ mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
}
} else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
// U plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
// V plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
} else {
- // Convert between semiplannar and plannar
+ // Convert between semiplannar and plannar, or when UV orders are
+ // different.
uint8_t *dst = codecBuffer->data();
for (auto row = top/2; row < (top+height)/2; row++) {
for (auto col = left/2; col < (left+width)/2; col++) {
@@ -1446,6 +1449,38 @@
return OK;
}
+void HeicCompositeStream::initCopyRowFunction(int32_t width)
+{
+ using namespace libyuv;
+
+ mFnCopyRow = CopyRow_C;
+#if defined(HAS_COPYROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
+ }
+#endif
+#if defined(HAS_COPYROW_AVX)
+ if (TestCpuFlag(kCpuHasAVX)) {
+ mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
+ }
+#endif
+#if defined(HAS_COPYROW_ERMS)
+ if (TestCpuFlag(kCpuHasERMS)) {
+ mFnCopyRow = CopyRow_ERMS;
+ }
+#endif
+#if defined(HAS_COPYROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
+ }
+#endif
+#if defined(HAS_COPYROW_MIPS)
+ if (TestCpuFlag(kCpuHasMIPS)) {
+ mFnCopyRow = CopyRow_MIPS;
+ }
+#endif
+}
+
size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
size_t maxAppsSegment = 1;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 4cd9af0..2aa3c38 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -195,6 +195,7 @@
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height);
+ void initCopyRowFunction(int32_t width);
static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -244,6 +245,9 @@
// In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames;
+
+ // Function pointer of libyuv row copy.
+ void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index a945aca..6d96163 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -32,9 +32,12 @@
#include <dynamic_depth/profile.h>
#include <dynamic_depth/profiles.h>
#include <jpeglib.h>
+#include <libexif/exif-data.h>
+#include <libexif/exif-system.h>
#include <math.h>
#include <sstream>
#include <utils/Errors.h>
+#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <xmpmeta/xmp_data.h>
#include <xmpmeta/xmp_writer.h>
@@ -61,8 +64,44 @@
namespace android {
namespace camera3 {
+ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
+ if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
+ return ExifOrientation::ORIENTATION_UNDEFINED;
+ }
+
+ auto exifData = exif_data_new();
+ exif_data_load_data(exifData, jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ ALOGV("%s: Orientation EXIF entry invalid!", __FUNCTION__);
+ exif_data_unref(exifData);
+ return ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data, exif_data_get_byte_order(exifData));
+ ExifOrientation ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ ret = static_cast<ExifOrientation> (orientationValue);
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %d, defaulting to 0 degrees",
+ __FUNCTION__, orientationValue);
+ ret = ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ exif_data_unref(exifData);
+
+ return ret;
+}
+
status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
- const size_t maxOutSize, uint8_t jpegQuality, size_t &actualSize) {
+ const size_t maxOutSize, uint8_t jpegQuality, ExifOrientation exifOrientation,
+ size_t &actualSize) {
status_t ret;
// libjpeg is a C library so we use C-style "inheritance" by
// putting libjpeg's jpeg_destination_mgr first in our custom
@@ -151,6 +190,23 @@
jpeg_start_compress(&cinfo, TRUE);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setImageWidth(width);
+ utils->setImageHeight(height);
+ utils->setOrientationValue(exifOrientation);
+
+ if (utils->generateApp1()) {
+ const uint8_t* exifBuffer = utils->getApp1Buffer();
+ size_t exifBufferSize = utils->getApp1Length();
+ jpeg_write_marker(&cinfo, JPEG_APP0 + 1, static_cast<const JOCTET*>(exifBuffer),
+ exifBufferSize);
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+ }
+
for (size_t i = 0; i < cinfo.image_height; i++) {
auto currentRow = static_cast<JSAMPROW>(in + i*width);
jpeg_write_scanlines(&cinfo, ¤tRow, /*num_lines*/1);
@@ -168,8 +224,106 @@
return ret;
}
+inline void unpackDepth16(uint16_t value, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ // Android densely packed depth map. The units for the range are in
+ // millimeters and need to be scaled to meters.
+ // The confidence value is encoded in the 3 most significant bits.
+ // The confidence data needs to be additionally normalized with
+ // values 1.0f, 0.0f representing maximum and minimum confidence
+ // respectively.
+ auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
+ points->push_back(point);
+
+ auto conf = (value >> 13) & 0x7;
+ float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
+ confidence->push_back(normConfidence);
+
+ if (*near > point) {
+ *near = point;
+ }
+ if (*far < point) {
+ *far = point;
+ }
+}
+
+// Trivial case, read forward from top,left corner.
+void rotate0AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
+ for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// 90 degrees CW rotation can be applied by starting to read from bottom, left corner
+// transposing rows and columns.
+void rotate90AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapWidth; i++) {
+ for (ssize_t j = inputFrame.mDepthMapHeight-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// 180 CW degrees rotation can be applied by starting to read backwards from bottom, right corner.
+void rotate180AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapHeight-1; i >= 0; i--) {
+ for (ssize_t j = inputFrame.mDepthMapWidth-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// 270 degrees CW rotation can be applied by starting to read from top, right corner
+// transposing rows and columns.
+void rotate270AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapWidth-1; i >= 0; i--) {
+ for (size_t j = 0; j < inputFrame.mDepthMapHeight; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+bool rotateAndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ switch (inputFrame.mOrientation) {
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES:
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES:
+ rotate90AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES:
+ rotate180AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES:
+ rotate270AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ default:
+ ALOGE("%s: Unsupported depth photo rotation: %d, default to 0", __FUNCTION__,
+ inputFrame.mOrientation);
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ }
+
+ return false;
+}
+
std::unique_ptr<dynamic_depth::DepthMap> processDepthMapFrame(DepthPhotoInputFrame inputFrame,
- std::vector<std::unique_ptr<Item>> *items /*out*/) {
+ ExifOrientation exifOrientation, std::vector<std::unique_ptr<Item>> *items /*out*/,
+ bool *switchDimensions /*out*/) {
+ if ((items == nullptr) || (switchDimensions == nullptr)) {
+ return nullptr;
+ }
+
std::vector<float> points, confidence;
size_t pointCount = inputFrame.mDepthMapWidth * inputFrame.mDepthMapHeight;
@@ -177,29 +331,21 @@
confidence.reserve(pointCount);
float near = UINT16_MAX;
float far = .0f;
- for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
- for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
- // Android densely packed depth map. The units for the range are in
- // millimeters and need to be scaled to meters.
- // The confidence value is encoded in the 3 most significant bits.
- // The confidence data needs to be additionally normalized with
- // values 1.0f, 0.0f representing maximum and minimum confidence
- // respectively.
- auto value = inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j];
- auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
- points.push_back(point);
+ *switchDimensions = false;
+ // Physical rotation of depth and confidence maps may be needed in case
+ // the EXIF orientation is set to 0 degrees and the depth photo orientation
+ // (source color image) has some different value.
+ if (exifOrientation == ExifOrientation::ORIENTATION_0_DEGREES) {
+ *switchDimensions = rotateAndUnpack(inputFrame, &points, &confidence, &near, &far);
+ } else {
+ rotate0AndUnpack(inputFrame, &points, &confidence, &near, &far);
+ }
- auto conf = (value >> 13) & 0x7;
- float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
- confidence.push_back(normConfidence);
-
- if (near > point) {
- near = point;
- }
- if (far < point) {
- far = point;
- }
- }
+ size_t width = inputFrame.mDepthMapWidth;
+ size_t height = inputFrame.mDepthMapHeight;
+ if (*switchDimensions) {
+ width = inputFrame.mDepthMapHeight;
+ height = inputFrame.mDepthMapWidth;
}
if (near == far) {
@@ -225,18 +371,18 @@
depthParams.depth_image_data.resize(inputFrame.mMaxJpegSize);
depthParams.confidence_data.resize(inputFrame.mMaxJpegSize);
size_t actualJpegSize;
- auto ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- pointsQuantized.data(), depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ auto ret = encodeGrayscaleJpeg(width, height, pointsQuantized.data(),
+ depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Depth map compression failed!", __FUNCTION__);
return nullptr;
}
depthParams.depth_image_data.resize(actualJpegSize);
- ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- confidenceQuantized.data(), depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ ret = encodeGrayscaleJpeg(width, height, confidenceQuantized.data(),
+ depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Confidence map compression failed!", __FUNCTION__);
return nullptr;
@@ -262,7 +408,12 @@
return BAD_VALUE;
}
- cameraParams->depth_map = processDepthMapFrame(inputFrame, &items);
+ ExifOrientation exifOrientation = getExifOrientation(
+ reinterpret_cast<const unsigned char*> (inputFrame.mMainJpegBuffer),
+ inputFrame.mMainJpegSize);
+ bool switchDimensions;
+ cameraParams->depth_map = processDepthMapFrame(inputFrame, exifOrientation, &items,
+ &switchDimensions);
if (cameraParams->depth_map == nullptr) {
ALOGE("%s: Depth map processing failed!", __FUNCTION__);
return BAD_VALUE;
@@ -274,7 +425,13 @@
// [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
const dynamic_depth::Point<double> focalLength(inputFrame.mInstrinsicCalibration[0],
inputFrame.mInstrinsicCalibration[1]);
- const Dimension imageSize(inputFrame.mMainJpegWidth, inputFrame.mMainJpegHeight);
+ size_t width = inputFrame.mMainJpegWidth;
+ size_t height = inputFrame.mMainJpegHeight;
+ if (switchDimensions) {
+ width = inputFrame.mMainJpegHeight;
+ height = inputFrame.mMainJpegWidth;
+ }
+ const Dimension imageSize(width, height);
ImagingModelParams imagingParams(focalLength, imageSize);
imagingParams.principal_point.x = inputFrame.mInstrinsicCalibration[2];
imagingParams.principal_point.y = inputFrame.mInstrinsicCalibration[3];
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index 19889a1..6a2fbff 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -23,19 +23,27 @@
namespace android {
namespace camera3 {
+enum DepthPhotoOrientation {
+ DEPTH_ORIENTATION_0_DEGREES = 0,
+ DEPTH_ORIENTATION_90_DEGREES = 90,
+ DEPTH_ORIENTATION_180_DEGREES = 180,
+ DEPTH_ORIENTATION_270_DEGREES = 270,
+};
+
struct DepthPhotoInputFrame {
- const char* mMainJpegBuffer;
- size_t mMainJpegSize;
- size_t mMainJpegWidth, mMainJpegHeight;
- uint16_t* mDepthMapBuffer;
- size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
- size_t mMaxJpegSize;
- uint8_t mJpegQuality;
- uint8_t mIsLogical;
- float mInstrinsicCalibration[5];
- uint8_t mIsInstrinsicCalibrationValid;
- float mLensDistortion[5];
- uint8_t mIsLensDistortionValid;
+ const char* mMainJpegBuffer;
+ size_t mMainJpegSize;
+ size_t mMainJpegWidth, mMainJpegHeight;
+ uint16_t* mDepthMapBuffer;
+ size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
+ size_t mMaxJpegSize;
+ uint8_t mJpegQuality;
+ uint8_t mIsLogical;
+ float mInstrinsicCalibration[5];
+ uint8_t mIsInstrinsicCalibrationValid;
+ float mLensDistortion[5];
+ uint8_t mIsLensDistortionValid;
+ DepthPhotoOrientation mOrientation;
DepthPhotoInputFrame() :
mMainJpegBuffer(nullptr),
@@ -52,7 +60,8 @@
mInstrinsicCalibration{0.f},
mIsInstrinsicCalibrationValid(0),
mLensDistortion{0.f},
- mIsLensDistortionValid(0) {}
+ mIsLensDistortionValid(0),
+ mOrientation(DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES) {}
};
static const char *kDepthPhotoLibrary = "libdepthphoto.so";
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f9ef996..923d17a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2151,7 +2151,11 @@
// Pause to reconfigure
status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
- mRequestThread->setPaused(true);
+ if (mRequestThread.get() != nullptr) {
+ mRequestThread->setPaused(true);
+ } else {
+ return NO_INIT;
+ }
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
maxExpectedDuration);
@@ -4558,7 +4562,7 @@
return;
}
- auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter);
+ auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
return;
@@ -5890,16 +5894,16 @@
if (mPaused == false) {
ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
mPaused = true;
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
@@ -5983,16 +5987,16 @@
if (mPaused == false) {
mPaused = true;
ALOGV("%s: RequestThread: Paused", __FUNCTION__);
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index d777ca1..b4e7c32 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -27,6 +27,8 @@
libcamera_client \
libcamera_metadata \
libutils \
+ libjpeg \
+ libexif \
android.hardware.camera.common@1.0 \
android.hardware.camera.provider@2.4 \
android.hardware.camera.provider@2.5 \
@@ -36,6 +38,8 @@
LOCAL_C_INCLUDES += \
system/media/private/camera/include \
+ external/dynamic_depth/includes \
+ external/dynamic_depth/internal \
LOCAL_CFLAGS += -Wall -Wextra -Werror
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
new file mode 100644
index 0000000..2162514
--- /dev/null
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "DepthProcessorTest"
+
+#include <array>
+#include <random>
+
+#include <dlfcn.h>
+#include <gtest/gtest.h>
+
+#include "../common/DepthPhotoProcessor.h"
+#include "../utils/ExifUtils.h"
+#include "NV12Compressor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferNV12Size ((((kTestBufferWidth) * (kTestBufferHeight)) * 3) / 2);
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+static const size_t kSeed = 1234;
+
+void linkToDepthPhotoLibrary(void **libHandle /*out*/,
+ process_depth_photo_frame *processFrameFunc /*out*/) {
+ ASSERT_NE(libHandle, nullptr);
+ ASSERT_NE(processFrameFunc, nullptr);
+
+ *libHandle = dlopen(kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
+ if (*libHandle != nullptr) {
+ *processFrameFunc = reinterpret_cast<camera3::process_depth_photo_frame> (
+ dlsym(*libHandle, kDepthPhotoProcessFunction));
+ ASSERT_NE(*processFrameFunc, nullptr);
+ }
+}
+
+void generateColorJpegBuffer(int jpegQuality, ExifOrientation orientationValue, bool includeExif,
+ bool switchDimensions, std::vector<uint8_t> *colorJpegBuffer /*out*/) {
+ ASSERT_NE(colorJpegBuffer, nullptr);
+
+ std::array<uint8_t, kTestBufferNV12Size> colorSourceBuffer;
+ std::default_random_engine gen(kSeed);
+ std::uniform_int_distribution<int> uniDist(0, UINT8_MAX - 1);
+ for (size_t i = 0; i < colorSourceBuffer.size(); i++) {
+ colorSourceBuffer[i] = uniDist(gen);
+ }
+
+ size_t width = kTestBufferWidth;
+ size_t height = kTestBufferHeight;
+ if (switchDimensions) {
+ width = kTestBufferHeight;
+ height = kTestBufferWidth;
+ }
+
+ NV12Compressor jpegCompressor;
+ if (includeExif) {
+ ASSERT_TRUE(jpegCompressor.compressWithExifOrientation(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality, orientationValue));
+ } else {
+ ASSERT_TRUE(jpegCompressor.compress(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality));
+ }
+
+ *colorJpegBuffer = std::move(jpegCompressor.getCompressedData());
+ ASSERT_FALSE(colorJpegBuffer->empty());
+}
+
+void generateDepth16Buffer(std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+ ASSERT_NE(depth16Buffer, nullptr);
+ std::default_random_engine gen(kSeed+1);
+ std::uniform_int_distribution<int> uniDist(0, UINT16_MAX - 1);
+ for (size_t i = 0; i < depth16Buffer->size(); i++) {
+ (*depth16Buffer)[i] = uniDist(gen);
+ }
+}
+
+TEST(DepthProcessorTest, LinkToLibrary) {
+ void *libHandle;
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle != nullptr) {
+ dlclose(libHandle);
+ }
+}
+
+TEST(DepthProcessorTest, BadInput) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ DepthPhotoInputFrame inputFrame;
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), nullptr,
+ &actualDepthPhotoSize), 0);
+
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(), nullptr),
+ 0);
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) && (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ // The final depth photo must consist of three jpeg images:
+ // - the main color image
+ // - the depth map image
+ // - the confidence map image
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoExifOrientation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ ExifOrientation exifOrientations[] = { ExifOrientation::ORIENTATION_UNDEFINED,
+ ExifOrientation::ORIENTATION_0_DEGREES, ExifOrientation::ORIENTATION_90_DEGREES,
+ ExifOrientation::ORIENTATION_180_DEGREES, ExifOrientation::ORIENTATION_270_DEGREES };
+ for (auto exifOrientation : exifOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ /*switchDimensions*/ false, &colorJpegBuffer);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(),
+ colorJpegBuffer.size(), &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+ }
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ if (exifOrientation == ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // depth map.
+ ASSERT_EQ(depthJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ }
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize,
+ confidenceMapSize, &confidenceJpegExifOrientation), OK);
+ if (exifOrientation == ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // confidence map.
+ ASSERT_EQ(confidenceJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ }
+ }
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoPhysicalRotation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ // In case of physical rotation, the EXIF orientation must always be 0.
+ auto exifOrientation = ExifOrientation::ORIENTATION_0_DEGREES;
+ DepthPhotoOrientation depthOrientations[] = {
+ DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES };
+ for (auto depthOrientation : depthOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ bool switchDimensions = false;
+ size_t expectedWidth = kTestBufferWidth;
+ size_t expectedHeight = kTestBufferHeight;
+ if ((depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES) ||
+ (depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES)) {
+ switchDimensions = true;
+ expectedWidth = kTestBufferHeight;
+ expectedHeight = kTestBufferWidth;
+ }
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ switchDimensions, &colorJpegBuffer);
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(), colorJpegBuffer.size(),
+ &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ inputFrame.mOrientation = depthOrientation;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ size_t depthMapWidth, depthMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthMapWidth, &depthMapHeight), OK);
+ ASSERT_EQ(depthMapWidth, expectedWidth);
+ ASSERT_EQ(depthMapHeight, expectedHeight);
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceJpegExifOrientation), OK);
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ size_t confidenceMapWidth, confidenceMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceMapWidth, &confidenceMapHeight), OK);
+ ASSERT_EQ(confidenceMapWidth, expectedWidth);
+ ASSERT_EQ(confidenceMapHeight, expectedHeight);
+ }
+
+ dlclose(libHandle);
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.cpp b/services/camera/libcameraservice/tests/NV12Compressor.cpp
new file mode 100644
index 0000000..0a41a1f
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.cpp
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "Test_NV12Compressor"
+
+#include "NV12Compressor.h"
+
+#include <libexif/exif-data.h>
+#include <netinet/in.h>
+
+using namespace android;
+using namespace android::camera3;
+
+namespace std {
+template <>
+struct default_delete<ExifEntry> {
+ inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); }
+};
+
+template <>
+struct default_delete<ExifData> {
+ inline void operator()(ExifData* data) const { exif_data_unref(data); }
+};
+
+} // namespace std
+
+bool NV12Compressor::compress(const unsigned char* data, int width, int height, int quality) {
+ if (!configureCompressor(width, height, quality)) {
+ // the method will have logged a more detailed error message than we can
+ // provide here so just return.
+ return false;
+ }
+
+ return compressData(data, /*exifData*/ nullptr);
+}
+
+bool NV12Compressor::compressWithExifOrientation(const unsigned char* data, int width, int height,
+ int quality, android::camera3::ExifOrientation exifValue) {
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ if (exifData.get() == nullptr) {
+ return false;
+ }
+
+ exif_data_set_option(exifData.get(), EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exifData.get(), EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exifData.get(), EXIF_BYTE_ORDER_INTEL);
+ std::unique_ptr<ExifEntry> exifEntry(exif_entry_new());
+ if (exifEntry.get() == nullptr) {
+ return false;
+ }
+
+ exifEntry->tag = EXIF_TAG_ORIENTATION;
+ exif_content_add_entry(exifData->ifd[EXIF_IFD_0], exifEntry.get());
+ exif_entry_initialize(exifEntry.get(), exifEntry->tag);
+ exif_set_short(exifEntry->data, EXIF_BYTE_ORDER_INTEL, exifValue);
+
+ if (!configureCompressor(width, height, quality)) {
+ return false;
+ }
+
+ return compressData(data, exifData.get());
+}
+
+const std::vector<uint8_t>& NV12Compressor::getCompressedData() const {
+ return mDestManager.mBuffer;
+}
+
+bool NV12Compressor::configureCompressor(int width, int height, int quality) {
+ mCompressInfo.err = jpeg_std_error(&mErrorManager);
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case setup fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_create_compress(&mCompressInfo);
+
+ mCompressInfo.image_width = width;
+ mCompressInfo.image_height = height;
+ mCompressInfo.input_components = 3;
+ mCompressInfo.in_color_space = JCS_YCbCr;
+ jpeg_set_defaults(&mCompressInfo);
+
+ jpeg_set_quality(&mCompressInfo, quality, TRUE);
+ // It may seem weird to set color space here again but this will also set
+ // other fields. These fields might be overwritten by jpeg_set_defaults
+ jpeg_set_colorspace(&mCompressInfo, JCS_YCbCr);
+ mCompressInfo.raw_data_in = TRUE;
+ mCompressInfo.dct_method = JDCT_IFAST;
+ // Set sampling factors
+ mCompressInfo.comp_info[0].h_samp_factor = 2;
+ mCompressInfo.comp_info[0].v_samp_factor = 2;
+ mCompressInfo.comp_info[1].h_samp_factor = 1;
+ mCompressInfo.comp_info[1].v_samp_factor = 1;
+ mCompressInfo.comp_info[2].h_samp_factor = 1;
+ mCompressInfo.comp_info[2].v_samp_factor = 1;
+
+ mCompressInfo.dest = &mDestManager;
+
+ return true;
+}
+
+static void deinterleave(const uint8_t* vuPlanar, std::vector<uint8_t>& uRows,
+ std::vector<uint8_t>& vRows, int rowIndex, int width, int height, int stride) {
+ int numRows = (height - rowIndex) / 2;
+ if (numRows > 8) numRows = 8;
+ for (int row = 0; row < numRows; ++row) {
+ int offset = ((rowIndex >> 1) + row) * stride;
+ const uint8_t* vu = vuPlanar + offset;
+ for (int i = 0; i < (width >> 1); ++i) {
+ int index = row * (width >> 1) + i;
+ uRows[index] = vu[1];
+ vRows[index] = vu[0];
+ vu += 2;
+ }
+ }
+}
+
+bool NV12Compressor::compressData(const unsigned char* data, ExifData* exifData) {
+ const uint8_t* y[16];
+ const uint8_t* cb[8];
+ const uint8_t* cr[8];
+ const uint8_t** planes[3] = { y, cb, cr };
+
+ int i, offset;
+ int width = mCompressInfo.image_width;
+ int height = mCompressInfo.image_height;
+ const uint8_t* yPlanar = data;
+ const uint8_t* vuPlanar = data + (width * height);
+ std::vector<uint8_t> uRows(8 * (width >> 1));
+ std::vector<uint8_t> vRows(8 * (width >> 1));
+
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case compression fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_start_compress(&mCompressInfo, TRUE);
+
+ attachExifData(exifData);
+
+ // process 16 lines of Y and 8 lines of U/V each time.
+ while (mCompressInfo.next_scanline < mCompressInfo.image_height) {
+ //deinterleave u and v
+ deinterleave(vuPlanar, uRows, vRows, mCompressInfo.next_scanline,
+ width, height, width);
+
+ // Jpeg library ignores the rows whose indices are greater than height.
+ for (i = 0; i < 16; i++) {
+ // y row
+ y[i] = yPlanar + (mCompressInfo.next_scanline + i) * width;
+
+ // construct u row and v row
+ if ((i & 1) == 0) {
+ // height and width are both halved because of downsampling
+ offset = (i >> 1) * (width >> 1);
+ cb[i/2] = &uRows[offset];
+ cr[i/2] = &vRows[offset];
+ }
+ }
+ jpeg_write_raw_data(&mCompressInfo, const_cast<JSAMPIMAGE>(planes), 16);
+ }
+
+ jpeg_finish_compress(&mCompressInfo);
+ jpeg_destroy_compress(&mCompressInfo);
+
+ return true;
+}
+
+bool NV12Compressor::attachExifData(ExifData* exifData) {
+ if (exifData == nullptr) {
+ // This is not an error, we don't require EXIF data
+ return true;
+ }
+
+ // Save the EXIF data to memory
+ unsigned char* rawData = nullptr;
+ unsigned int size = 0;
+ exif_data_save_data(exifData, &rawData, &size);
+ if (rawData == nullptr) {
+ ALOGE("Failed to create EXIF data block");
+ return false;
+ }
+
+ jpeg_write_marker(&mCompressInfo, JPEG_APP0 + 1, rawData, size);
+ free(rawData);
+ return true;
+}
+
+NV12Compressor::ErrorManager::ErrorManager() {
+ error_exit = &onJpegError;
+}
+
+void NV12Compressor::ErrorManager::onJpegError(j_common_ptr cinfo) {
+ // NOTE! Do not construct any non-trivial objects in this method at the top
+ // scope. Their destructors will not be called. If you do need such an
+ // object create a local scope that does not include the longjmp call,
+ // that ensures the object is destroyed before longjmp is called.
+ ErrorManager* errorManager = reinterpret_cast<ErrorManager*>(cinfo->err);
+
+ // Format and log error message
+ char errorMessage[JMSG_LENGTH_MAX];
+ (*errorManager->format_message)(cinfo, errorMessage);
+ errorMessage[sizeof(errorMessage) - 1] = '\0';
+ ALOGE("JPEG compression error: %s", errorMessage);
+ jpeg_destroy(cinfo);
+
+ // And through the looking glass we go
+ longjmp(errorManager->mJumpBuffer, 1);
+}
+
+NV12Compressor::DestinationManager::DestinationManager() {
+ init_destination = &initDestination;
+ empty_output_buffer = &emptyOutputBuffer;
+ term_destination = &termDestination;
+}
+
+void NV12Compressor::DestinationManager::initDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Start out with some arbitrary but not too large buffer size
+ manager->mBuffer.resize(16 * 1024);
+ manager->next_output_byte = &manager->mBuffer[0];
+ manager->free_in_buffer = manager->mBuffer.size();
+}
+
+boolean NV12Compressor::DestinationManager::emptyOutputBuffer(
+ j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Keep doubling the size of the buffer for a very low, amortized
+ // performance cost of the allocations
+ size_t oldSize = manager->mBuffer.size();
+ manager->mBuffer.resize(oldSize * 2);
+ manager->next_output_byte = &manager->mBuffer[oldSize];
+ manager->free_in_buffer = manager->mBuffer.size() - oldSize;
+ return manager->free_in_buffer != 0;
+}
+
+void NV12Compressor::DestinationManager::termDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Resize down to the exact size of the output, that is remove as many
+ // bytes as there are left in the buffer
+ manager->mBuffer.resize(manager->mBuffer.size() - manager->free_in_buffer);
+}
+
+status_t NV12Compressor::findJpegSize(uint8_t *jpegBuffer, size_t maxSize, size_t *size /*out*/) {
+ if ((size == nullptr) || (jpegBuffer == nullptr)) {
+ return BAD_VALUE;
+ }
+
+ if (checkJpegStart(jpegBuffer) == 0) {
+ return BAD_VALUE;
+ }
+
+ // Read JFIF segment markers, skip over segment data
+ *size = kMarkerLength; //jump to Start Of Image
+ while (*size <= maxSize - kMarkerLength) {
+ segment_t *segment = (segment_t*)(jpegBuffer + *size);
+ uint8_t type = checkJpegMarker(segment->marker);
+ if (type == 0) { // invalid marker, no more segments, begin JPEG data
+ break;
+ }
+ if (type == kEndOfImage || *size > maxSize - sizeof(segment_t)) {
+ return BAD_VALUE;
+ }
+
+ size_t length = ntohs(segment->length);
+ *size += length + kMarkerLength;
+ }
+
+ // Find End of Image
+ // Scan JPEG buffer until End of Image
+ bool foundEnd = false;
+ for ( ; *size <= maxSize - kMarkerLength; (*size)++) {
+ if (checkJpegEnd(jpegBuffer + *size)) {
+ foundEnd = true;
+ *size += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundEnd) {
+ return BAD_VALUE;
+ }
+
+ if (*size > maxSize) {
+ *size = maxSize;
+ }
+
+ return OK;
+}
+
+status_t NV12Compressor::getJpegImageDimensions(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, size_t *width /*out*/, size_t *height /*out*/) {
+ if ((jpegBuffer == nullptr) || (width == nullptr) || (height == nullptr) ||
+ (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ // Scan JPEG buffer until Start of Frame
+ bool foundSOF = false;
+ size_t currentPos;
+ for (currentPos = 0; currentPos <= jpegBufferSize - kMarkerLength; currentPos++) {
+ if (checkStartOfFrame(jpegBuffer + currentPos)) {
+ foundSOF = true;
+ currentPos += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundSOF) {
+ ALOGE("%s: Start of Frame not found", __func__);
+ return BAD_VALUE;
+ }
+
+ sof_t *startOfFrame = reinterpret_cast<sof_t *> (jpegBuffer + currentPos);
+ *width = ntohs(startOfFrame->width);
+ *height = ntohs(startOfFrame->height);
+
+ return OK;
+}
+
+status_t NV12Compressor::getExifOrientation(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ ExifOrientation *exifValue /*out*/) {
+ if ((jpegBuffer == nullptr) || (exifValue == nullptr) || (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ exif_data_load_data(exifData.get(), jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ return BAD_VALUE;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data,
+ exif_data_get_byte_order(exifData.get()));
+ status_t ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ *exifValue = static_cast<ExifOrientation> (orientationValue);
+ ret = OK;
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %u", __FUNCTION__, orientationValue);
+ ret = BAD_VALUE;
+ }
+
+ return ret;
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.h b/services/camera/libcameraservice/tests/NV12Compressor.h
new file mode 100644
index 0000000..ee22d5e
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+#define TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
+#include <setjmp.h>
+#include <stdlib.h>
+extern "C" {
+#include <jpeglib.h>
+#include <jerror.h>
+}
+
+#include <utils/Errors.h>
+#include <vector>
+
+#include "../utils/ExifUtils.h"
+
+struct _ExifData;
+typedef _ExifData ExifData;
+
+class NV12Compressor {
+public:
+ NV12Compressor() {}
+
+ /* Compress |data| which represents raw NV12 encoded data of dimensions
+ * |width| * |height|.
+ */
+ bool compress(const unsigned char* data, int width, int height, int quality);
+ bool compressWithExifOrientation(const unsigned char* data, int width, int height, int quality,
+ android::camera3::ExifOrientation exifValue);
+
+ /* Get a reference to the compressed data, this will return an empty vector
+ * if compress has not been called yet
+ */
+ const std::vector<unsigned char>& getCompressedData() const;
+
+ // Utility methods
+ static android::status_t findJpegSize(uint8_t *jpegBuffer, size_t maxSize,
+ size_t *size /*out*/);
+
+ static android::status_t getExifOrientation(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, android::camera3::ExifOrientation *exifValue /*out*/);
+
+ /* Get Jpeg image dimensions from the first Start Of Frame. Note that, because of the
+ * way the jpeg buffer is scanned, if the image contains a thumbnail the size returned
+ * will be that of the thumbnail and not the main image.
+ */
+ static android::status_t getJpegImageDimensions(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ size_t *width /*out*/, size_t *height /*out*/);
+
+private:
+
+ struct DestinationManager : jpeg_destination_mgr {
+ DestinationManager();
+
+ static void initDestination(j_compress_ptr cinfo);
+ static boolean emptyOutputBuffer(j_compress_ptr cinfo);
+ static void termDestination(j_compress_ptr cinfo);
+
+ std::vector<unsigned char> mBuffer;
+ };
+
+ struct ErrorManager : jpeg_error_mgr {
+ ErrorManager();
+
+ static void onJpegError(j_common_ptr cinfo);
+
+ jmp_buf mJumpBuffer;
+ };
+
+ static const size_t kMarkerLength = 2; // length of a marker
+ static const uint8_t kMarker = 0xFF; // First byte of marker
+ static const uint8_t kStartOfImage = 0xD8; // Start of Image
+ static const uint8_t kEndOfImage = 0xD9; // End of Image
+ static const uint8_t kStartOfFrame = 0xC0; // Start of Frame
+
+ struct __attribute__((packed)) segment_t {
+ uint8_t marker[kMarkerLength];
+ uint16_t length;
+ };
+
+ struct __attribute__((packed)) sof_t {
+ uint16_t length;
+ uint8_t precision;
+ uint16_t height;
+ uint16_t width;
+ };
+
+ // check for Start of Frame (SOF) marker
+ static bool checkStartOfFrame(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfFrame;
+ }
+
+ // check for start of image marker
+ static bool checkJpegStart(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfImage;
+ }
+
+ // check for End of Image marker
+ static bool checkJpegEnd(uint8_t *buf) {
+ return buf[0] == kMarker && buf[1] == kEndOfImage;
+ }
+
+ // check for arbitrary marker, returns marker type (second byte)
+ // returns 0 if no marker found. Note: 0x00 is not a valid marker type
+ static uint8_t checkJpegMarker(uint8_t *buf) {
+ return (buf[0] == kMarker) ? buf[1] : 0;
+ }
+
+ jpeg_compress_struct mCompressInfo;
+ DestinationManager mDestManager;
+ ErrorManager mErrorManager;
+
+ bool configureCompressor(int width, int height, int quality);
+ bool compressData(const unsigned char* data, ExifData* exifData);
+ bool attachExifData(ExifData* exifData);
+};
+
+#endif // TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index 4dea8b5..c0afdc1 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -55,6 +55,7 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize);
+ virtual bool initializeEmpty();
// set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
@@ -150,7 +151,11 @@
// sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation);
+ virtual bool setOrientation(uint16_t degrees);
+
+ // sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientationValue(ExifOrientation orientationValue);
// sets the shutter speed.
// Returns false if memory allocation fails.
@@ -314,6 +319,26 @@
return true;
}
+bool ExifUtilsImpl::initializeEmpty() {
+ reset();
+ exif_data_ = exif_data_new();
+ if (exif_data_ == nullptr) {
+ ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__);
+ return false;
+ }
+ // set the image options.
+ exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL);
+
+ // set exif version to 2.2.
+ if (!setExifVersion("0220")) {
+ return false;
+ }
+
+ return true;
+}
+
bool ExifUtilsImpl::setAperture(float aperture) {
float apexValue = convertToApex(aperture);
SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE,
@@ -609,32 +634,26 @@
return true;
}
-bool ExifUtilsImpl::setOrientation(uint16_t orientation) {
- /*
- * Orientation value:
- * 1 2 3 4 5 6 7 8
- *
- * 888888 888888 88 88 8888888888 88 88 8888888888
- * 88 88 88 88 88 88 88 88 88 88 88 88
- * 8888 8888 8888 8888 88 8888888888 8888888888 88
- * 88 88 88 88
- * 88 88 888888 888888
- */
- int value = 1;
- switch (orientation) {
+bool ExifUtilsImpl::setOrientation(uint16_t degrees) {
+ ExifOrientation value = ExifOrientation::ORIENTATION_0_DEGREES;
+ switch (degrees) {
case 90:
- value = 6;
+ value = ExifOrientation::ORIENTATION_90_DEGREES;
break;
case 180:
- value = 3;
+ value = ExifOrientation::ORIENTATION_180_DEGREES;
break;
case 270:
- value = 8;
+ value = ExifOrientation::ORIENTATION_270_DEGREES;
break;
default:
break;
}
- SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
+ return setOrientationValue(value);
+}
+
+bool ExifUtilsImpl::setOrientationValue(ExifOrientation orientationValue) {
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, orientationValue);
return true;
}
diff --git a/services/camera/libcameraservice/utils/ExifUtils.h b/services/camera/libcameraservice/utils/ExifUtils.h
index c78bab9..f1d0205 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.h
+++ b/services/camera/libcameraservice/utils/ExifUtils.h
@@ -22,6 +22,24 @@
namespace android {
namespace camera3 {
+/*
+ * Orientation value:
+ * 1 2 3 4 5 6 7 8
+ *
+ * 888888 888888 88 88 8888888888 88 88 8888888888
+ * 88 88 88 88 88 88 88 88 88 88 88 88
+ * 8888 8888 8888 8888 88 8888888888 8888888888 88
+ * 88 88 88 88
+ * 88 88 888888 888888
+ */
+enum ExifOrientation : uint16_t {
+ ORIENTATION_UNDEFINED = 0x0,
+ ORIENTATION_0_DEGREES = 0x1,
+ ORIENTATION_90_DEGREES = 0x6,
+ ORIENTATION_180_DEGREES = 0x3,
+ ORIENTATION_270_DEGREES = 0x8,
+};
+
// This is based on the camera HIDL shim implementation, which was in turned
// based on original ChromeOS ARC implementation of a V4L2 HAL
@@ -49,6 +67,7 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0;
+ virtual bool initializeEmpty() = 0;
// Set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
@@ -142,7 +161,11 @@
// Sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation) = 0;
+ virtual bool setOrientation(uint16_t degrees) = 0;
+
+ // Sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientationValue(ExifOrientation orientationValue) = 0;
// Sets the shutter speed.
// Returns false if memory allocation fails.
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 80d3630..1470de2 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -28,6 +28,7 @@
"libcodec2_soft_amrwbdec",
"libcodec2_soft_amrwbenc",
"libcodec2_soft_hevcdec",
+ "libcodec2_soft_hevcenc",
"libcodec2_soft_g711alawdec",
"libcodec2_soft_g711mlawdec",
"libcodec2_soft_mpeg2dec",