Merge "media: enable Codec 2.0 by default again" into qt-dev
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 7786856..a2ee65d 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -77,7 +77,7 @@
cc_library_shared {
name: "libcamera2ndk_vendor",
- vendor_available: true,
+ vendor: true,
srcs: [
"ndk_vendor/impl/ACameraDevice.cpp",
"ndk_vendor/impl/ACameraManager.cpp",
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index df4dadb..769895c 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -501,15 +501,13 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- size_t dstYStride = align(width, 16);
- size_t dstUVStride = align(dstYStride / 2, 16);
uint8_t* dstStart = dst;
-
for (size_t i = 0; i < height; ++i) {
memcpy(dst, srcY, width);
srcY += srcYStride;
@@ -597,11 +595,10 @@
static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
+ size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstStride * height;
- size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstYSize = dstYStride * height;
size_t dstUVSize = dstUVStride * height / 2;
uint8_t *dstV = dstY + dstYSize;
uint8_t *dstU = dstV + dstUVSize;
@@ -612,7 +609,7 @@
}
srcY += srcYStride;
- dstY += dstStride;
+ dstY += dstYStride;
}
for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -699,6 +696,9 @@
size_t srcYStride = img->stride[AOM_PLANE_Y];
size_t srcUStride = img->stride[AOM_PLANE_U];
size_t srcVStride = img->stride[AOM_PLANE_V];
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
if (img->fmt == AOM_IMG_FMT_I42016) {
const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
@@ -708,20 +708,23 @@
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride / sizeof(uint32_t),
mWidth, mHeight);
} else {
convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride, dstUVStride,
mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ copyOutputBufferToYuvPlanarFrame(
+ dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
finishWork(*(int64_t*)img->user_priv, work, std::move(block));
block = nullptr;
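
The decoder previously wrote output assuming a 16-aligned destination stride (align(width, 16)); it now reads the actual Y and UV row increments from the mapped view's C2PlanarLayout, so rows land correctly in whatever buffer the allocator produced. A minimal stride-aware plane copy, for illustration only (the standalone function and its names are assumptions, not code from this patch):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Copy `rows` rows of `rowBytes` visible pixels, honoring distinct
    // source (decoder) and destination (allocator) strides.
    static void copyPlane(uint8_t *dst, const uint8_t *src,
                          size_t dstStride, size_t srcStride,
                          size_t rowBytes, size_t rows) {
        for (size_t i = 0; i < rows; ++i) {
            std::memcpy(dst, src, rowBytes);  // only the visible pixels
            dst += dstStride;                 // skip destination padding
            src += srcStride;                 // skip source padding
        }
    }

For a 320-wide Y plane with a 384-byte destination stride, each row writes 320 bytes and then skips 64 bytes of padding instead of corrupting the next row.
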
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 0ae2a5a..8d9f21a 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -55,7 +55,6 @@
noPrivateBuffers(); // TODO: account for our buffers here
noInputReferences();
noOutputReferences();
- noInputLatency();
noTimeStretch();
setDerivedInstance(this);
@@ -82,6 +81,13 @@
.build());
addParameter(
+ DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+ .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
+ .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
+ .withSetter(Setter<decltype(*mActualInputDelay)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
.withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
// TODO: More restriction?
@@ -365,9 +371,9 @@
mAVCEncLevel(41),
mStarted(false),
mSawInputEOS(false),
- mSawOutputEOS(false),
mSignalledError(false),
mCodecCtx(nullptr),
+ mOutBlock(nullptr),
// TODO: output buffer size
mOutBufferSize(524288) {
@@ -380,7 +386,7 @@
}
C2SoftAvcEnc::~C2SoftAvcEnc() {
- releaseEncoder();
+ onRelease();
}
c2_status_t C2SoftAvcEnc::onInit() {
@@ -394,11 +400,17 @@
void C2SoftAvcEnc::onReset() {
// TODO: use IVE_CMD_CTL_RESET?
releaseEncoder();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
initEncParams();
}
void C2SoftAvcEnc::onRelease() {
releaseEncoder();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
c2_status_t C2SoftAvcEnc::onFlush_sm() {
@@ -1106,8 +1118,10 @@
const C2GraphicView *const input,
uint8_t *base,
uint32_t capacity,
- uint64_t timestamp) {
+ uint64_t workIndex) {
iv_raw_buf_t *ps_inp_raw_buf;
+ memset(ps_encode_ip, 0, sizeof(*ps_encode_ip));
+ memset(ps_encode_op, 0, sizeof(*ps_encode_op));
ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
ps_encode_ip->s_out_buf.pv_buf = base;
@@ -1123,8 +1137,8 @@
ps_encode_ip->u4_mb_info_type = 0;
ps_encode_ip->u4_pic_info_type = 0;
ps_encode_ip->u4_is_last = 0;
- ps_encode_ip->u4_timestamp_high = timestamp >> 32;
- ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+ ps_encode_ip->u4_timestamp_high = workIndex >> 32;
+ ps_encode_ip->u4_timestamp_low = workIndex & 0xFFFFFFFF;
ps_encode_op->s_out_buf.pv_buf = nullptr;
/* Initialize color formats */
@@ -1132,7 +1146,7 @@
ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
if (input == nullptr) {
- if (mSawInputEOS){
+ if (mSawInputEOS) {
ps_encode_ip->u4_is_last = 1;
}
return C2_OK;
@@ -1271,17 +1285,46 @@
return C2_OK;
}
+void C2SoftAvcEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work> &work,
+ ive_video_encode_op_t *ps_encode_op) {
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(mOutBlock, 0, ps_encode_op->s_out_buf.u4_bytes);
+ if (IV_IDR_FRAME == ps_encode_op->u4_encoded_frame_type) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2Config::SYNC_FRAME));
+ }
+ mOutBlock = nullptr;
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ if (mSawInputEOS) {
+ work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
+ }
+ } else {
+ finish(workIndex, fillWork);
+ }
+}
+
void C2SoftAvcEnc::process(
const std::unique_ptr<C2Work> &work,
const std::shared_ptr<C2BlockPool> &pool) {
// Initialize output work
work->result = C2_OK;
- work->workletsProcessed = 1u;
+ work->workletsProcessed = 0u;
work->worklets.front()->output.flags = work->input.flags;
IV_STATUS_T status;
- WORD32 timeDelay, timeTaken;
- uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+ WORD32 timeDelay = 0;
+ WORD32 timeTaken = 0;
+ uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
// Initialize encoder if not already initialized
if (mCodecCtx == nullptr) {
@@ -1289,27 +1332,29 @@
ALOGE("Failed to initialize encoder");
mSignalledError = true;
work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
return;
}
}
if (mSignalledError) {
return;
}
-
// while (!mSawOutputEOS && !outQueue.empty()) {
c2_status_t error;
ive_video_encode_ip_t s_encode_ip;
ive_video_encode_op_t s_encode_op;
+ memset(&s_encode_op, 0, sizeof(s_encode_op));
if (!mSpsPpsHeaderReceived) {
constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
uint8_t header[kHeaderLength];
error = setEncodeArgs(
- &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, timestamp);
+ &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
if (error != C2_OK) {
ALOGE("setEncodeArgs failed: %d", error);
mSignalledError = true;
work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
return;
}
status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
@@ -1317,6 +1362,7 @@
if (IV_SUCCESS != status) {
ALOGE("Encode header failed = 0x%x\n",
s_encode_op.u4_error_code);
+ work->workletsProcessed = 1u;
return;
} else {
ALOGV("Bytes Generated in header %d\n",
@@ -1331,6 +1377,7 @@
ALOGE("CSD allocation failed");
mSignalledError = true;
work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
return;
}
memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
@@ -1338,6 +1385,10 @@
DUMP_TO_FILE(
mOutFile, csd->m.value, csd->flexCount());
+ if (work->input.buffers.empty()) {
+ work->workletsProcessed = 1u;
+ return;
+ }
}
// handle dynamic config parameters
@@ -1394,34 +1445,41 @@
inputBuffer->data().graphicBlocks().front().map().get());
if (view->error() != C2_OK) {
ALOGE("graphic view map err = %d", view->error());
+ work->workletsProcessed = 1u;
return;
}
}
- std::shared_ptr<C2LinearBlock> block;
-
do {
- C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- // TODO: error handling, proper usage, etc.
- c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
- if (err != C2_OK) {
- ALOGE("fetch linear block err = %d", err);
- work->result = err;
- return;
+ if (mSawInputEOS && work->input.buffers.empty()) break;
+ if (!mOutBlock) {
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
+ C2MemoryUsage::CPU_WRITE};
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err =
+ pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetch linear block err = %d", err);
+ work->result = err;
+ work->workletsProcessed = 1u;
+ return;
+ }
}
- C2WriteView wView = block->map().get();
+ C2WriteView wView = mOutBlock->map().get();
if (wView.error() != C2_OK) {
ALOGE("write view map err = %d", wView.error());
work->result = wView.error();
+ work->workletsProcessed = 1u;
return;
}
error = setEncodeArgs(
- &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), timestamp);
+ &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
if (error != C2_OK) {
ALOGE("setEncodeArgs failed : %d", error);
mSignalledError = true;
work->result = error;
+ work->workletsProcessed = 1u;
return;
}
@@ -1439,12 +1497,14 @@
if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
// TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
mOutBufferSize *= 2;
+ mOutBlock.reset();
continue;
}
ALOGE("Encode Frame failed = 0x%x\n",
s_encode_op.u4_error_code);
mSignalledError = true;
work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
return;
}
} while (IV_SUCCESS != status);
@@ -1473,41 +1533,104 @@
}
}
- work->worklets.front()->output.flags = work->input.flags;
- work->worklets.front()->output.ordinal = work->input.ordinal;
- work->worklets.front()->output.ordinal.timestamp =
- ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
- work->worklets.front()->output.buffers.clear();
-
- if (s_encode_op.s_out_buf.u4_bytes) {
- std::shared_ptr<C2Buffer> buffer =
- createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
- if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
- ALOGV("IDR frame produced");
- buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
- 0u /* stream id */, C2Config::SYNC_FRAME));
+ if (s_encode_op.output_present) {
+ if (!s_encode_op.s_out_buf.u4_bytes) {
+ ALOGE("Error: Output present but bytes generated is zero");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ return;
}
- work->worklets.front()->output.buffers.push_back(buffer);
+ uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
+ s_encode_op.u4_timestamp_low;
+ finishWork(workId, work, &s_encode_op);
+ }
+ if (mSawInputEOS) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ }
+}
+
+c2_status_t C2SoftAvcEnc::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
}
- if (s_encode_op.u4_is_last) {
- // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
- mSawOutputEOS = true;
- } else {
- // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ while (true) {
+ if (!mOutBlock) {
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
+ C2MemoryUsage::CPU_WRITE};
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err =
+ pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetch linear block err = %d", err);
+ work->result = err;
+ work->workletsProcessed = 1u;
+ return err;
+ }
+ }
+ C2WriteView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ return C2_CORRUPTED;
+ }
+ ive_video_encode_ip_t s_encode_ip;
+ ive_video_encode_op_t s_encode_op;
+ if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+ wView.base(), wView.capacity(), 0)) {
+ ALOGE("setEncodeArgs failed for drainInternal");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ return C2_CORRUPTED;
+ }
+ (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ /* If encoder frees up an input buffer, mark it as free */
+ if (freed != nullptr) {
+ if (mBuffers.count(freed) == 0u) {
+ ALOGD("buffer not tracked");
+ } else {
+ // Release input buffer reference
+ mBuffers.erase(freed);
+ mConversionBuffersInUse.erase(freed);
+ }
+ }
+
+ if (s_encode_op.output_present) {
+ uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
+ s_encode_op.u4_timestamp_low;
+ finishWork(workId, work, &s_encode_op);
+ } else {
+ if (work->workletsProcessed != 1u) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.buffers.clear();
+ work->workletsProcessed = 1u;
+ }
+ break;
+ }
}
+
+ return C2_OK;
}
c2_status_t C2SoftAvcEnc::drain(
uint32_t drainMode,
const std::shared_ptr<C2BlockPool> &pool) {
- // TODO: use IVE_CMD_CTL_FLUSH?
- (void)drainMode;
- (void)pool;
- return C2_OK;
+ return drainInternal(drainMode, pool, nullptr);
}
-
class C2SoftAvcEncFactory : public C2ComponentFactory {
public:
C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
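
With B-frames the encoder may hold inputs and emit outputs out of order, so the component now advertises an actual input delay via C2_PARAMKEY_INPUT_DELAY and threads the C2 work index, rather than the timestamp, through the encoder's 32-bit timestamp pair; finishWork() reassembles it to match each output back to its originating work. A sketch of that round trip (standalone, with assumed struct names mirroring u4_timestamp_high/low):

    #include <cassert>
    #include <cstdint>

    struct TimestampPair { uint32_t high; uint32_t low; };

    static TimestampPair packWorkIndex(uint64_t workIndex) {
        return { static_cast<uint32_t>(workIndex >> 32),
                 static_cast<uint32_t>(workIndex & 0xFFFFFFFF) };
    }

    static uint64_t unpackWorkIndex(TimestampPair ts) {
        return (static_cast<uint64_t>(ts.high) << 32) | ts.low;
    }

    int main() {
        uint64_t workIndex = 0x1234567890ULL;
        assert(unpackWorkIndex(packWorkIndex(workIndex)) == workIndex);
    }
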
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 58a86d8..555055b 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -33,6 +33,7 @@
#define LEN_STATUS_BUFFER (10 * 1024)
#define MAX_VBV_BUFF_SIZE (120 * 16384)
#define MAX_NUM_IO_BUFS 3
+#define MAX_B_FRAMES 1
#define DEFAULT_MAX_REF_FRM 2
#define DEFAULT_MAX_REORDER_FRM 0
@@ -167,7 +168,6 @@
bool mSpsPpsHeaderReceived;
bool mSawInputEOS;
- bool mSawOutputEOS;
bool mSignalledError;
bool mIntra4x4;
bool mEnableFastSad;
@@ -183,6 +183,8 @@
size_t mNumMemRecords; // Number of memory records requested by codec
size_t mNumCores; // Number of cores used by the codec
+ std::shared_ptr<C2LinearBlock> mOutBlock;
+
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -230,7 +232,13 @@
const C2GraphicView *const input,
uint8_t *base,
uint32_t capacity,
- uint64_t timestamp);
+ uint64_t workIndex);
+ void finishWork(uint64_t workIndex,
+ const std::unique_ptr<C2Work> &work,
+ ive_video_encode_op_t *ps_encode_op);
+ c2_status_t drainInternal(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
C2_DO_NOT_COPY(C2SoftAvcEnc);
};
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index b27ee4e..efeab6c 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -56,11 +56,20 @@
noPrivateBuffers(); // TODO: account for our buffers here
noInputReferences();
noOutputReferences();
- noInputLatency();
noTimeStretch();
setDerivedInstance(this);
addParameter(
+ DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+ .withDefault(new C2PortActualDelayTuning::input(
+ DEFAULT_B_FRAMES + DEFAULT_RC_LOOKAHEAD))
+ .withFields({C2F(mActualInputDelay, value).inRange(
+ 0, MAX_B_FRAMES + MAX_RC_LOOKAHEAD)})
+ .withSetter(
+ Setter<decltype(*mActualInputDelay)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
.withConstValue(new C2ComponentAttributesSetting(
C2Component::ATTRIB_IS_TEMPORAL))
@@ -462,7 +471,8 @@
mIvVideoColorFormat = IV_YUV_420P;
mEncParams.s_multi_thrd_prms.i4_max_num_cores = mNumCores;
mEncParams.s_out_strm_prms.i4_codec_profile = mHevcEncProfile;
- mEncParams.s_lap_prms.i4_rc_look_ahead_pics = 0;
+ mEncParams.s_lap_prms.i4_rc_look_ahead_pics = DEFAULT_RC_LOOKAHEAD;
+ mEncParams.s_coding_tools_prms.i4_max_temporal_layers = DEFAULT_B_FRAMES;
switch (mBitrateMode->value) {
case C2Config::BITRATE_IGNORE:
@@ -512,10 +522,9 @@
c2_status_t C2SoftHevcEnc::drain(uint32_t drainMode,
const std::shared_ptr<C2BlockPool>& pool) {
- (void)drainMode;
- (void)pool;
- return C2_OK;
+ return drainInternal(drainMode, pool, nullptr);
}
+
c2_status_t C2SoftHevcEnc::initEncoder() {
CHECK(!mCodecCtx);
{
@@ -552,7 +561,7 @@
c2_status_t C2SoftHevcEnc::setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
const C2GraphicView* const input,
- uint64_t timestamp) {
+ uint64_t workIndex) {
ihevce_static_cfg_params_t* params = &mEncParams;
memset(ps_encode_ip, 0, sizeof(*ps_encode_ip));
@@ -696,7 +705,92 @@
ps_encode_ip->i4_curr_peak_bitrate =
params->s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0];
ps_encode_ip->i4_curr_rate_factor = params->s_config_prms.i4_rate_factor;
- ps_encode_ip->u8_pts = timestamp;
+ ps_encode_ip->u8_pts = workIndex;
+ return C2_OK;
+}
+
+void C2SoftHevcEnc::finishWork(uint64_t index,
+ const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool,
+ ihevce_out_buf_t* ps_encode_op) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ c2_status_t status =
+ pool->fetchLinearBlock(ps_encode_op->i4_bytes_generated, usage, &block);
+ if (C2_OK != status) {
+ ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+ mSignalledError = true;
+ work->result = status;
+ work->workletsProcessed = 1u;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (C2_OK != wView.error()) {
+ ALOGE("write view map failed with status 0x%x", wView.error());
+ mSignalledError = true;
+ work->result = wView.error();
+ work->workletsProcessed = 1u;
+ return;
+ }
+ memcpy(wView.data(), ps_encode_op->pu1_output_buf,
+ ps_encode_op->i4_bytes_generated);
+
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, ps_encode_op->i4_bytes_generated);
+
+ DUMP_TO_FILE(mOutFile, ps_encode_op->pu1_output_buf,
+ ps_encode_op->i4_bytes_generated);
+
+ if (ps_encode_op->i4_is_key_frame) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2Config::SYNC_FRAME));
+ }
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ if (mSignalledEos) {
+ work->worklets.front()->output.flags =
+ C2FrameData::FLAG_END_OF_STREAM;
+ }
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftHevcEnc::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ while (true) {
+ ihevce_out_buf_t s_encode_op{};
+ memset(&s_encode_op, 0, sizeof(s_encode_op));
+
+ ihevce_encode(mCodecCtx, nullptr, &s_encode_op);
+ if (s_encode_op.i4_bytes_generated) {
+ finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
+ } else {
+ if (work->workletsProcessed != 1u) fillEmptyWork(work);
+ break;
+ }
+ }
return C2_OK;
}
@@ -704,7 +798,7 @@
const std::shared_ptr<C2BlockPool>& pool) {
// Initialize output work
work->result = C2_OK;
- work->workletsProcessed = 1u;
+ work->workletsProcessed = 0u;
work->worklets.front()->output.flags = work->input.flags;
if (mSignalledError || mSignalledEos) {
@@ -721,6 +815,7 @@
ALOGE("Failed to initialize encoder : 0x%x", status);
mSignalledError = true;
work->result = status;
+ work->workletsProcessed = 1u;
return;
}
}
@@ -728,6 +823,8 @@
std::shared_ptr<const C2GraphicView> view;
std::shared_ptr<C2Buffer> inputBuffer = nullptr;
bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ if (eos) mSignalledEos = true;
+
if (!work->input.buffers.empty()) {
inputBuffer = work->input.buffers[0];
view = std::make_shared<const C2GraphicView>(
@@ -736,13 +833,12 @@
ALOGE("graphic view map err = %d", view->error());
mSignalledError = true;
work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
return;
}
}
-
IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
- fillEmptyWork(work);
if (!mSpsPpsHeaderReceived) {
ihevce_out_buf_t s_header_op{};
err = ihevce_encode_header(mCodecCtx, &s_header_op);
@@ -754,6 +850,7 @@
ALOGE("CSD allocation failed");
mSignalledError = true;
work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
return;
}
memcpy(csd->m.value, s_header_op.pu1_output_buf,
@@ -764,34 +861,40 @@
mSpsPpsHeaderReceived = true;
}
if (!inputBuffer) {
+ work->workletsProcessed = 1u;
return;
}
}
ihevce_inp_buf_t s_encode_ip{};
ihevce_out_buf_t s_encode_op{};
- uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+ uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
- status = setEncodeArgs(&s_encode_ip, view.get(), timestamp);
+ status = setEncodeArgs(&s_encode_ip, view.get(), workIndex);
if (C2_OK != status) {
ALOGE("setEncodeArgs failed : 0x%x", status);
mSignalledError = true;
work->result = status;
+ work->workletsProcessed = 1u;
return;
}
uint64_t timeDelay = 0;
uint64_t timeTaken = 0;
+ memset(&s_encode_op, 0, sizeof(s_encode_op));
GETTIME(&mTimeStart, nullptr);
TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
- ihevce_inp_buf_t* ps_encode_ip = (inputBuffer) ? &s_encode_ip : nullptr;
-
- err = ihevce_encode(mCodecCtx, ps_encode_ip, &s_encode_op);
- if (IHEVCE_EOK != err) {
- ALOGE("Encode Frame failed : 0x%x", err);
- mSignalledError = true;
- work->result = C2_CORRUPTED;
- return;
+ if (inputBuffer) {
+ err = ihevce_encode(mCodecCtx, &s_encode_ip, &s_encode_op);
+ if (IHEVCE_EOK != err) {
+ ALOGE("Encode Frame failed : 0x%x", err);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ return;
+ }
+ } else if (!eos) {
+ fillEmptyWork(work);
}
GETTIME(&mTimeEnd, nullptr);
@@ -802,42 +905,11 @@
(int)timeDelay, s_encode_op.i4_bytes_generated);
if (s_encode_op.i4_bytes_generated) {
- std::shared_ptr<C2LinearBlock> block;
- C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
- status = pool->fetchLinearBlock(s_encode_op.i4_bytes_generated, usage, &block);
- if (C2_OK != status) {
- ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
- work->result = C2_NO_MEMORY;
- mSignalledError = true;
- return;
- }
- C2WriteView wView = block->map().get();
- if (C2_OK != wView.error()) {
- ALOGE("write view map failed with status 0x%x", wView.error());
- work->result = wView.error();
- mSignalledError = true;
- return;
- }
- memcpy(wView.data(), s_encode_op.pu1_output_buf,
- s_encode_op.i4_bytes_generated);
-
- std::shared_ptr<C2Buffer> buffer =
- createLinearBuffer(block, 0, s_encode_op.i4_bytes_generated);
-
- DUMP_TO_FILE(mOutFile, s_encode_op.pu1_output_buf,
- s_encode_op.i4_bytes_generated);
-
- work->worklets.front()->output.ordinal.timestamp = s_encode_op.u8_pts;
- if (s_encode_op.i4_is_key_frame) {
- ALOGV("IDR frame produced");
- buffer->setInfo(
- std::make_shared<C2StreamPictureTypeMaskInfo::output>(
- 0u /* stream id */, C2Config::SYNC_FRAME));
- }
- work->worklets.front()->output.buffers.push_back(buffer);
+ finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
}
+
if (eos) {
- mSignalledEos = true;
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
}
}
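
drainInternal() takes the same shape in both encoders: keep calling the codec with null input, finish each pending work by the index recovered from the output, and stop once the codec reports no more output. A self-contained model of that loop (the two-frame stub codec is an illustration, not encoder code):

    #include <cstdint>
    #include <cstdio>
    #include <deque>

    struct Output { uint64_t workIndex; int bytes; };

    // Stub codec: holds two buffered frames and releases one per drain call.
    static std::deque<Output> gBuffered = {{7, 512}, {8, 640}};

    static Output encodeNullInput() {   // stands in for the real encode call
        if (gBuffered.empty()) return {0, 0};
        Output out = gBuffered.front();
        gBuffered.pop_front();
        return out;
    }

    int main() {
        while (true) {
            Output out = encodeNullInput();
            if (out.bytes == 0) break;  // nothing generated: fully drained
            std::printf("finish work %llu (%d bytes)\n",
                        (unsigned long long)out.workIndex, out.bytes);
        }
        return 0;
    }
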
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 8569a3e..f2c7642 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -35,7 +35,12 @@
diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
((end).tv_usec - (start).tv_usec);
-#define CODEC_MAX_CORES 4
+#define CODEC_MAX_CORES 4
+#define MAX_B_FRAMES 1
+#define MAX_RC_LOOKAHEAD 1
+
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_RC_LOOKAHEAD 0
struct C2SoftHevcEnc : public SimpleC2Component {
class IntfImpl;
@@ -95,10 +100,15 @@
c2_status_t releaseEncoder();
c2_status_t setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
const C2GraphicView* const input,
- uint64_t timestamp);
+ uint64_t workIndex);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool,
+ ihevce_out_buf_t* ps_encode_op);
+ c2_status_t drainInternal(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
C2_DO_NOT_COPY(C2SoftHevcEnc);
};
-
#ifdef FILE_DUMP_ENABLE
#define INPUT_DUMP_PATH "/data/local/tmp/hevc"
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 3d4a733..7e6685e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -466,9 +466,11 @@
/* TODO: can remove temporary copy after library supports writing to display
* buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYV12Frame(uint8_t *dst, uint8_t *src, size_t dstYStride,
- size_t srcYStride, uint32_t width, uint32_t height) {
- size_t dstUVStride = align(dstYStride / 2, 16);
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, uint8_t *src,
+ size_t dstYStride, size_t dstUVStride,
+ size_t srcYStride, uint32_t width,
+ uint32_t height) {
size_t srcUVStride = srcYStride / 2;
uint8_t *srcStart = src;
uint8_t *dstStart = dst;
@@ -673,8 +675,14 @@
}
uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
- (void)copyOutputBufferToYV12Frame(outputBufferY, mOutputBuffer[mNumSamplesOutput & 1],
- wView.width(), align(mWidth, 16), mWidth, mHeight);
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ (void)copyOutputBufferToYuvPlanarFrame(
+ outputBufferY,
+ mOutputBuffer[mNumSamplesOutput & 1],
+ dstYStride, dstUVStride,
+ align(mWidth, 16), mWidth, mHeight);
inPos += inSize - (size_t)tmpInSize;
finishWork(workIndex, work);
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
index abfd379..34f5753 100644
--- a/media/codec2/components/vpx/Android.bp
+++ b/media/codec2/components/vpx/Android.bp
@@ -7,7 +7,7 @@
srcs: ["C2SoftVpxDec.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
cflags: [
"-DVP9",
@@ -23,7 +23,7 @@
srcs: ["C2SoftVpxDec.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
cc_library_shared {
@@ -38,7 +38,7 @@
"C2SoftVpxEnc.cpp",
],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
cflags: ["-DVP9"],
}
@@ -55,6 +55,6 @@
"C2SoftVpxEnc.cpp",
],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3120f7a..42f507f 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -559,12 +559,11 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- size_t dstYStride = align(width, 16);
- size_t dstUVStride = align(dstYStride / 2, 16);
uint8_t *dstStart = dst;
for (size_t i = 0; i < height; ++i) {
@@ -654,11 +653,10 @@
static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
+ size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstStride * height;
- size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstYSize = dstYStride * height;
size_t dstUVSize = dstUVStride * height / 2;
uint8_t *dstV = dstY + dstYSize;
uint8_t *dstU = dstV + dstUVSize;
@@ -669,7 +667,7 @@
}
srcY += srcYStride;
- dstY += dstStride;
+ dstY += dstYStride;
}
for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -751,6 +749,9 @@
size_t srcYStride = img->stride[VPX_PLANE_Y];
size_t srcUStride = img->stride[VPX_PLANE_U];
size_t srcVStride = img->stride[VPX_PLANE_V];
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
if (img->fmt == VPX_IMG_FMT_I42016) {
const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
@@ -760,20 +761,23 @@
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride / sizeof(uint32_t),
mWidth, mHeight);
} else {
convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride, dstUVStride,
mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ copyOutputBufferToYuvPlanarFrame(
+ dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
finishWork(*(int64_t *)img->user_priv, work, std::move(block));
return true;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 715e78b..7669421 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1203,6 +1203,9 @@
void CCodecBufferChannel::onInputBufferDone(
uint64_t frameIndex, size_t arrayIndex) {
+ if (mInputSurface) {
+ return;
+ }
std::shared_ptr<C2Buffer> buffer =
mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
bool newInputSlotAvailable;
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 10c4dcc..f8afa7c 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -872,7 +872,6 @@
emplace("libcodec2_soft_vp8enc.so");
emplace("libcodec2_soft_vp9dec.so");
emplace("libcodec2_soft_vp9enc.so");
- emplace("libcodec2_soft_xaacdec.so");
}
c2_status_t C2PlatformComponentStore::copyBuffer(
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index b4fd811..e01fc90 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1972,6 +1972,8 @@
return err;
}
+ adjustRawDefaultFrameSize();
+
size_t max_size;
err = mLastTrack->sampleTable->getMaxSampleSize(&max_size);
@@ -4606,6 +4608,20 @@
return OK;
}
+void MPEG4Extractor::adjustRawDefaultFrameSize() {
+ int32_t chanCount = 0;
+ int32_t bitWidth = 0;
+ const char *mimeStr = NULL;
+
+ if (AMediaFormat_getString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME, &mimeStr) &&
+ !strcasecmp(mimeStr, MEDIA_MIMETYPE_AUDIO_RAW) &&
+ AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &chanCount) &&
+ AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, &bitWidth)) {
+ // the sample size in stsz may not be right, so update the default sample size
+ mLastTrack->sampleTable->setPredictSampleSize(chanCount * bitWidth / 8);
+ }
+}
+
////////////////////////////////////////////////////////////////////////////////
MPEG4Source::MPEG4Source(
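
For raw PCM tracks the stsz sample size can be wrong, so the extractor now predicts it from the track metadata: bytes per sample frame = channels × bits per sample ÷ 8. Worked numbers in a standalone check (the helper name is an assumption):

    #include <cassert>
    #include <cstdint>

    static uint32_t predictedPcmFrameSize(int32_t channels, int32_t bitsPerSample) {
        return static_cast<uint32_t>(channels * bitsPerSample / 8);
    }

    int main() {
        assert(predictedPcmFrameSize(2, 16) == 4);  // stereo 16-bit PCM
        assert(predictedPcmFrameSize(1, 8) == 1);   // mono 8-bit PCM
    }
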
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 031e793..e10bf8a 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -179,6 +179,7 @@
status_t parseAC3SpecificBox(off64_t offset);
status_t parseEAC3SpecificBox(off64_t offset);
status_t parseAC4SpecificBox(off64_t offset);
+ void adjustRawDefaultFrameSize();
MPEG4Extractor(const MPEG4Extractor &);
MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/extractors/mp4/SampleTable.h b/media/extractors/mp4/SampleTable.h
index 57f6e62..076f4c3 100644
--- a/media/extractors/mp4/SampleTable.h
+++ b/media/extractors/mp4/SampleTable.h
@@ -89,6 +89,10 @@
status_t findThumbnailSample(uint32_t *sample_index);
+ void setPredictSampleSize(uint32_t sampleSize) {
+ mDefaultSampleSize = sampleSize;
+ }
+
protected:
~SampleTable();
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 05e8a49..73b5f8d 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -44,20 +44,20 @@
public class CamcorderProfiles {
ctor public CamcorderProfiles();
method public int getCameraId();
- method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile();
- method public java.util.List<media.profiles.CamcorderProfiles.ImageDecoding> getImageDecoding();
- method public java.util.List<media.profiles.CamcorderProfiles.ImageEncoding> getImageEncoding();
+ method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
+ method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
+ method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
method public void setCameraId(int);
}
- public static class CamcorderProfiles.ImageDecoding {
- ctor public CamcorderProfiles.ImageDecoding();
+ public static class CamcorderProfiles.ImageDecodingOptional {
+ ctor public CamcorderProfiles.ImageDecodingOptional();
method public int getMemCap();
method public void setMemCap(int);
}
- public static class CamcorderProfiles.ImageEncoding {
- ctor public CamcorderProfiles.ImageEncoding();
+ public static class CamcorderProfiles.ImageEncodingOptional {
+ ctor public CamcorderProfiles.ImageEncodingOptional();
method public int getQuality();
method public void setQuality(int);
}
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index a02252a..9664456 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -35,19 +35,19 @@
</xs:complexType>
</xs:element>
<xs:complexType name="CamcorderProfiles">
- <xs:sequence>
- <xs:element name="EncoderProfile" type="EncoderProfile" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="ImageEncoding" minOccurs="0" maxOccurs="unbounded">
+ <xs:choice minOccurs="0" maxOccurs="unbounded">
+ <xs:element name="EncoderProfile" type="EncoderProfile"/>
+ <xs:element name="ImageEncoding">
<xs:complexType>
<xs:attribute name="quality" type="xs:int"/>
</xs:complexType>
</xs:element>
- <xs:element name="ImageDecoding" minOccurs="0" maxOccurs="unbounded">
+ <xs:element name="ImageDecoding">
<xs:complexType>
<xs:attribute name="memCap" type="xs:int"/>
</xs:complexType>
</xs:element>
- </xs:sequence>
+ </xs:choice>
<xs:attribute name="cameraId" type="xs:int"/>
</xs:complexType>
<xs:complexType name="EncoderProfile">
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 317b5ec..3d67c91 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1138,6 +1138,8 @@
return err;
}
+ static_cast<Surface *>(mNativeWindow.get())->setDequeueTimeout(-1);
+
// Exits here for tunneled video playback codecs -- i.e. skips native window
// buffer allocation step as this is managed by the tunneled OMX component
// itself and explicitly sets def.nBufferCountActual to 0.
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 5932518..9170805 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -212,7 +212,6 @@
"libstagefright_mediafilter",
"libstagefright_webm",
"libstagefright_timedtext",
- "libvpx",
"libogg",
"libwebm",
"libstagefright_esds",
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
index 5177593..772ebf9 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ b/media/libstagefright/codecs/amrnb/common/Android.bp
@@ -1,9 +1,6 @@
cc_library_shared {
name: "libstagefright_amrnb_common",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: [
"src/add.cpp",
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 3beed66..084be0a 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -129,6 +129,7 @@
shared_libs: [
"libstagefright_enc_common",
+ "liblog",
],
cflags: ["-Werror"],
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
index 95f9494..9442fc4 100644
--- a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
@@ -14,6 +14,7 @@
shared_libs: [
"libdl",
+ "liblog",
],
static_libs: [
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
index 8cebb09..f2e28c4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
+++ b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
@@ -47,6 +47,10 @@
#include "q_pulse.h"
+#undef LOG_TAG
+#define LOG_TAG "amrwbenc"
+#include "log/log.h"
+
static Word16 tipos[36] = {
0, 1, 2, 3, /* starting point &ipos[0], 1st iter */
1, 2, 3, 0, /* starting point &ipos[4], 2nd iter */
@@ -745,11 +749,16 @@
i = (Word16)((vo_L_mult(track, NPMAXPT) >> 1));
- while (ind[i] >= 0)
+ while (i < NPMAXPT * NB_TRACK && ind[i] >= 0)
{
i += 1;
}
- ind[i] = index;
+ if (i < NPMAXPT * NB_TRACK) {
+ ind[i] = index;
+ } else {
+ ALOGE("b/132647222, OOB access in ind array track=%d i=%d", track, i);
+ android_errorWriteLog(0x534e4554, "132647222");
+ }
}
k = 0;
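
The fix for b/132647222 bounds the free-slot scan of ind[] at NPMAXPT * NB_TRACK on the read as well as the write, logging instead of writing when no free slot exists. The pattern in isolation (array size and contents are illustrative):

    #include <cstdio>

    int main() {
        const int kSlots = 4;
        int ind[kSlots] = {7, 3, 9, 5};      // deliberately no free (negative) slot

        int i = 0;
        while (i < kSlots && ind[i] >= 0) {  // bound checked before each read
            i += 1;
        }
        if (i < kSlots) {
            ind[i] = 42;                     // safe: a free slot was found
        } else {
            std::printf("no free slot; skip the write instead of going OOB\n");
        }
    }
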
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 3726922..c5a076a 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,9 +1,6 @@
cc_library {
name: "libstagefright_enc_common",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: ["cmnMemory.c"],
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index 577231c..82bb8d1 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -4,7 +4,7 @@
srcs: ["SoftVPX.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
version_script: "exports.lds",
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 82c215e..cd69e0d 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -20,5 +20,5 @@
cfi: true,
},
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index d136d9e..d685321 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -395,11 +395,16 @@
dst.mStride, src.cropWidth(), src.cropHeight());
break;
- case OMX_COLOR_Format32BitRGBA8888:
+ case OMX_COLOR_Format32bitBGRA8888:
libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
dst.mStride, src.cropWidth(), src.cropHeight());
break;
+ case OMX_COLOR_Format32BitRGBA8888:
+ libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
+ dst.mStride, src.cropWidth(), src.cropHeight());
+ break;
+
default:
return ERROR_UNSUPPORTED;
}
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index b270808..b494e16 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,9 +1,6 @@
cc_library {
name: "libstagefright_flacdec",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: [
"FLACDecoder.cpp",
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 5e7f90a..9cf97c7 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1384,7 +1384,7 @@
while (index > lowestBandwidth) {
// be conservative (70%) to avoid overestimating and immediately
// switching down again.
- size_t adjustedBandwidthBps = bandwidthBps * 7 / 10;
+ size_t adjustedBandwidthBps = bandwidthBps * .7f;
const BandwidthItem &item = mBandwidthItems[index];
if (item.mBandwidth <= adjustedBandwidthBps
&& isBandwidthValid(item)) {
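
The 70% factor keeps switch-up decisions conservative: a measured 10 Mbps is treated as 7 Mbps, so only variants at or below 7 Mbps qualify. The same arithmetic in isolation; both the old integer form (bandwidthBps * 7 / 10) and the new float form give 7,000,000 for this input:

    #include <cassert>
    #include <cstddef>

    int main() {
        size_t bandwidthBps = 10000000;        // measured: 10 Mbps
        size_t adjusted = bandwidthBps * .7f;  // the patch's arithmetic
        assert(adjusted == 7000000);           // only variants <= 7 Mbps qualify
    }
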
diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp
index 4b0d47c..52c30ec 100644
--- a/media/libstagefright/webm/WebmFrame.cpp
+++ b/media/libstagefright/webm/WebmFrame.cpp
@@ -62,6 +62,14 @@
mData);
}
+uint64_t WebmFrame::getAbsTimecode() {
+ return mAbsTimecode;
+}
+
+void WebmFrame::updateAbsTimecode(uint64_t newAbsTimecode) {
+ mAbsTimecode = newAbsTimecode;
+}
+
bool WebmFrame::operator<(const WebmFrame &other) const {
if (this->mEos) {
return false;
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h
index a410a87..47f2523 100644
--- a/media/libstagefright/webm/WebmFrame.h
+++ b/media/libstagefright/webm/WebmFrame.h
@@ -25,7 +25,7 @@
public:
const int mType;
const bool mKey;
- const uint64_t mAbsTimecode;
+ uint64_t mAbsTimecode;
const sp<ABuffer> mData;
const bool mEos;
@@ -33,6 +33,8 @@
WebmFrame(int type, bool key, uint64_t absTimecode, MediaBufferBase *buf);
~WebmFrame() {}
+ uint64_t getAbsTimecode();
+ void updateAbsTimecode(uint64_t newAbsTimecode);
sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const;
bool operator<(const WebmFrame &other) const;
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 4b6f928..631a2ab 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -78,6 +78,7 @@
mVideoFrames(videoThread->mSink),
mAudioFrames(audioThread->mSink),
mCues(cues),
+ mStartOffsetTimecode(UINT64_MAX),
mDone(true) {
}
@@ -92,6 +93,7 @@
mVideoFrames(videoSource),
mAudioFrames(audioSource),
mCues(cues),
+ mStartOffsetTimecode(UINT64_MAX),
mDone(true) {
}
@@ -213,6 +215,11 @@
const sp<WebmFrame> audioFrame = mAudioFrames.peek();
ALOGV("a frame: %p", audioFrame.get());
+ if (mStartOffsetTimecode == UINT64_MAX) {
+ mStartOffsetTimecode =
+ std::min(audioFrame->getAbsTimecode(), videoFrame->getAbsTimecode());
+ }
+
if (videoFrame->mEos && audioFrame->mEos) {
break;
}
@@ -220,10 +227,12 @@
if (*audioFrame < *videoFrame) {
ALOGV("take a frame");
mAudioFrames.take();
+ audioFrame->updateAbsTimecode(audioFrame->getAbsTimecode() - mStartOffsetTimecode);
outstandingFrames.push_back(audioFrame);
} else {
ALOGV("take v frame");
mVideoFrames.take();
+ videoFrame->updateAbsTimecode(videoFrame->getAbsTimecode() - mStartOffsetTimecode);
outstandingFrames.push_back(videoFrame);
if (videoFrame->mKey)
numVideoKeyFrames++;
@@ -350,7 +359,6 @@
if (mStartTimeUs == kUninitialized) {
mStartTimeUs = timestampUs;
}
- timestampUs -= mStartTimeUs;
if (mPaused && !mResumed) {
lastDurationUs = timestampUs - lastTimestampUs;
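
The sink thread now captures the smaller of the first audio and video timecodes as mStartOffsetTimecode and rebases every frame against it, replacing the per-source subtraction of mStartTimeUs, so the muxed timeline starts at zero without shifting audio and video relative to each other. Worked values (illustrative):

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    int main() {
        uint64_t firstAudioTc = 100;  // ms
        uint64_t firstVideoTc = 133;  // ms
        uint64_t startOffset = std::min(firstAudioTc, firstVideoTc);
        assert(firstAudioTc - startOffset == 0);   // earliest track opens the timeline
        assert(firstVideoTc - startOffset == 33);  // relative A/V lag is preserved
    }
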
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
index 76c91f1..1ddaf9a 100644
--- a/media/libstagefright/webm/WebmFrameThread.h
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -83,6 +83,7 @@
LinkedBlockingQueue<const sp<WebmFrame> >& mVideoFrames;
LinkedBlockingQueue<const sp<WebmFrame> >& mAudioFrames;
List<sp<WebmElement> >& mCues;
+ uint64_t mStartOffsetTimecode;
volatile bool mDone;
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 26e0884..5eaadbd 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -124,6 +124,11 @@
return NULL;
}
+ int32_t bitsPerSample = 0;
+ if (!md->findInt32(kKeyBitsPerSample, &bitsPerSample)) {
+ ALOGV("kKeyBitsPerSample not available");
+ }
+
if (!strncasecmp(mimeType, MEDIA_MIMETYPE_AUDIO_OPUS, strlen(MEDIA_MIMETYPE_AUDIO_OPUS))) {
// Opus in WebM is a well-known, yet under-documented, format. The codec private data
// of the track is an Opus Ogg header (https://tools.ietf.org/html/rfc7845#section-5.1)
@@ -164,8 +169,8 @@
uint8_t* codecPrivateData = codecPrivateBuf->data();
memcpy(codecPrivateData + off, (uint8_t*)header_data, headerSize);
- sp<WebmElement> entry =
- WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate, codecPrivateBuf);
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate,
+ codecPrivateBuf, bitsPerSample);
return entry;
} else if (!strncasecmp(mimeType,
MEDIA_MIMETYPE_AUDIO_VORBIS,
@@ -203,8 +208,8 @@
off += headerSize2;
memcpy(codecPrivateData + off, headerData3, headerSize3);
- sp<WebmElement> entry =
- WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate, codecPrivateBuf);
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate,
+ codecPrivateBuf, bitsPerSample);
return entry;
} else {
ALOGE("Track (%s) is not a supported audio format", mimeType);
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 49937f0..30f29d6 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -258,7 +258,7 @@
virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
std::vector<audio_format_t> *formats) = 0;
- virtual void setAppState(uid_t uid, app_state_t state);
+ virtual void setAppState(uid_t uid, app_state_t state) = 0;
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 530a2e4..07a7e65 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -70,20 +70,7 @@
audio_stream_type_t EngineBase::getStreamTypeForAttributes(const audio_attributes_t &attr) const
{
- audio_stream_type_t engineStream = mProductStrategies.getStreamTypeForAttributes(attr);
- // ensure the audibility flag for sonification is honored for stream types
- // Note this is typically implemented in the product strategy configuration files, but is
- // duplicated here for safety.
- if (attr.usage == AUDIO_USAGE_ASSISTANCE_SONIFICATION
- && ((attr.flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) != 0)) {
- engineStream = AUDIO_STREAM_ENFORCED_AUDIBLE;
- }
- // ensure the ENFORCED_AUDIBLE stream type reflects the "force use" setting:
- if ((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (engineStream == AUDIO_STREAM_ENFORCED_AUDIBLE)) {
- return AUDIO_STREAM_SYSTEM;
- }
- return engineStream;
+ return mProductStrategies.getStreamTypeForAttributes(attr);
}
audio_attributes_t EngineBase::getAttributesForStreamType(audio_stream_type_t stream) const
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e92581d..575a6c2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -777,6 +777,12 @@
// check for device and output changes triggered by new force usage
checkForDeviceAndOutputChanges();
+ // force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
+ if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
+ mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
+ mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+ }
+
//FIXME: workaround for truncated touch sounds
// to be removed when the problem is handled by system UI
uint32_t delayMs = 0;
@@ -910,6 +916,13 @@
}
*dstAttr = mEngine->getAttributesForStreamType(srcStream);
}
+
+ // Only honor audibility enforced when required. The client will be
+ // forced to reconnect if the forced usage changes.
+ if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
+ dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+ }
+
return NO_ERROR;
}
@@ -2136,6 +2149,7 @@
for (size_t i = 0; i < mInputs.size(); ) {
sp <AudioInputDescriptor> desc = mInputs.valueAt(i);
if (desc->mProfile != profile) {
+ i++;
continue;
}
// if sound trigger, reuse input if used by other sound trigger on same session
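
The added i++ fixes an infinite loop: the index-based scan continued past non-matching inputs without ever advancing. The shape of the bug in miniature (vector contents are illustrative):

    #include <vector>

    int main() {
        std::vector<int> inputs = {1, 2, 3};
        for (size_t i = 0; i < inputs.size(); ) {
            if (inputs[i] != 2) {
                i++;        // the fix: advance on the "keep" path, or loop forever
                continue;
            }
            inputs.erase(inputs.begin() + i);  // erase shifts; do not advance here
        }
        return 0;
    }
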
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 7573089..6a9e6cb 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -77,8 +77,10 @@
mTimestampOffset(0),
mNextResultFrameNumber(0),
mNextReprocessResultFrameNumber(0),
+ mNextZslStillResultFrameNumber(0),
mNextShutterFrameNumber(0),
mNextReprocessShutterFrameNumber(0),
+ mNextZslStillShutterFrameNumber(0),
mListener(NULL),
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
mLastTemplateId(-1),
@@ -3464,7 +3466,7 @@
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
- bool reprocess,
+ bool reprocess, bool zslStillCapture,
const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
ATRACE_CALL();
if (pendingMetadata.isEmpty())
@@ -3481,6 +3483,14 @@
return;
}
mNextReprocessResultFrameNumber = frameNumber + 1;
+ } else if (zslStillCapture) {
+ if (frameNumber < mNextZslStillResultFrameNumber) {
+ SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, mNextZslStillResultFrameNumber);
+ return;
+ }
+ mNextZslStillResultFrameNumber = frameNumber + 1;
} else {
if (frameNumber < mNextResultFrameNumber) {
SET_ERR("Out-of-order capture result metadata submitted! "
@@ -3741,7 +3751,8 @@
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
collectedPartialResult, frameNumber,
- hasInputBufferInRequest, request.physicalMetadatas);
+ hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+ request.physicalMetadatas);
}
}
@@ -3919,12 +3930,20 @@
// TODO: need to track errors for tighter bounds on expected frame number.
if (r.hasInputBuffer) {
if (msg.frame_number < mNextReprocessShutterFrameNumber) {
- SET_ERR("Shutter notification out-of-order. Expected "
+ SET_ERR("Reprocess shutter notification out-of-order. Expected "
"notification for frame %d, got frame %d",
mNextReprocessShutterFrameNumber, msg.frame_number);
return;
}
mNextReprocessShutterFrameNumber = msg.frame_number + 1;
+ } else if (r.zslCapture && r.stillCapture) {
+ if (msg.frame_number < mNextZslStillShutterFrameNumber) {
+ SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ mNextZslStillShutterFrameNumber, msg.frame_number);
+ return;
+ }
+ mNextZslStillShutterFrameNumber = msg.frame_number + 1;
} else {
if (msg.frame_number < mNextShutterFrameNumber) {
SET_ERR("Shutter notification out-of-order. Expected "
@@ -3948,7 +3967,8 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.physicalMetadatas);
+ r.hasInputBuffer, r.zslCapture && r.stillCapture,
+ r.physicalMetadatas);
}
bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
returnOutputBuffers(r.pendingOutputBuffers.array(),
@@ -4032,10 +4052,6 @@
mHidlSession.clear();
}
-bool Camera3Device::HalInterface::supportBatchRequest() {
- return mHidlSession != nullptr;
-}
-
status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
camera3_request_template_t templateId,
/*out*/ camera_metadata_t **requestTemplate) {
@@ -4608,20 +4624,6 @@
return CameraProviderManager::mapToStatusT(status);
}
-status_t Camera3Device::HalInterface::processCaptureRequest(
- camera3_capture_request_t *request) {
- ATRACE_NAME("CameraHal::processCaptureRequest");
- if (!valid()) return INVALID_OPERATION;
- status_t res = OK;
-
- uint32_t numRequestProcessed = 0;
- std::vector<camera3_capture_request_t*> requests(1);
- requests[0] = request;
- res = processBatchCaptureRequests(requests, &numRequestProcessed);
-
- return res;
-}
-
status_t Camera3Device::HalInterface::flush() {
ATRACE_NAME("CameraHal::flush");
if (!valid()) return INVALID_OPERATION;
@@ -5172,43 +5174,6 @@
return true;
}
-bool Camera3Device::RequestThread::sendRequestsOneByOne() {
- status_t res;
-
- for (auto& nextRequest : mNextRequests) {
- // Submit request and block until ready for next one
- ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
- res = mInterface->processCaptureRequest(&nextRequest.halRequest);
-
- if (res != OK) {
- // Should only get a failure here for malformed requests or device-level
- // errors, so consider all errors fatal. Bad metadata failures should
- // come through notify.
- SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
- " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
- res);
- cleanUpFailedRequests(/*sendRequestError*/ false);
- return false;
- }
-
- // Mark that the request has be submitted successfully.
- nextRequest.submitted = true;
-
- updateNextRequest(nextRequest);
-
- // Remove any previously queued triggers (after unlock)
- res = removeTriggers(mPrevRequest);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to remove triggers "
- "(capture request %d, HAL device: %s (%d)",
- nextRequest.halRequest.frame_number, strerror(-res), res);
- cleanUpFailedRequests(/*sendRequestError*/ false);
- return false;
- }
- }
- return true;
-}
-
nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
@@ -5462,11 +5427,8 @@
bool submitRequestSuccess = false;
nsecs_t tRequestStart = systemTime(SYSTEM_TIME_MONOTONIC);
- if (mInterface->supportBatchRequest()) {
- submitRequestSuccess = sendRequestsBatch();
- } else {
- submitRequestSuccess = sendRequestsOneByOne();
- }
+ submitRequestSuccess = sendRequestsBatch();
+
nsecs_t tRequestEnd = systemTime(SYSTEM_TIME_MONOTONIC);
mRequestLatency.add(tRequestStart, tRequestEnd);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 8f74611..4a0d2b6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -289,9 +289,6 @@
// Reset this HalInterface object (does not call close())
void clear();
- // Check if HalInterface support sending requests in batch
- bool supportBatchRequest();
-
// Calls into the HAL interface
// Caller takes ownership of requestTemplate
@@ -300,7 +297,11 @@
status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera3_stream_configuration *config,
const std::vector<uint32_t>& bufferSizes);
- status_t processCaptureRequest(camera3_capture_request_t *request);
+
+ // When the call succeeds, the ownership of acquire fences in requests is transferred to
+ // HalInterface. More specifically, the current implementation will send the fence to
+ // HAL process and close the FD in cameraserver process. When the call fails, the ownership
+ // of the acquire fence still belongs to the caller.
status_t processBatchCaptureRequests(
std::vector<camera3_capture_request_t*>& requests,
/*out*/uint32_t* numRequestProcessed);
@@ -895,9 +896,6 @@
// Clear repeating requests. Must be called with mRequestLock held.
status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
- // send request in mNextRequests to HAL one by one. Return true = sucssess
- bool sendRequestsOneByOne();
-
// send request in mNextRequests to HAL in a batch. Return true = success
bool sendRequestsBatch();
@@ -1186,10 +1184,14 @@
uint32_t mNextResultFrameNumber;
// the minimal frame number of the next reprocess result
uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ uint32_t mNextZslStillResultFrameNumber;
// the minimal frame number of the next non-reprocess shutter
uint32_t mNextShutterFrameNumber;
// the minimal frame number of the next reprocess shutter
uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ uint32_t mNextZslStillShutterFrameNumber;
List<CaptureResult> mResultQueue;
Condition mResultSignal;
wp<NotificationListener> mListener;
@@ -1226,7 +1228,8 @@
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
+ bool reprocess, bool zslStillCapture,
+ const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
bool isLastFullResult(const InFlightRequest& inFlightRequest);
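
ZSL still captures can legitimately complete out of order relative to regular results, so they get their own monotonic sequences (mNextZslStillResultFrameNumber, mNextZslStillShutterFrameNumber) instead of tripping a single shared check. A sketch of per-category tracking (the struct and names are assumptions):

    #include <cstdint>
    #include <cstdio>

    struct SequenceChecker {
        uint32_t next = 0;
        bool accept(uint32_t frameNumber) {
            if (frameNumber < next) return false;  // out of order in this category
            next = frameNumber + 1;
            return true;
        }
    };

    int main() {
        SequenceChecker single;
        bool a = single.accept(5);
        bool b = single.accept(4);    // false: one shared sequence flags frame 4

        SequenceChecker regular, zslStill;
        bool c = regular.accept(5);   // regular result for frame 5 arrives first
        bool d = zslStill.accept(4);  // true: earlier ZSL frame 4 is still in order
        std::printf("%d %d %d %d\n", a, b, c, d);  // prints: 1 0 1 1
    }
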
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 1470de2..17c2e02 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -49,7 +49,6 @@
"libcodec2_soft_flacdec",
"libcodec2_soft_flacenc",
"libcodec2_soft_gsmdec",
- "libcodec2_soft_xaacdec",
],
}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index f75515a..9042cd7 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -42,6 +42,7 @@
lseek: 1
rt_sigprocmask: 1
openat: 1
+open: 1
fstat64: 1
write: 1
nanosleep: 1
@@ -49,6 +50,7 @@
set_tid_address: 1
getdents64: 1
readlinkat: 1
+readlink: 1
read: 1
pread64: 1
fstatfs64: 1