Merge "C2 VTS: Format using clang-format"
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index e8287f9..f4a6e17 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -22,6 +22,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
@@ -331,6 +332,14 @@
free(mem);
}
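+// Map the device's flexible YUV420 layout to the matching decoder output format (evaluated once and cached).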
+static IV_COLOR_FORMAT_T GetIvColorFormat() {
+ static IV_COLOR_FORMAT_T sColorFormat =
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
+ IV_YUV_420P;
+ return sColorFormat;
+}
+
C2SoftAvcDec::C2SoftAvcDec(
const char *name,
c2_node_id_t id,
@@ -339,7 +348,6 @@
mIntf(intfImpl),
mDecHandle(nullptr),
mOutBufferFlush(nullptr),
- mIvColorFormat(IV_YUV_420P),
mOutputDelay(kDefaultOutputDelay),
mWidth(320),
mHeight(240),
@@ -418,7 +426,13 @@
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
- s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
+ s_create_ip.s_ivd_create_ip_t.e_output_format = GetIvColorFormat();
+ switch (s_create_ip.s_ivd_create_ip_t.e_output_format) {
+ case IV_YUV_420P: ALOGD("Flex Planar"); break;
+ case IV_YUV_420SP_UV: ALOGD("Flex Semi-planar UV"); break;
+ case IV_YUV_420SP_VU: ALOGD("Flex Semi-planar VU"); break;
+ default: ALOGD("Unknown"); break;
+ }
s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr;
@@ -555,8 +569,12 @@
ps_decode_ip->u4_num_Bytes = 0;
}
ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
- ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
- ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
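+ // Semi-planar output uses a single interleaved chroma buffer, so request one chroma buffer of twice the per-plane size.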
+ if (GetIvColorFormat() == IV_YUV_420P) {
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
+ } else {
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize * 2;
+ }
if (outBuffer) {
if (outBuffer->height() < displayHeight) {
ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
@@ -565,13 +583,23 @@
}
ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
- ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ if (GetIvColorFormat() == IV_YUV_420P) {
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ } else if (GetIvColorFormat() == IV_YUV_420SP_VU) {
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ }
} else {
ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush;
ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize;
- ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
+ if (GetIvColorFormat() == IV_YUV_420P) {
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
+ }
}
- ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ if (GetIvColorFormat() == IV_YUV_420P) {
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ } else {
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 2;
+ }
ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
return true;
@@ -781,7 +809,7 @@
mOutBlock.reset();
}
if (!mOutBlock) {
- uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
@@ -797,8 +825,6 @@
}
// TODO: can overall error checking be improved?
-// TODO: allow configuration of color format and usage for graphic buffers instead
-// of hard coding them to HAL_PIXEL_FORMAT_YV12
// TODO: pass coloraspects information to surface
// TODO: test support for dynamic change in resolution
// TODO: verify if the decoder sent back all frames
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 5c07d29..ed99ad1 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -155,7 +155,6 @@
uint8_t *mOutBufferFlush;
size_t mNumCores;
- IV_COLOR_FORMAT_T mIvColorFormat;
uint32_t mOutputDelay;
uint32_t mWidth;
uint32_t mHeight;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 6931244..fc5b75d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -454,11 +454,19 @@
} // namespace
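+// Same layout probe as in the decoder: derive the encoder's raw input color format from the device's flexible YUV420 layout.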
+static IV_COLOR_FORMAT_T GetIvColorFormat() {
+ static IV_COLOR_FORMAT_T sColorFormat =
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
+ IV_YUV_420P;
+ return sColorFormat;
+}
+
C2SoftAvcEnc::C2SoftAvcEnc(
const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
- mIvVideoColorFormat(IV_YUV_420P),
+ mIvVideoColorFormat(GetIvColorFormat()),
mAVCEncProfile(IV_PROFILE_BASE),
mAVCEncLevel(41),
mStarted(false),
@@ -1026,8 +1034,7 @@
// Assume worst case output buffer size to be equal to number of bytes in input
mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
- // TODO
- mIvVideoColorFormat = IV_YUV_420P;
+ mIvVideoColorFormat = GetIvColorFormat();
ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1325,7 +1332,6 @@
mSize->width, input->height(), mSize->height);
return C2_BAD_VALUE;
}
- ALOGV("width = %d, height = %d", input->width(), input->height());
const C2PlanarLayout &layout = input->layout();
uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1362,7 +1368,8 @@
return C2_BAD_VALUE;
}
- if (layout.planes[layout.PLANE_Y].colInc == 1
+ if (mIvVideoColorFormat == IV_YUV_420P
+ && layout.planes[layout.PLANE_Y].colInc == 1
&& layout.planes[layout.PLANE_U].colInc == 1
&& layout.planes[layout.PLANE_V].colInc == 1
&& uStride == vStride
@@ -1370,21 +1377,61 @@
// I420 compatible - already set up above
break;
}
+ if (mIvVideoColorFormat == IV_YUV_420SP_UV
+ && layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && uStride == vStride
+ && yStride == vStride
+ && uPlane + 1 == vPlane) {
+ // NV12 compatible - already set up above
+ break;
+ }
+ if (mIvVideoColorFormat == IV_YUV_420SP_VU
+ && layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && uStride == vStride
+ && yStride == vStride
+ && uPlane == vPlane + 1) {
+ // NV21 compatible - already set up above
+ break;
+ }
// copy to I420
yStride = width;
uStride = vStride = yStride / 2;
MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
- MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ MediaImage2 img;
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+ case IV_YUV_420SP_VU:
+ img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
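+ // Swap the U and V offsets so the MediaImage2 describes VU-interleaved (NV21) data rather than NV12.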
+ img.mPlane[MediaImage2::U].mOffset++;
+ img.mPlane[MediaImage2::V].mOffset--;
+ yPlane = conversionBuffer.data();
+ vPlane = yPlane + yPlaneSize;
+ uPlane = vPlane + 1;
+ break;
+ case IV_YUV_420SP_UV:
+ default:
+ img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + 1;
+ break;
+ }
status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
if (err != OK) {
ALOGE("Buffer conversion failed: %d", err);
return C2_BAD_VALUE;
}
- yPlane = conversionBuffer.data();
- uPlane = yPlane + yPlaneSize;
- vPlane = uPlane + yPlaneSize / 4;
break;
}
@@ -1406,13 +1453,13 @@
ps_inp_raw_buf->apv_bufs[1] = uPlane;
ps_inp_raw_buf->apv_bufs[2] = vPlane;
- ps_inp_raw_buf->au4_wd[0] = input->width();
- ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
- ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+ ps_inp_raw_buf->au4_wd[0] = mSize->width;
+ ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+ ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
- ps_inp_raw_buf->au4_ht[0] = input->height();
- ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
- ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[0] = mSize->height;
+ ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
+ ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
ps_inp_raw_buf->au4_strd[0] = yStride;
ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1430,18 +1477,20 @@
break;
}
- case IV_YUV_420SP_UV:
case IV_YUV_420SP_VU:
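+ // For the VU-interleaved (NV21) case the shared chroma pointer must start at the V plane.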
+ uPlane = vPlane;
+ [[fallthrough]];
+ case IV_YUV_420SP_UV:
default:
{
ps_inp_raw_buf->apv_bufs[0] = yPlane;
ps_inp_raw_buf->apv_bufs[1] = uPlane;
- ps_inp_raw_buf->au4_wd[0] = input->width();
- ps_inp_raw_buf->au4_wd[1] = input->width();
+ ps_inp_raw_buf->au4_wd[0] = mSize->width;
+ ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
- ps_inp_raw_buf->au4_ht[0] = input->height();
- ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[0] = mSize->height;
+ ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
ps_inp_raw_buf->au4_strd[0] = yStride;
ps_inp_raw_buf->au4_strd[1] = uStride;
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 0b38bc1..bed8aeb 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -19,7 +19,6 @@
#include <android-base/logging.h>
#include <set>
-#include <sstream>
#include <dlfcn.h>
@@ -383,6 +382,9 @@
// Configure the next interface with the params.
std::vector<C2Param *> configParams;
for (size_t i = 0; i < heapParams.size(); ++i) {
+ if (!heapParams[i]) {
+ continue;
+ }
if (heapParams[i]->forStream()) {
heapParams[i] = C2Param::CopyAsStream(
*heapParams[i], false /* output */, heapParams[i]->stream());
@@ -782,10 +784,7 @@
if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
return {};
}
- if (C2_OK != mStore->createInterface(filter.traits.name, &intf)) {
- return {};
- }
- filters.push_back({comp, intf, filter.traits, filter.desc});
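+ // Reuse the component's own interface instead of creating a separate interface instance through the store.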
+ filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
}
return filters;
}
@@ -869,7 +868,7 @@
}
std::vector<Component> filters = createFilters();
std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
- comp, std::move(filters), weak_from_this());
+ comp, std::vector(filters), weak_from_this());
{
std::unique_lock lock(mWrappedComponentsMutex);
std::vector<std::weak_ptr<const C2Component>> &components =
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 74e7ef1..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -33,11 +33,13 @@
"libcodec2_vndk",
"libcutils",
"liblog",
+ "libnativewindow",
"libstagefright_foundation",
"libutils",
],
static_libs: [
+ "libarect",
"libyuv_static",
],
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index a54af83..a78d811 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
#include <list>
#include <mutex>
+#include <android/hardware_buffer.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -136,31 +137,56 @@
int width = view.crop().width;
int height = view.crop().height;
- if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
- // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
- if (IsNV12(view) && IsI420(img)) {
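+ // Cover every NV12/NV21/I420 pairing between the view and the image with libyuv fast paths before falling back to the generic copy.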
+ if (IsNV12(view)) {
+ if (IsNV12(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+ return OK;
+ } else if (IsNV21(img)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(img)) {
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
}
- } else {
+ }
+ } else if (IsNV21(view)) {
+ if (IsNV12(img)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+ return OK;
+ }
+ } else if (IsNV21(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+ return OK;
+ } else if (IsI420(img)) {
+ if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ }
+ } else if (IsI420(view)) {
+ if (IsNV12(img)) {
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
+ } else if (IsNV21(img)) {
+ if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+ return OK;
}
}
- if (IsNV12(view) && IsNV12(img)) {
- libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
- libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
- return OK;
- }
- if (IsI420(view) && IsI420(img)) {
- libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
- libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
- libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
- return OK;
- }
return _ImageCopy<true>(view, img, imgBase);
}
@@ -182,33 +208,56 @@
int32_t dst_stride_v = view.layout().planes[2].rowInc;
int width = view.crop().width;
int height = view.crop().height;
- if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
- // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
- if (IsNV12(img) && IsI420(view)) {
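+ // The same set of libyuv fast paths, applied in the opposite direction (image layout to view layout).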
+ if (IsNV12(img)) {
+ if (IsNV12(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+ return OK;
+ } else if (IsNV21(view)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(view)) {
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
}
- } else {
+ }
+ } else if (IsNV21(img)) {
+ if (IsNV12(view)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+ return OK;
+ }
+ } else if (IsNV21(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+ return OK;
+ } else if (IsI420(view)) {
+ if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ }
+ } else if (IsI420(img)) {
+ if (IsNV12(view)) {
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
+ } else if (IsNV21(view)) {
+ if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+ return OK;
}
}
- if (IsNV12(img) && IsNV12(view)) {
- // For NV12, copy Y and UV plane
- libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
- libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
- return OK;
- }
- if (IsI420(img) && IsI420(view)) {
- // For I420, copy Y, U and V plane.
- libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
- libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
- libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
- return OK;
- }
return _ImageCopy<false>(view, img, imgBase);
}
@@ -250,6 +299,20 @@
&& layout.planes[layout.PLANE_V].offset == 1);
}
+bool IsNV21(const C2GraphicView &view) {
+ if (!IsYUV420(view)) {
+ return false;
+ }
+ const C2PlanarLayout &layout = view.layout();
+ return (layout.rootPlanes == 2
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+ && layout.planes[layout.PLANE_U].offset == 1
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+ && layout.planes[layout.PLANE_V].offset == 0);
+}
+
bool IsI420(const C2GraphicView &view) {
if (!IsYUV420(view)) {
return false;
@@ -286,6 +349,15 @@
&& (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
}
+bool IsNV21(const MediaImage2 *img) {
+ if (!IsYUV420(img)) {
+ return false;
+ }
+ return (img->mPlane[1].mColInc == 2
+ && img->mPlane[2].mColInc == 2
+ && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+}
+
bool IsI420(const MediaImage2 *img) {
if (!IsYUV420(img)) {
return false;
@@ -295,6 +367,76 @@
&& img->mPlane[2].mOffset > img->mPlane[1].mOffset);
}
+FlexLayout GetYuv420FlexibleLayout() {
+ static FlexLayout sLayout = []{
+ AHardwareBuffer_Desc desc = {
+ 16, // width
+ 16, // height
+ 1, // layers
+ AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+ AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ 0, // stride
+ 0, // rfu0
+ 0, // rfu1
+ };
+ AHardwareBuffer *buffer = nullptr;
+ int ret = AHardwareBuffer_allocate(&desc, &buffer);
+ if (ret != 0) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ class AutoCloser {
+ public:
+ AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+ ~AutoCloser() {
+ if (mLocked) {
+ AHardwareBuffer_unlock(mBuffer, nullptr);
+ }
+ AHardwareBuffer_release(mBuffer);
+ }
+
+ void setLocked() { mLocked = true; }
+
+ private:
+ AHardwareBuffer *mBuffer;
+ bool mLocked;
+ } autoCloser(buffer);
+ AHardwareBuffer_Planes planes;
+ ret = AHardwareBuffer_lockPlanes(
+ buffer,
+ AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ -1, // fence
+ nullptr, // rect
+ &planes);
+ if (ret != 0) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ autoCloser.setLocked();
+ if (planes.planeCount != 3) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ if (planes.planes[0].pixelStride != 1) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+ return FLEX_LAYOUT_PLANAR;
+ }
+ if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+ ssize_t uvDist =
+ static_cast<uint8_t *>(planes.planes[2].data) -
+ static_cast<uint8_t *>(planes.planes[1].data);
+ if (uvDist == 1) {
+ return FLEX_LAYOUT_SEMIPLANAR_UV;
+ } else if (uvDist == -1) {
+ return FLEX_LAYOUT_SEMIPLANAR_VU;
+ }
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ return FLEX_LAYOUT_UNKNOWN;
+ }();
+ return sLayout;
+}
+
MediaImage2 CreateYUV420PlanarMediaImage2(
uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
return MediaImage2 {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..af29e81 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -96,6 +96,11 @@
bool IsNV12(const C2GraphicView &view);
/**
+ * Returns true iff a view has a NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
* Returns true iff a view has a I420 layout.
*/
bool IsI420(const C2GraphicView &view);
@@ -111,10 +116,26 @@
bool IsNV12(const MediaImage2 *img);
/**
+ * Returns true iff a MediaImage2 has a NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
* Returns true iff a MediaImage2 has a I420 layout.
*/
bool IsI420(const MediaImage2 *img);
+enum FlexLayout {
+ FLEX_LAYOUT_UNKNOWN,
+ FLEX_LAYOUT_PLANAR,
+ FLEX_LAYOUT_SEMIPLANAR_UV,
+ FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns the layout of the YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
/**
* A raw memory block to use for internal buffers.
*
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index 20058a1..4eea04f 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -23,6 +23,7 @@
#include <system/audio.h>
#include "LVM_Private.h"
+#include "ScalarArithmetic.h"
#include "VectorArithmetic.h"
#include "LVM_Coeffs.h"
@@ -178,6 +179,9 @@
* Apply the filter
*/
pInstance->pTEBiquad->process(pProcessed, pProcessed, NrFrames);
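+ // Saturate every treble-boosted sample before the volume balance stage.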
+ for (auto i = 0; i < NrChannels * NrFrames; i++) {
+ pProcessed[i] = LVM_Clamp(pProcessed[i]);
+ }
}
/*
* Volume balance
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index df7ca5a..7571a24 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -53,16 +53,16 @@
flags_arr=(
"-csE"
"-eqE"
- "-tE"
- "-csE -tE -eqE"
+ "-tE -trebleLvl:15"
+ "-csE -tE -trebleLvl:15 -eqE"
"-bE -M"
- "-csE -tE"
- "-csE -eqE" "-tE -eqE"
- "-csE -tE -bE -M -eqE"
- "-tE -eqE -vcBal:96 -M"
- "-tE -eqE -vcBal:-96 -M"
- "-tE -eqE -vcBal:0 -M"
- "-tE -eqE -bE -vcBal:30 -M"
+ "-csE -tE -trebleLvl:15"
+ "-csE -eqE" "-tE -trebleLvl:15 -eqE"
+ "-csE -tE -trebleLvl:15 -bE -M -eqE"
+ "-tE -trebleLvl:15 -eqE -vcBal:96 -M"
+ "-tE -trebleLvl:15 -eqE -vcBal:-96 -M"
+ "-tE -trebleLvl:15 -eqE -vcBal:0 -M"
+ "-tE -trebleLvl:15 -eqE -bE -vcBal:30 -M"
)
fs_arr=(
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index e484a1a..e65228c 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -79,6 +79,7 @@
int bassEffectLevel = 0;
int eqPresetLevel = 0;
int frameLength = 256;
+ int trebleEffectLevel = 0;
LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
@@ -303,10 +304,6 @@
params->PSA_Enable = LVM_PSA_OFF;
params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
- /* TE Control parameters */
- params->TE_OperatingMode = LVM_TE_OFF;
- params->TE_EffectLevel = 0;
-
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
@@ -445,6 +442,7 @@
/* Treble Enhancement parameters */
params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+ params->TE_EffectLevel = plvmConfigParams->trebleEffectLevel;
/* PSA Control parameters */
params->PSA_Enable = LVM_PSA_ON;
@@ -604,6 +602,15 @@
return -1;
}
lvmConfigParams.eqPresetLevel = eqPresetLevel;
+ } else if (!strncmp(argv[i], "-trebleLvl:", 11)) {
+ const int trebleEffectLevel = atoi(argv[i] + 11);
+ if (trebleEffectLevel > LVM_TE_MAX_EFFECTLEVEL ||
+ trebleEffectLevel < LVM_TE_MIN_EFFECTLEVEL) {
+ printf("Error: Unsupported Treble Effect Level : %d\n", trebleEffectLevel);
+ printUsage();
+ return -1;
+ }
+ lvmConfigParams.trebleEffectLevel = trebleEffectLevel;
} else if (!strcmp(argv[i], "-bE")) {
lvmConfigParams.bassEnable = LVM_BE_ON;
} else if (!strcmp(argv[i], "-eqE")) {
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 9ab117a..c5d3c1d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -35,6 +35,7 @@
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
"libbase",
+ "libactivitymanager_aidl",
"libandroid_net",
"libaudioclient",
"libbinder",
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index 819389d..0cc2294 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -4,4 +4,8 @@
lajos@google.com
marcone@google.com
taklee@google.com
-wonsik@google.com
\ No newline at end of file
+wonsik@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index cdbd52c..12179db 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -38,6 +38,7 @@
static_libs: [
"libc_malloc_debug_backtrace",
"libbatterystats_aidl",
+ "libprocessinfoservice_aidl",
],
shared_libs: [
"libaudioutils", // for clock.h
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..e9c9f8d 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -21,9 +21,9 @@
#include <media/stagefright/ProcessInfo.h>
#include <binder/IPCThreadState.h>
-#include <binder/IProcessInfoService.h>
#include <binder/IServiceManager.h>
#include <private/android_filesystem_config.h>
+#include <processinfo/IProcessInfoService.h>
namespace android {
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index ca29591..2038aa9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -218,7 +218,9 @@
add(devices);
return size();
}
- return SortedVector::merge(devices);
+ ssize_t ret = SortedVector::merge(devices);
+ refreshTypes();
+ return ret;
}
/**
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
index 7015b7b..7be10c4 100644
--- a/services/audiopolicy/service/Android.mk
+++ b/services/audiopolicy/service/Android.mk
@@ -19,6 +19,7 @@
libaudiopolicymanager_interface_headers
LOCAL_SHARED_LIBRARIES := \
+ libactivitymanager_aidl \
libcutils \
libutils \
liblog \
@@ -36,6 +37,7 @@
capture_state_listener-aidl-cpp
LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
+ libactivitymanager_aidl \
libsensorprivacy
LOCAL_STATIC_LIBRARIES := \
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 11df5f3..46698b3 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -113,6 +113,7 @@
"libutilscallstack",
"libutils",
"libbinder",
+ "libactivitymanager_aidl",
"libcutils",
"libmedia",
"libmediautils",
@@ -150,11 +151,13 @@
],
static_libs: [
+ "libprocessinfoservice_aidl",
"libbinderthreadstateutils",
],
export_shared_lib_headers: [
"libbinder",
+ "libactivitymanager_aidl",
"libcamera_client",
"libfmq",
"libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3d9998a..91dda92 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -41,7 +41,6 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/PermissionController.h>
-#include <binder/ProcessInfoService.h>
#include <binder/IResultReceiver.h>
#include <binderthreadstate/CallerUtils.h>
#include <cutils/atomic.h>
@@ -57,6 +56,7 @@
#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <mediautils/BatteryNotifier.h>
+#include <processinfo/ProcessInfoService.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/String16.h>
@@ -252,10 +252,16 @@
proxyBinder->pingForUserUpdate();
}
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
+void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+ SystemCameraKind systemCameraKind) {
Mutex::Autolock lock(mStatusListenerLock);
-
for (auto& i : mListenerList) {
+ if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
+ i->getListenerUid())) {
+ ALOGV("Skipping torch callback for system-only camera device %s",
+ cameraId.c_str());
+ continue;
+ }
i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
}
}
@@ -341,7 +347,7 @@
Mutex::Autolock al(mTorchStatusMutex);
mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
- broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF);
+ broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
}
updateCameraNumAndIds();
@@ -502,12 +508,19 @@
void CameraService::onTorchStatusChanged(const String8& cameraId,
TorchModeStatus newStatus) {
+ SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+ status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
+ if (res != OK) {
+ ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
+ cameraId.string());
+ return;
+ }
Mutex::Autolock al(mTorchStatusMutex);
- onTorchStatusChangedLocked(cameraId, newStatus);
+ onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
}
void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
- TorchModeStatus newStatus) {
+ TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
__FUNCTION__, cameraId.string(), newStatus);
@@ -556,8 +569,7 @@
}
}
}
-
- broadcastTorchModeStatus(cameraId, newStatus);
+ broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
@@ -1864,6 +1876,10 @@
String8 id = String8(cameraId.string());
int uid = CameraThreadState::getCallingUid();
+ if (shouldRejectSystemCameraConnection(id)) {
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
+ " for system only device %s: ", id.string());
+ }
// verify id is valid.
auto state = getCameraState(id);
if (state == nullptr) {
@@ -2220,6 +2236,11 @@
return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
clientUid);}), cameraStatuses->end());
+ // cameraStatuses will have non-eligible camera ids removed.
+ std::set<String16> idsChosenForCallback;
+ for (const auto &s : *cameraStatuses) {
+ idsChosenForCallback.insert(String16(s.cameraId));
+ }
/*
* Immediately signal current torch status to this listener only
@@ -2229,7 +2250,11 @@
Mutex::Autolock al(mTorchStatusMutex);
for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
String16 id = String16(mTorchStatusMap.keyAt(i).string());
- listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+ // The camera id is visible to the client. Fine to send torch
+ // callback.
+ if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
+ listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+ }
}
}
@@ -3766,7 +3791,7 @@
TorchModeStatus::AVAILABLE_OFF :
TorchModeStatus::NOT_AVAILABLE;
if (torchStatus != newTorchStatus) {
- onTorchStatusChangedLocked(cameraId, newTorchStatus);
+ onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
}
}
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6771718..2853b0c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -996,7 +996,8 @@
// handle torch mode status change and invoke callbacks. mTorchStatusMutex
// should be locked.
void onTorchStatusChangedLocked(const String8& cameraId,
- hardware::camera::common::V1_0::TorchModeStatus newStatus);
+ hardware::camera::common::V1_0::TorchModeStatus newStatus,
+ SystemCameraKind systemCameraKind);
// get a camera's torch status. mTorchStatusMutex should be locked.
status_t getTorchStatusLocked(const String8 &cameraId,
@@ -1085,7 +1086,8 @@
static void pingCameraServiceProxy();
void broadcastTorchModeStatus(const String8& cameraId,
- hardware::camera::common::V1_0::TorchModeStatus status);
+ hardware::camera::common::V1_0::TorchModeStatus status,
+ SystemCameraKind systemCameraKind);
void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index 86e047e..a856c05 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -25,6 +25,7 @@
],
shared_libs: [
+ "libactivitymanager_aidl",
"libbinder",
"libbinder_ndk",
"liblog",