Merge "CodecCapabilities: Initialize native CodecCapabilities in MediaCodecInfo" into main
diff --git a/Android.bp b/Android.bp
index 0c7ed6e..c716a06 100644
--- a/Android.bp
+++ b/Android.bp
@@ -135,6 +135,7 @@
aidl_interface {
name: "av-audio-types-aidl",
+ unstable: true,
host_supported: true,
vendor_available: true,
double_loadable: true,
@@ -154,28 +155,4 @@
sdk_version: "module_current",
},
},
- versions_with_info: [
- {
- version: "1",
- imports: ["android.hardware.audio.core-V2"],
- },
- ],
- frozen: false,
-
-}
-
-latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
-
-cc_defaults {
- name: "latest_av_audio_types_aidl_ndk_shared",
- shared_libs: [
- latest_av_audio_types_aidl + "-ndk",
- ],
-}
-
-cc_defaults {
- name: "latest_av_audio_types_aidl_ndk_static",
- static_libs: [
- latest_av_audio_types_aidl + "-ndk",
- ],
}
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
deleted file mode 100644
index 0002682..0000000
--- a/aidl_api/av-audio-types-aidl/1/.hash
+++ /dev/null
@@ -1 +0,0 @@
-ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-// the interface (from the latest frozen version), the build system will
-// prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
- @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
- void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
- android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
- android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
- @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
- enum ParameterScope {
- MODULE = 0,
- STREAM = 1,
- }
-}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-// the interface (from the latest frozen version), the build system will
-// prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
- @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
- void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
- android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
- android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
- @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
- enum ParameterScope {
- MODULE = 0,
- STREAM = 1,
- }
-}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index c820a2c..96fb3e3 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -7,6 +7,7 @@
flag {
name: "aidl_hal_input_surface"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flags for enabling AIDL HAL InputSurface handling"
bug: "201479783"
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
index 0064cec..77305ce 100644
--- a/media/codec2/components/apv/C2SoftApvDec.cpp
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -279,6 +279,10 @@
if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+ pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+ }
+
// If color format surface isn't added to supported formats, there is no way to know
// when the color-format is configured to surface. This is necessary to be able to
// choose 10-bit format while decoding 10-bit clips in surface mode.
@@ -374,6 +378,8 @@
(void)mayBlock;
ALOGV("%s", __FUNCTION__);
// take default values for all unspecified fields, and coded values for specified ones
+ ALOGV("%s - coded range: %u, primaries: %u, transfer: %u, matrix: %u",
+ __func__, coded.v.range, coded.v.primaries, coded.v.transfer, coded.v.matrix);
me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
me.set().primaries =
coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
@@ -717,6 +723,22 @@
}
}
+static void copyBufferP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+ const uint16_t *srcUV, size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+ size_t dstUVStride, size_t width, size_t height) {
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(dstY, srcY, width * sizeof(uint16_t));
+ srcY += srcYStride;
+ dstY += dstYStride;
+ }
+
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(dstUV, srcUV, width * sizeof(uint16_t));
+ srcUV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+}
+
static void copyBufferFromYUV422ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
const uint8_t* srcY, const uint8_t* srcU,
const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
@@ -992,28 +1014,81 @@
}
}
+void C2SoftApvDec::getVuiParams(VuiColorAspects* buffer) {
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = buffer->primaries;
+ vuiColorAspects.transfer = buffer->transfer;
+ vuiColorAspects.coeffs = buffer->coeffs;
+ vuiColorAspects.fullRange = buffer->fullRange;
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ ALOGV("colorAspects: primaries:%d, transfer:%d, coeffs:%d, fullRange:%d",
+ codedAspects.primaries, codedAspects.transfer, codedAspects.matrix,
+ codedAspects.range);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+}
+
status_t C2SoftApvDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work) {
if (!(work && pool)) return BAD_VALUE;
- oapv_imgb_t* imgbOutput;
+ oapv_imgb_t* imgbOutput = nullptr;
std::shared_ptr<C2GraphicBlock> block;
if (ofrms.num_frms > 0) {
- oapv_frm_t* frm = &ofrms.frm[0];
- imgbOutput = frm->imgb;
+ for (int i = 0; i < ofrms.num_frms; i++) {
+ oapv_frm_t* frm = &ofrms.frm[i];
+ if (frm->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) {
+ imgbOutput = frm->imgb;
+ break;
+ }
+ }
+ if (imgbOutput == nullptr) {
+ ALOGW("No OAPV primary frame");
+ return false;
+ }
} else {
ALOGW("No output frames");
return false;
}
bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
+ // TODO: use bitstream color aspect after vui parsing
+ VuiColorAspects colorAspect;
+ colorAspect.primaries = 2;
+ colorAspect.transfer = 2;
+ colorAspect.coeffs = 2;
+ colorAspect.fullRange = 1;
+ getVuiParams(&colorAspect);
+
uint32_t format = HAL_PIXEL_FORMAT_YV12;
std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
IntfImpl::Lock lock = mIntf->lock();
codedColorAspects = mIntf->getColorAspects_l();
+
bool allowRGBA1010102 = false;
if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -1070,7 +1145,34 @@
size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
- if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ if (format == AHARDWAREBUFFER_FORMAT_YCbCr_P210) {
+ if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+ const uint16_t *srcY = (const uint16_t *)imgbOutput->a[0];
+ const uint16_t *srcU = (const uint16_t *)imgbOutput->a[1];
+ const uint16_t *srcV = (const uint16_t *)imgbOutput->a[2];
+ size_t srcYStride = imgbOutput->s[0] / 2;
+ size_t srcUStride = imgbOutput->s[1] / 2;
+ size_t srcVStride = imgbOutput->s[2] / 2;
+ dstYStride /= 2;
+ dstUStride /= 2;
+ dstVStride /= 2;
+ ALOGV("OAPV_CS_P210 buffer");
+ copyBufferP210((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU,
+ srcYStride, srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+ } else {
+ ALOGE("Unsupported conversion from bd:%d, format: %d(%s), to format: %d(%s)",
+ OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs),
+ OAPV_CS_GET_FORMAT(imgbOutput->cs),
+ OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420 ?
+ "YUV420" : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ?
+ "YUV422" : "UNKNOWN"),
+ format,
+ format == HAL_PIXEL_FORMAT_YCBCR_P010 ?
+ "P010" : (format == HAL_PIXEL_FORMAT_YCBCR_420_888 ?
+ "YUV420" : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN"))
+ );
+ }
+ } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
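For orientation, P210 keeps the 16-bit sample containers of P010 but carries 4:2:2 chroma, so the interleaved UV plane spans the full frame height; that is why the decoder above needs the new copyBufferP210() path next to the existing P010 one. The standalone sketch below is not code from the tree; it assumes packed planes (stride == width), as copyBufferP210's memcpy width also does, and just works out the resulting plane sizes.

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
    // Plane sizes for a packed 1920x1080 frame with 10-bit samples stored
    // MSB-aligned in 16-bit containers (the layout both P010 and P210 use).
    const size_t w = 1920, h = 1080, bytesPerSample = sizeof(uint16_t);
    std::printf("Y plane : %zu bytes\n", w * h * bytesPerSample);        // 4147200, same for both formats
    std::printf("P010 UV : %zu bytes\n", w * (h / 2) * bytesPerSample);  // 2073600, 4:2:0 half-height chroma
    std::printf("P210 UV : %zu bytes\n", w * h * bytesPerSample);        // 4147200, 4:2:2 full-height chroma
    return 0;
}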
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
index f5beb8f..05afdb2 100644
--- a/media/codec2/components/apv/C2SoftApvDec.h
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -118,6 +118,26 @@
uint32_t mHeight;
bool mSignalledOutputEos;
bool mSignalledError;
+ // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+ // converting them to C2 values for each frame
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+ transfer(C2Color::TRANSFER_UNSPECIFIED),
+ coeffs(C2Color::MATRIX_UNSPECIFIED),
+ fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
oapvd_t oapvdHandle;
oapvm_t oapvmHandle;
@@ -126,6 +146,8 @@
int outputCsp;
+ void getVuiParams(VuiColorAspects* buffer);
+
C2_DO_NOT_COPY(C2SoftApvDec);
};
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index a61cfd6..9c5e0b2 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -221,11 +221,14 @@
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
std::vector<uint32_t> pixelFormats = {
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
};
if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+ pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+ }
addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
.withDefault(new C2StreamPixelFormatInfo::input(
0u, HAL_PIXEL_FORMAT_YCBCR_P010))
@@ -623,12 +626,6 @@
return C2_NO_MEMORY;
}
- /* Calculate SDR to HDR mapping values */
- mSdrToHdrMapping.clear();
- for (int32_t i = 0; i < 256; i++) {
- mSdrToHdrMapping.push_back((uint16_t)(i * 1023 / 255 + 0.5));
- }
-
mStarted = true;
mInitEncoder = true;
return C2_OK;
@@ -692,26 +689,48 @@
switch (layout.type) {
case C2PlanarLayout::TYPE_RGB:
- [[fallthrough]];
- case C2PlanarLayout::TYPE_RGBA: {
- // TODO: Add RGBA1010102 support
ALOGE("Not supported RGB color format");
return C2_BAD_VALUE;
+ case C2PlanarLayout::TYPE_RGBA: {
+ [[fallthrough]];
+ }
+ case C2PlanarLayout::TYPE_YUVA: {
+ ALOGV("Convert from ABGR2101010 to P210");
+ uint16_t *dstY, *dstU, *dstV;
+ dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+ dstU = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+ dstV = (uint16_t*)inputFrames->frm[0].imgb->a[2];
+ convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(input->data()[0]),
+ layout.planes[layout.PLANE_Y].rowInc / 4, width,
+ height, mColorAspects->matrix,
+ mColorAspects->range);
+ break;
}
case C2PlanarLayout::TYPE_YUV: {
if (IsP010(*input)) {
if (mColorFormat == OAPV_CF_YCBCR422) {
ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
} else if (mColorFormat == OAPV_CF_PLANAR2) {
- ColorConvertP010ToP210(input, inputFrames->frm[0].imgb);
+ uint16_t *srcY = (uint16_t*)(input->data()[0]);
+ uint16_t *srcUV = (uint16_t*)(input->data()[1]);
+ uint16_t *dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+ uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+ convertP010ToP210(dstY, dstUV, srcY, srcUV,
+ input->width(), input->width(), input->width(),
+ input->height());
} else {
ALOGE("Not supported color format. %d", mColorFormat);
return C2_BAD_VALUE;
}
} else if (IsNV12(*input)) {
- ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
- } else if (IsNV21(*input)) {
- ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
+ uint8_t *srcY = (uint8_t*)input->data()[0];
+ uint8_t *srcUV = (uint8_t*)input->data()[1];
+ uint16_t *dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+ uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+ convertSemiPlanar8ToP210(dstY, dstUV, srcY, srcUV,
+ input->width(), input->width(), input->width(),
+ input->width(), input->width(), input->height(),
+ CONV_FORMAT_I420);
} else if (IsYUV420(*input)) {
return C2_BAD_VALUE;
} else if (IsI420(*input)) {
@@ -731,46 +750,6 @@
return C2_OK;
}
-void C2SoftApvEnc::ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
- auto width = input->width();
- auto height = input->height();
-
- auto* yPlane = (uint8_t*)input->data()[0];
- auto* uvPlane = (uint8_t*)input->data()[1];
-
- auto* dst = (uint16_t*)imgb->a[0];
- int32_t lumaOffset = 0;
- for (int32_t y = 0; y < height; ++y) {
- for (int32_t x = 0; x < width; ++x) {
- lumaOffset = y * width + x;
- dst[lumaOffset] = (mSdrToHdrMapping[yPlane[lumaOffset]] << 6) |
- ((mSdrToHdrMapping[yPlane[lumaOffset]] & 0x300) >> 3);
- }
- }
-
- auto* dst_uv = (uint16_t*)imgb->a[1];
- uint32_t uvDstStride = width;
- int32_t srcOffset = 0;
- int32_t dstOffset1 = 0, dstOffset2 = 0;
- int32_t tmp1 = 0, tmp2 = 0;
- for (int32_t y = 0; y < height / 2; ++y) {
- for (int32_t x = 0; x < width; x += 2) {
- srcOffset = y * width + x;
- dstOffset1 = (y * 2) * width + x;
- dstOffset2 = ((y * 2) + 1) * width + x;
-
- tmp1 = (mSdrToHdrMapping[uvPlane[srcOffset]] << 6) |
- ((mSdrToHdrMapping[uvPlane[srcOffset]] & 0x300) >> 3);
- tmp2 = (mSdrToHdrMapping[uvPlane[srcOffset + 1]] << 6) |
- ((mSdrToHdrMapping[uvPlane[srcOffset + 1]] & 0x300) >> 3);
- dst_uv[dstOffset1] = (uint16_t)tmp1;
- dst_uv[dstOffset1 + 1] = (uint16_t)tmp2;
- dst_uv[dstOffset2] = (uint16_t)tmp1;
- dst_uv[dstOffset2 + 1] = (uint16_t)tmp2;
- }
- }
-}
-
C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
int32_t bitrate, int32_t band) {
C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
@@ -885,30 +864,6 @@
return level;
}
-void C2SoftApvEnc::ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
- auto width = input->width();
- auto height = input->height();
-
- auto* yPlane = (uint8_t*)input->data()[0];
- auto* uvPlane = (uint8_t*)input->data()[1];
- uint32_t uvStride = width * 2;
-
- auto* src = yPlane;
- auto* dst = (uint8_t*)imgb->a[0];
- std::memcpy(dst, src, width * height * 2);
-
- auto* dst_uv = (uint8_t*)imgb->a[1];
- int32_t offset1 = 0, offset2 = 0;
- for (int32_t y = 0; y < height / 2; ++y) {
- offset1 = (y * 2) * uvStride;
- offset2 = (y * 2 + 1) * uvStride;
- src = uvPlane + (y * uvStride);
-
- std::memcpy(dst_uv + offset1, src, uvStride);
- std::memcpy(dst_uv + offset2, src, uvStride);
- }
-}
-
void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
oapv_imgb_t* imgb) {
uint32_t width = input->width();
@@ -1003,108 +958,98 @@
finish(workIndex, fillWork);
}
}
-void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
- uint32_t encodedSize) {
- uint32_t csdStart = 0, csdEnd = 0;
- uint32_t bitOffset = 0;
- uint8_t* buf = (uint8_t*)bitb->addr + csdStart;
- if (encodedSize == 0) {
- ALOGE("the first frame size is zero, so no csd data will be created.");
+void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work,
+ oapv_bitb_t* bitb,
+ uint32_t encodedSize) {
+ if (encodedSize < 31) {
+ ALOGE("the first frame size is too small, so no csd data will be created.");
return;
}
- ABitReader reader(buf, encodedSize);
+ ABitReader reader((uint8_t*)bitb->addr, encodedSize);
+
+ uint8_t number_of_configuration_entry = 0;
+ uint8_t pbu_type = 0;
+ uint8_t number_of_frame_info = 0;
+ bool color_description_present_flag = false;
+ bool capture_time_distance_ignored = false;
+ uint8_t profile_idc = 0;
+ uint8_t level_idc = 0;
+ uint8_t band_idc = 0;
+ uint32_t frame_width_minus1 = 0;
+ uint32_t frame_height_minus1 = 0;
+ uint8_t chroma_format_idc = 0;
+ uint8_t bit_depth_minus8 = 0;
+ uint8_t capture_time_distance = 0;
+ uint8_t color_primaries = 0;
+ uint8_t transfer_characteristics = 0;
+ uint8_t matrix_coefficients = 0;
/* pbu_header() */
- reader.skipBits(32);
- bitOffset += 32; // pbu_size
- reader.skipBits(32);
- bitOffset += 32; // currReadSize
- csdStart = bitOffset / 8;
-
- int32_t pbu_type = reader.getBits(8);
- bitOffset += 8; // pbu_type
- reader.skipBits(16);
- bitOffset += 16; // group_id
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ reader.skipBits(32); // pbu_size
+ reader.skipBits(32); // currReadSize
+ pbu_type = reader.getBits(8); // pbu_type
+ reader.skipBits(16); // group_id
+ reader.skipBits(8); // reserved_zero_8bits
/* frame info() */
- int32_t profile_idc = reader.getBits(8);
- bitOffset += 8; // profile_idc
- int32_t level_idc = reader.getBits(8);
- bitOffset += 8; // level_idc
- int32_t band_idc = reader.getBits(3);
- bitOffset += 3; // band_idc
- reader.skipBits(5);
- bitOffset += 5; // reserved_zero_5bits
- int32_t width = reader.getBits(32);
- bitOffset += 32; // width
- int32_t height = reader.getBits(32);
- bitOffset += 32; // height
- int32_t chroma_idc = reader.getBits(4);
- bitOffset += 4; // chroma_format_idc
- reader.skipBits(4);
- bitOffset += 4; // bit_depth
- reader.skipBits(8);
- bitOffset += 8; // capture_time_distance
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ profile_idc = reader.getBits(8); // profile_idc
+ level_idc = reader.getBits(8); // level_idc
+ band_idc = reader.getBits(3); // band_idc
+ reader.skipBits(5); // reserved_zero_5bits
+ frame_width_minus1 = reader.getBits(32); // width
+ frame_height_minus1 = reader.getBits(32); // height
+ chroma_format_idc = reader.getBits(4); // chroma_format_idc
+ bit_depth_minus8 = reader.getBits(4); // bit_depth
+ capture_time_distance = reader.getBits(8); // capture_time_distance
+ reader.skipBits(8); // reserved_zero_8bits
/* frame header() */
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bit
- bool color_description_present_flag = reader.getBits(1);
- bitOffset += 1; // color_description_present_flag
+ reader.skipBits(8); // reserved_zero_8bit
+ color_description_present_flag = reader.getBits(1); // color_description_present_flag
if (color_description_present_flag) {
- reader.skipBits(8);
- bitOffset += 8; // color_primaries
- reader.skipBits(8);
- bitOffset += 8; // transfer_characteristics
- reader.skipBits(8);
- bitOffset += 8; // matrix_coefficients
- }
- bool use_q_matrix = reader.getBits(1);
- bitOffset += 1; // use_q_matrix
- if (use_q_matrix) {
- /* quantization_matrix() */
- int32_t numComp = chroma_idc == 0 ? 1
- : chroma_idc == 2 ? 3
- : chroma_idc == 3 ? 3
- : chroma_idc == 4 ? 4
- : -1;
- int32_t needBitsForQ = 64 * 8 * numComp;
- reader.skipBits(needBitsForQ);
- bitOffset += needBitsForQ;
+ color_primaries = reader.getBits(8); // color_primaries
+ transfer_characteristics = reader.getBits(8); // transfer_characteristics
+ matrix_coefficients = reader.getBits(8); // matrix_coefficients
}
- /* tile_info() */
- int32_t tile_width_in_mbs_minus1 = reader.getBits(28);
- bitOffset += 28;
- int32_t tile_height_in_mbs_minus1 = reader.getBits(28);
- bitOffset += 28;
- bool tile_size_present_in_fh_flag = reader.getBits(1);
- bitOffset += 1;
- if (tile_size_present_in_fh_flag) {
- int32_t numTiles = ceil((double)width / (double)tile_width_in_mbs_minus1) *
- ceil((double)height / (double)tile_height_in_mbs_minus1);
- reader.skipBits(32 * numTiles);
- bitOffset += (32 * numTiles);
- }
+ number_of_configuration_entry = 1; // Assumed to be 1 for real-time encoding on the device.
+ number_of_frame_info = 1; // Assumed to be 1 for real-time encoding on the device.
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ std::vector<uint8_t> csdData;
+ csdData.push_back((uint8_t)0x1);
+ csdData.push_back(number_of_configuration_entry);
- /* byte_alignmenet() */
- while (bitOffset % 8) {
- reader.skipBits(1);
- bitOffset += 1;
+ for (uint8_t i = 0; i < number_of_configuration_entry; i++) {
+ csdData.push_back(pbu_type);
+ csdData.push_back(number_of_frame_info);
+ for (uint8_t j = 0; j < number_of_frame_info; j++) {
+ csdData.push_back((uint8_t)((color_description_present_flag << 1) |
+ capture_time_distance_ignored));
+ csdData.push_back(profile_idc);
+ csdData.push_back(level_idc);
+ csdData.push_back(band_idc);
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 24) & 0xff));
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 16) & 0xff));
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 8) & 0xff));
+ csdData.push_back((uint8_t)(frame_width_minus1 & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 24) & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 16) & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 8) & 0xff));
+ csdData.push_back((uint8_t)(frame_height_minus1 & 0xff));
+ csdData.push_back((uint8_t)(((chroma_format_idc << 4) & 0xf0) |
+ (bit_depth_minus8 & 0xf)));
+ csdData.push_back((uint8_t)(capture_time_distance));
+ if (color_description_present_flag) {
+ csdData.push_back(color_primaries);
+ csdData.push_back(transfer_characteristics);
+ csdData.push_back(matrix_coefficients);
+ }
+ }
}
- csdEnd = bitOffset / 8;
- int32_t csdSize = csdEnd - csdStart + 1;
std::unique_ptr<C2StreamInitDataInfo::output> csd =
- C2StreamInitDataInfo::output::AllocUnique(csdSize, 0u);
+ C2StreamInitDataInfo::output::AllocUnique(csdData.size(), 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
@@ -1113,10 +1058,10 @@
return;
}
- buf = buf + csdStart;
- memcpy(csd->m.value, buf, csdSize);
+ memcpy(csd->m.value, csdData.data(), csdData.size());
work->worklets.front()->output.configUpdate.push_back(std::move(csd));
}
+
c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work) {
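createCsdData() above now serializes a small fixed-layout record (a version byte, entry counts, then per-frame-info fields) rather than copying a slice of the bitstream. As a reading aid, the sketch below is a hypothetical parser that mirrors that layout; the struct and function names are invented here, and only the single-entry case the encoder assumes is handled.

#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

// Hypothetical mirror of the record written by createCsdData(); names invented for this sketch.
struct ApvCsdFrameInfo {
    uint8_t pbuType = 0;
    bool colorDescriptionPresent = false;
    uint8_t profileIdc = 0, levelIdc = 0, bandIdc = 0;
    uint32_t frameWidthMinus1 = 0, frameHeightMinus1 = 0;
    uint8_t chromaFormatIdc = 0, bitDepthMinus8 = 0, captureTimeDistance = 0;
    uint8_t colorPrimaries = 0, transferCharacteristics = 0, matrixCoefficients = 0;
};

static std::optional<ApvCsdFrameInfo> parseSingleEntryApvCsd(const std::vector<uint8_t>& csd) {
    size_t i = 0;
    auto readU8 = [&](uint8_t* out) -> bool {
        if (i >= csd.size()) return false;
        *out = csd[i++];
        return true;
    };
    auto readU32 = [&](uint32_t* out) -> bool {
        if (i + 4 > csd.size()) return false;
        *out = (uint32_t(csd[i]) << 24) | (uint32_t(csd[i + 1]) << 16) |
               (uint32_t(csd[i + 2]) << 8) | uint32_t(csd[i + 3]);
        i += 4;
        return true;
    };

    ApvCsdFrameInfo info;
    uint8_t version = 0, numEntries = 0, numFrameInfo = 0, flags = 0, packed = 0;
    if (!readU8(&version) || version != 0x1) return std::nullopt;
    if (!readU8(&numEntries) || numEntries != 1) return std::nullopt;      // writer assumes 1
    if (!readU8(&info.pbuType)) return std::nullopt;
    if (!readU8(&numFrameInfo) || numFrameInfo != 1) return std::nullopt;  // writer assumes 1
    if (!readU8(&flags)) return std::nullopt;
    info.colorDescriptionPresent = ((flags >> 1) & 0x1) != 0;  // bit 0 is capture_time_distance_ignored
    if (!readU8(&info.profileIdc) || !readU8(&info.levelIdc) || !readU8(&info.bandIdc)) {
        return std::nullopt;
    }
    if (!readU32(&info.frameWidthMinus1) || !readU32(&info.frameHeightMinus1)) {
        return std::nullopt;
    }
    if (!readU8(&packed)) return std::nullopt;
    info.chromaFormatIdc = (packed >> 4) & 0xf;
    info.bitDepthMinus8 = packed & 0xf;
    if (!readU8(&info.captureTimeDistance)) return std::nullopt;
    if (info.colorDescriptionPresent &&
        (!readU8(&info.colorPrimaries) || !readU8(&info.transferCharacteristics) ||
         !readU8(&info.matrixCoefficients))) {
        return std::nullopt;
    }
    return info;
}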
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
index 441c663..fc4ad7d 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.h
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -65,9 +65,7 @@
void showEncoderParams(oapve_cdesc_t* cdsc);
- void ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
void ColorConvertP010ToYUV422P10le(const C2GraphicView* const input, oapv_imgb_t* imgb);
- void ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
void createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
uint32_t encodedSize);
@@ -106,7 +104,6 @@
oapve_t mEncoderId;
oapvm_t mMetaId;
uint8_t* mBitstreamBuf = nullptr;
- std::vector<uint16_t> mSdrToHdrMapping;
bool mReceivedFirstFrame = false;
C2_DO_NOT_COPY(C2SoftApvEnc);
};
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index aec6523..a03f24f 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -463,6 +463,19 @@
}
}
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY, const uint16_t *srcUV,
+ size_t srcUVStride, size_t dstUVStride, size_t width, size_t height) {
+ std::memcpy(dstY, srcY, width * height * sizeof(uint16_t));
+
+ int32_t offsetTop, offsetBot;
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ offsetTop = (y * 2) * dstUVStride;
+ offsetBot = (y * 2 + 1) * dstUVStride;
+ std::memcpy(dstUV + offsetTop, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+ std::memcpy(dstUV + offsetBot, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+ }
+}
+
static const int16_t bt709Matrix_10bit[2][3][3] = {
{ { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } }, /* RANGE_FULL */
{ { 186, 627, 63 }, { -103, -345, 448 }, { 448, -407, -41 } }, /* RANGE_LIMITED */
@@ -517,6 +530,46 @@
}
}
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+ size_t srcRGBStride, size_t width, size_t height,
+ C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
+ uint16_t r, g, b;
+ int32_t i32Y, i32U, i32V;
+ uint16_t zeroLvl = colorRange == C2Color::RANGE_FULL ? 0 : 64;
+ uint16_t maxLvlLuma = colorRange == C2Color::RANGE_FULL ? 1023 : 940;
+ uint16_t maxLvlChroma = colorRange == C2Color::RANGE_FULL ? 1023 : 960;
+ // set default range as limited
+ if (colorRange != C2Color::RANGE_FULL) {
+ colorRange = C2Color::RANGE_LIMITED;
+ }
+ const int16_t(*weights)[3] = (colorMatrix == C2Color::MATRIX_BT709)
+ ? bt709Matrix_10bit[colorRange - 1]
+ : bt2020Matrix_10bit[colorRange - 1];
+
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ b = (srcRGBA[x] >> 20) & 0x3FF;
+ g = (srcRGBA[x] >> 10) & 0x3FF;
+ r = srcRGBA[x] & 0x3FF;
+
+ i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2] + 512) >> 10) +
+ zeroLvl;
+ dstY[x] = (CLIP3(zeroLvl, i32Y, maxLvlLuma) << 6) & 0xFFC0;
+ if (x % 2 == 0) {
+ i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2] + 512) >> 10) +
+ 512;
+ i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2] + 512) >> 10) +
+ 512;
+ dstUV[x] = (CLIP3(zeroLvl, i32U, maxLvlChroma) << 6) & 0xFFC0;
+ dstUV[x + 1] = (CLIP3(zeroLvl, i32V, maxLvlChroma) << 6) & 0xFFC0;
+ }
+ }
+ srcRGBA += srcRGBStride;
+ dstY += width;
+ dstUV += width;
+ }
+}
+
void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
size_t srcVStride, size_t dstStride, size_t width,
@@ -631,6 +683,36 @@
isMonochrome);
}
}
+
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+ const uint8_t *srcY, const uint8_t *srcUV,
+ size_t srcYStride, size_t srcUVStride,
+ size_t dstYStride, size_t dstUVStride,
+ uint32_t width, uint32_t height,
+ CONV_FORMAT_T format) {
+ if (format != CONV_FORMAT_I420) {
+ ALOGE("No support for semi-planar8 to P210. format is %d", format);
+ return;
+ }
+
+ for (int32_t y = 0; y < height; ++y) {
+ for (int32_t x = 0; x < width; ++x) {
+ dstY[x] = ((uint16_t)((double)srcY[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+ }
+ dstY += dstYStride;
+ srcY += srcYStride;
+ }
+
+ for (int32_t y = 0; y < height / 2; ++y) {
+ for (int32_t x = 0; x < width; ++x) {
+ dstUV[x] = dstUV[dstUVStride + x] =
+ ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+ }
+ srcUV += srcUVStride;
+ dstUV += dstUVStride << 1;
+ }
+}
+
std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
mQueue.pop_front();
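convertRGBA1010102ToP210() above applies a 10-bit RGB-to-YUV matrix with a +512 rounding term, a >>10 normalization, a chroma bias of 512, and clipping, before packing each sample into the upper 10 bits of a 16-bit word. The standalone sketch below re-derives that per-pixel arithmetic for the BT.709 full-range row of the tables above; it is a checking aid only, not code from the tree. A white pixel (r = g = b = 1023) comes out as Y = 1023 with mid-scale chroma U = V = 512.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Weights copied from bt709Matrix_10bit[0] (RANGE_FULL) above; the body mirrors
// the per-pixel math of convertRGBA1010102ToP210() for that one case.
static void rgb10ToYuv10FullRange709(uint16_t r, uint16_t g, uint16_t b,
                                     uint16_t* y, uint16_t* u, uint16_t* v) {
    static const int16_t w[3][3] = {
            { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } };
    auto clip = [](int32_t lo, int32_t val, int32_t hi) {
        return std::min(std::max(val, lo), hi);
    };
    int32_t iy = ((r * w[0][0] + g * w[0][1] + b * w[0][2] + 512) >> 10);        // zeroLvl == 0 for full range
    int32_t iu = ((r * w[1][0] + g * w[1][1] + b * w[1][2] + 512) >> 10) + 512;  // chroma bias
    int32_t iv = ((r * w[2][0] + g * w[2][1] + b * w[2][2] + 512) >> 10) + 512;
    *y = (uint16_t)clip(0, iy, 1023);
    *u = (uint16_t)clip(0, iu, 1023);
    *v = (uint16_t)clip(0, iv, 1023);
}

int main() {
    uint16_t y, u, v;
    rgb10ToYuv10FullRange709(1023, 1023, 1023, &y, &u, &v);
    // Prints Y=1023 U=512 V=512; the converter then shifts each value left by 6
    // to place it in the upper 10 bits of the 16-bit P210 sample.
    std::printf("Y=%u U=%u V=%u\n", (unsigned)y, (unsigned)u, (unsigned)v);
    return 0;
}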
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index b28c47e..4306e55 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -68,10 +68,19 @@
size_t dstUStride, size_t dstVStride, size_t width,
size_t height, bool isMonochrome = false);
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+ const uint16_t *srcUV, size_t srcUVStride, size_t dstUVStride,
+ size_t width, size_t height);
+
void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
size_t height, C2Color::matrix_t colorMatrix,
C2Color::range_t colorRange);
+
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+ size_t srcRGBStride, size_t width, size_t height,
+ C2Color::matrix_t colorMatrix, C2Color::range_t colorRange);
+
void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
size_t srcVStride, size_t dstStride, size_t width,
@@ -96,6 +105,12 @@
size_t srcUStride, size_t srcVStride, size_t dstYStride,
size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
bool isMonochrome, CONV_FORMAT_T format);
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+ const uint8_t *srcY, const uint8_t *srcUV,
+ size_t srcYStride, size_t srcUVStride,
+ size_t dstYStride, size_t dstUVStride,
+ uint32_t width, uint32_t height,
+ CONV_FORMAT_T format);
class SimpleC2Component
: public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
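The helpers declared above take explicit source and destination strides. A hypothetical call with packed buffers (stride == width), assuming SimpleC2Component.h is reachable on the include path, might look like the sketch below; note that the P010-to-P210 implementation copies the luma plane with a single memcpy, so it effectively expects a packed Y plane as well.

#include <cstddef>
#include <cstdint>
#include <vector>

#include <SimpleC2Component.h>  // declares convertP010ToP210(); include path assumed here

// Illustration only: packed buffers (stride == width).
static void p010ToP210Packed(const uint16_t* srcY, const uint16_t* srcUV,
                             size_t width, size_t height) {
    std::vector<uint16_t> dstY(width * height);   // luma copied 1:1
    std::vector<uint16_t> dstUV(width * height);  // 4:2:2 chroma plane has full height
    android::convertP010ToP210(dstY.data(), dstUV.data(), srcY, srcUV,
                               /* srcUVStride */ width, /* dstUVStride */ width,
                               width, height);
}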
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 069d6ad..fa5ce77 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -70,6 +70,7 @@
enum platform_level_t : uint32_t; ///< platform level
enum prepend_header_mode_t : uint32_t; ///< prepend header operational modes
enum profile_t : uint32_t; ///< coding profile
+ enum resource_kind_t : uint32_t; ///< resource kinds
enum scaling_method_t : uint32_t; ///< scaling methods
enum scan_order_t : uint32_t; ///< scan orders
enum secure_mode_t : uint32_t; ///< secure/protected modes
@@ -101,6 +102,7 @@
kParamIndexMasteringDisplayColorVolume,
kParamIndexChromaOffset,
kParamIndexGopLayer,
+ kParamIndexSystemResource,
/* =================================== parameter indices =================================== */
@@ -167,6 +169,10 @@
/* Region of Interest Encoding parameters */
kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
+ /* resource capacity and resources excluded */
+ kParamIndexResourcesCapacity,
+ kParamIndexResourcesExcluded,
+
// deprecated
kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -1257,21 +1263,114 @@
/* ----------------------------------------- resources ----------------------------------------- */
/**
- * Resources needed and resources reserved for current configuration.
- *
- * Resources are tracked as a vector of positive numbers. Available resources are defined by
- * the vendor.
- *
- * By default, no resources are reserved for a component. If resource reservation is successful,
- * the component shall be able to use those resources exclusively. If however, the component is
- * not using all of the reserved resources, those may be shared with other components.
- *
- * TODO: define some of the resources.
+ * Resource kind.
*/
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesReserved>
- C2ResourcesReservedTuning;
+C2ENUM(C2Config::resource_kind_t, uint32_t,
+ CONST,
+ PER_FRAME,
+ PER_INPUT_BLOCK,
+ PER_OUTPUT_BLOCK
+)
+
+/**
+ * Definition of a system resource use.
+ *
+ * [PROPOSED]
+ *
+ * System resources are defined by the default component store.
+ * They represent any physical or abstract entities of limited availability
+ * that are required for a component instance to execute and process work.
+ *
+ * Each defined resource has an id.
+ * The use of a resource is specified by the amount and the kind (e.g. whether the amount
+ * of resources is required for each frame processed, or whether they are required
+ * regardless of the processing rate (const amount)).
+ *
+ * Note: implementations can shadow this structure with their own custom resource
+ * structure where a uint32_t based enum is used for id.
+ * This can be used to provide a name for each resource, via parameter descriptors.
+ */
+
+struct C2SystemResourceStruct {
+ C2SystemResourceStruct(uint32_t id_,
+ C2Config::resource_kind_t kind_,
+ uint64_t amount_)
+ : id(id_), kind(kind_), amount(amount_) { }
+ uint32_t id;
+ C2Config::resource_kind_t kind;
+ uint64_t amount;
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(SystemResource)
+ C2FIELD(id, "id")
+ C2FIELD(kind, "kind")
+ C2FIELD(amount, "amount")
+};
+
+/**
+ * Total system resource capacity.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * The total resource capacity is specified as the maximum amount for each resource ID
+ * that is supported by the device hardware or firmware.
+ * As such, the kind must be CONST for each element.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesCapacity> C2ResourcesCapacityTuning;
+constexpr char C2_PARAMKEY_RESOURCES_CAPACITY[] = "resources.capacity";
+
+/**
+ * Excluded system resources.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * Some system resources may be used by components and not tracked by the Codec 2.0 API.
+ * This is communicated by this tuning.
+ * Excluded resources are the total resources that are used by non-Codec 2.0 components.
+ * It is specified as the excluded amount for each resource ID that is used by
+ * a non-Codec 2.0 component. As such, the kind must be CONST for each element.
+ *
+ * The platform can calculate the available resources as the total capacity minus
+ * the excluded resources minus the sum of the resources needed by each component.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesExcluded> C2ResourcesExcludedTuning;
+constexpr char C2_PARAMKEY_RESOURCES_EXCLUDED[] = "resources.excluded";
+
+/**
+ * System resources needed for the current configuration.
+ *
+ * [PROPOSED]
+ *
+ * Resources are tracked as a list of individual resource use specifications.
+ * The resource kind can be CONST, PER_FRAME, PER_INPUT_BLOCK, or PER_OUTPUT_BLOCK.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
constexpr char C2_PARAMKEY_RESOURCES_NEEDED[] = "resources.needed";
+
+/**
+ * System resources reserved for this component.
+ *
+ * [FUTURE]
+ *
+ * This allows the platform to set aside system resources for the component.
+ * Since this is a static resource reservation, kind must be CONST for each element.
+ * This resource reservation only considers CONST and PER_FRAME use.
+ *
+ * By default, no resources are reserved for a component.
+ * If resource reservation is successful, the component shall be able to use those
+ * resources exclusively. If however, the component is not using all of the
+ * reserved resources, those may be shared with other components.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesReserved> C2ResourcesReservedTuning;
constexpr char C2_PARAMKEY_RESOURCES_RESERVED[] = "resources.reserved";
/**
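All of the reworked tunings above carry a flexible array of C2SystemResourceStruct. As an illustration only, the sketch below fills in the [PROPOSED] C2ResourcesNeededTuning using the AllocUnique/m.values pattern that other flexible C2 params follow; the resource ids and amounts are placeholders, not values defined anywhere in the tree.

#include <C2Config.h>

#include <memory>

// Placeholder, vendor-defined resource ids (illustration only).
constexpr uint32_t kExampleHwInstanceId = 1;
constexpr uint32_t kExampleMacroblocksPerFrameId = 2;

std::unique_ptr<C2ResourcesNeededTuning> makeExampleResourcesNeeded() {
    // Two entries: one fixed hardware instance, plus a per-frame macroblock budget
    // (8160 16x16 macroblocks corresponds to a 1920x1088 frame).
    std::unique_ptr<C2ResourcesNeededTuning> needed =
            C2ResourcesNeededTuning::AllocUnique(2 /* flexCount */);
    needed->m.values[0] = C2SystemResourceStruct(
            kExampleHwInstanceId, C2Config::CONST, /* amount */ 1);
    needed->m.values[1] = C2SystemResourceStruct(
            kExampleMacroblocksPerFrameId, C2Config::PER_FRAME, /* amount */ 8160);
    return needed;
}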
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 80735cb..6348e42 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -21,7 +21,9 @@
#include <utils/Trace.h>
#include <codec2/aidl/GraphicBufferAllocator.h>
+#include <codec2/common/HalSelection.h>
#include <codec2/hidl/client.h>
+
#include <C2Debug.h>
#include <C2BufferPriv.h>
#include <C2Config.h> // for C2StreamUsageTuning
@@ -1832,9 +1834,7 @@
std::shared_ptr<Codec2Client::InputSurface> Codec2Client::CreateInputSurface(
char const* serviceName) {
- int32_t inputSurfaceSetting = ::android::base::GetIntProperty(
- "debug.stagefright.c2inputsurface", int32_t(0));
- if (inputSurfaceSetting <= 0) {
+ if (!IsCodec2AidlInputSurfaceSelected()) {
return nullptr;
}
size_t index = GetServiceNames().size();
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 0638363..886391b 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -56,13 +56,17 @@
"libaconfig_storage_read_api_cc",
],
- static_libs: ["aconfig_mediacodec_flags_c_lib"],
+ static_libs: [
+ "aconfig_mediacodec_flags_c_lib",
+ "android.media.codec-aconfig-cc",
+ ],
}
cc_defaults {
name: "libcodec2_hal_selection",
static_libs: [
"aconfig_mediacodec_flags_c_lib",
+ "android.media.codec-aconfig-cc",
"libcodec2_hal_selection_static",
],
shared_libs: [
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index d3ea181..5bf4fbe 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -21,6 +21,7 @@
// NOTE: due to dependency from mainline modules cannot use libsysprop
// #include <android/sysprop/MediaProperties.sysprop.h>
#include <android-base/properties.h>
+#include <android_media_codec.h>
#include <com_android_media_codec_flags.h>
#include <codec2/common/HalSelection.h>
@@ -66,4 +67,20 @@
return false;
}
+bool IsCodec2AidlInputSurfaceSelected() {
+ if (!IsCodec2AidlHalSelected()) {
+ return false;
+ }
+
+ int32_t inputSurfaceSetting = ::android::base::GetIntProperty(
+ "debug.stagefright.c2inputsurface", int32_t(0));
+ if (inputSurfaceSetting <= 0) {
+ return false;
+ }
+ if (!android::media::codec::provider_->aidl_hal_input_surface()) {
+ return false;
+ }
+ return true;
+}
+
} // namespace android
diff --git a/media/codec2/hal/common/include/codec2/common/HalSelection.h b/media/codec2/hal/common/include/codec2/common/HalSelection.h
index 7c77515..bf0c7c5 100644
--- a/media/codec2/hal/common/include/codec2/common/HalSelection.h
+++ b/media/codec2/hal/common/include/codec2/common/HalSelection.h
@@ -22,6 +22,9 @@
// Returns true iff AIDL c2 HAL is selected for the system
bool IsCodec2AidlHalSelected();
+// Returns true iff AIDL c2 InputSurface interface is selected for the system
+bool IsCodec2AidlInputSurfaceSelected();
+
} // namespace android
#endif // CODEC2_HAL_SELECTION_H
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index a943626..897a696 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1907,7 +1907,7 @@
bottom = c2_min(bottom, height);
if (right > left && bottom > top) {
C2Rect rect(right - left, bottom - top);
- rect.at(left, top);
+ rect = rect.at(left, top);
c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
} else {
ALOGE("Rects configuration %s is not valid.", box);
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index e1ae75c..0f5cdd6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -747,6 +747,7 @@
pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010] = COLOR_FormatYUVP010;
pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102] = COLOR_Format32bitABGR2101010;
pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16] = COLOR_Format64bitABGRFloat;
+ pixelFormatMap[AHARDWAREBUFFER_FORMAT_YCbCr_P210] = COLOR_FormatYUVP210;
std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
std::vector<std::unique_ptr<C2Param>> heapParams;
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 7a33af4..aa87e97 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -64,6 +64,13 @@
return kVendorApiLevel >= __ANDROID_API_T__;
}
+static bool isP210Allowed() {
+ static const int32_t kVendorApiLevel =
+ base::GetIntProperty<int32_t>("ro.vendor.api_level", 0);
+
+ return kVendorApiLevel > __ANDROID_API_V__;
+}
+
bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
// HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
// supported prior to this.
@@ -76,6 +83,12 @@
return false;
}
+ // P210 is not available before Android B
+ if (format == (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210 &&
+ !isP210Allowed()) {
+ return false;
+ }
+
// Default scenario --- the consumer is display or GPU
const AHardwareBuffer_Desc consumableForDisplayOrGpu = {
.width = 320,
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 95c2b97..004fa30 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -17,3 +17,8 @@
# MediaRouter and native mirroring only:
aquilescanta@google.com
+
+# Emergency rollbacks and fixes outside LON timezone
+jmtrivi@google.com # US-MTV
+lajos@google.com # US-MTV
+scottnien@google.com # TW-NTC
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index f5dec56..0dd0f74 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -201,11 +201,11 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
- "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
+ "av-audio-types-aidl-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index d084f10..9ed5343 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -99,6 +99,7 @@
static_libs: [
"libstagefright_esds",
+ "android.media.extractor.flags-aconfig-cc",
],
export_include_dirs: [
@@ -321,6 +322,7 @@
static_libs: [
"android.media.codec-aconfig-cc",
+ "android.media.extractor.flags-aconfig-cc",
"com.android.media.flags.editing-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 86741a6..50eeb62 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -44,6 +44,8 @@
#include <media/AudioParameter.h>
#include <system/audio.h>
+#include <com_android_media_extractor_flags.h>
+
// TODO : Remove the defines once mainline media is built against NDK >= 31.
// The mp4 extractor is part of mainline and builds against NDK 29 as of
// writing. These keys are available only from NDK 31:
@@ -1443,6 +1445,17 @@
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
parseAV1ProfileLevelFromCsd(buffer, msg);
+ } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ meta->findData(kKeyAPVC, &type, &data, &size)) {
+ sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
if (esds.InitCheck() != (status_t)OK) {
@@ -2091,6 +2104,9 @@
} else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+ } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ mime == MEDIA_MIMETYPE_VIDEO_APV) {
+ meta->setData(kKeyAPVC, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
int32_t profile = -1;
uint8_t blCompatibilityId = -1;
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 4601831..20c97dc 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -261,6 +261,7 @@
<Limit name="bitrate" range="1-240000000"/>
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Feature name="adaptive-playback" />
<Attribute name="software-codec"/>
</MediaCodec>
</Decoders>
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index 1c1c1e1..0b2c1ba 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
name: "android.hardware.audio.parameter_parser.example_defaults",
defaults: [
"latest_android_hardware_audio_core_ndk_shared",
- "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
+ "av-audio-types-aidl-ndk",
"libbase",
"libbinder_ndk",
],